Skip to content

Commit d4c3fe9

Browse files
committed
clean api_anakin_engine_rnn_tester
1 parent f76f42c commit d4c3fe9

File tree

1 file changed

+13
-81
lines changed

1 file changed

+13
-81
lines changed

paddle/fluid/inference/api/api_anakin_engine_rnn_tester.cc

Lines changed: 13 additions & 81 deletions
Original file line numberDiff line numberDiff line change
@@ -20,71 +20,16 @@ limitations under the License. */
2020
#include <iostream>
2121
#include <thread> // NOLINT
2222
#include <vector>
23-
#include "framework/core/net/net.h"
23+
#include "paddle/fluid/inference/api/helper.h"
2424
#include "paddle/fluid/inference/api/paddle_inference_api.h"
25+
#include "paddle/fluid/inference/api/timer.h"
26+
#include "utils/logger/logger.h"
2527

2628
DEFINE_string(model, "", "Directory of the inference model.");
2729
DEFINE_string(datapath, "", "Path of the dataset.");
2830
DEFINE_int32(batch_size, 1, "batch size.");
2931
DEFINE_int32(repeat, 1, "Running the inference program repeat times.");
3032

31-
// Simple stopwatch reporting elapsed time in milliseconds.
//
// NOTE(review): the previous implementation used gettimeofday(), which
// reads the wall clock; NTP or manual clock adjustments between tic()
// and toc() could produce wrong (even negative) durations. A monotonic
// clock (std::chrono::steady_clock) is the correct tool for intervals.
class Timer {
 public:
  // Start (or restart) the stopwatch.
  void tic() { start_ = std::chrono::steady_clock::now(); }

  // Return milliseconds elapsed since the last tic().
  double toc() const {
    const auto elapsed = std::chrono::steady_clock::now() - start_;
    return std::chrono::duration<double, std::milli>(elapsed).count();
  }

 private:
  std::chrono::steady_clock::time_point start_{};
};
50-
51-
// Split |in_str| on every occurrence of |delimiter| and return the pieces
// in order (empty pieces between adjacent delimiters are kept; an input
// with no delimiter yields a single piece). An empty delimiter returns
// the whole input as one piece instead of looping forever.
//
// Fixes vs. the previous version:
//  - the tail piece used `pre_found + 1` instead of
//    `pre_found + delimiter.length()`, so multi-character delimiters left
//    delimiter residue in the last element (e.g. "ab::cd" split on "::"
//    produced {"ab", ":cd"});
//  - `find` results were narrowed into `int`, relying on implementation-
//    defined truncation for the npos comparison;
//  - parameters are now taken by const reference (no copies).
std::vector<std::string> string_split(const std::string &in_str,
                                      const std::string &delimiter) {
  std::vector<std::string> seq;
  if (delimiter.empty()) {  // guard: find("") would never advance
    seq.push_back(in_str);
    return seq;
  }
  std::string::size_type pos = 0;
  std::string::size_type found;
  while ((found = in_str.find(delimiter, pos)) != std::string::npos) {
    seq.push_back(in_str.substr(pos, found - pos));
    pos = found + delimiter.length();
  }
  seq.push_back(in_str.substr(pos));  // tail after the last delimiter
  return seq;
}
70-
std::vector<std::string> string_split(
71-
std::string in_str, std::vector<std::string>& delimiter) { // NOLINT
72-
std::vector<std::string> in;
73-
std::vector<std::string> out;
74-
out.push_back(in_str);
75-
for (auto del : delimiter) {
76-
in = out;
77-
out.clear();
78-
for (auto s : in) {
79-
auto out_s = string_split(s, del);
80-
for (auto o : out_s) {
81-
out.push_back(o);
82-
}
83-
}
84-
}
85-
return out;
86-
}
87-
8833
class Data {
8934
public:
9035
Data(std::string file_name, int batch_size)
@@ -120,36 +65,24 @@ void Data::get_batch_data(
12065
week_fea.clear();
12166
time_fea.clear();
12267
while (_file.getline(buf, 10000)) {
123-
std::string s = buf;
124-
std::vector<std::string> deli_vec = {":"};
125-
std::vector<std::string> data_vec = string_split(s, deli_vec);
68+
std::vector<std::string> data_vec;
69+
paddle::inference::split(buf, ':', &data_vec);
12670

12771
std::vector<std::string> seq;
128-
seq = string_split(data_vec[0], {"|"});
72+
paddle::inference::split(data_vec[0], '|', &seq);
12973

13074
for (auto link : seq) {
131-
std::vector<std::string> data = string_split(link, ",");
13275
std::vector<float> vec;
133-
for (int i = 0; i < data.size(); i++) {
134-
vec.push_back(atof(data[i].c_str()));
135-
}
76+
paddle::inference::split_to_float(link, ',', &vec);
13677
fea.push_back(vec);
13778
}
138-
std::vector<std::string> week_data;
139-
std::vector<std::string> time_data;
14079

141-
week_data = string_split(data_vec[2], ",");
14280
std::vector<float> vec_w;
143-
for (int i = 0; i < week_data.size(); i++) {
144-
vec_w.push_back(atof(week_data[i].c_str()));
145-
}
81+
paddle::inference::split_to_float(data_vec[2], ',', &vec_w);
14682
week_fea.push_back(vec_w);
14783

148-
time_data = string_split(data_vec[1], ",");
14984
std::vector<float> vec_t;
150-
for (int i = 0; i < time_data.size(); i++) {
151-
vec_t.push_back(atof(time_data[i].c_str()));
152-
}
85+
paddle::inference::split_to_float(data_vec[1], ',', &vec_t);
15386
time_fea.push_back(vec_t);
15487

15588
cum += seq.size();
@@ -275,14 +208,13 @@ void single_test() {
275208
inputs.push_back(tensor_2);
276209
inputs.push_back(tensor_0);
277210

278-
Timer timer;
211+
paddle::inference::Timer timer;
279212
timer.tic();
280213
for (int i = 0; i < FLAGS_repeat; i++) predictor->Run(inputs, &outputs);
281214

282-
LOG(INFO) << "batch_size = " << FLAGS_batch_size
283-
<< ", repeat = " << FLAGS_repeat
284-
<< ", sequence_length = " << seq_offset[seq_offset.size() - 1]
285-
<< ", latency: " << timer.toc() / FLAGS_repeat << "ms";
215+
paddle::inference::PrintTime(FLAGS_batch_size, FLAGS_repeat, 1, 0,
216+
timer.toc() / FLAGS_repeat);
217+
LOG(INFO) << "sequence_length = " << seq_offset[seq_offset.size() - 1];
286218

287219
float* data_o = static_cast<float*>(outputs[0].data.data());
288220
VLOG(3) << "outputs[0].data.length() = " << outputs[0].data.length();

0 commit comments

Comments
 (0)