@@ -20,71 +20,16 @@ limitations under the License. */
 #include <iostream>
 #include <thread>  // NOLINT
 #include <vector>
-#include "framework/core/net/net.h"
+#include "paddle/fluid/inference/api/helper.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
+#include "paddle/fluid/inference/api/timer.h"
+#include "utils/logger/logger.h"
 
 DEFINE_string(model, "", "Directory of the inference model.");
 DEFINE_string(datapath, "", "Path of the dataset.");
 DEFINE_int32(batch_size, 1, "batch size.");
 DEFINE_int32(repeat, 1, "Running the inference program repeat times.");
 
-// Timer for timer
-class Timer {
- public:
-  double start;
-  double startu;
-  void tic() {
-    struct timeval tp;
-    gettimeofday(&tp, NULL);
-    start = tp.tv_sec;
-    startu = tp.tv_usec;
-  }
-  double toc() {
-    struct timeval tp;
-    gettimeofday(&tp, NULL);
-    double used_time_ms =
-        (tp.tv_sec - start) * 1000.0 + (tp.tv_usec - startu) / 1000.0;
-    return used_time_ms;
-  }
-};
-
-std::vector<std::string> string_split(std::string in_str,
-                                      std::string delimiter) {
-  std::vector<std::string> seq;
-  int found = in_str.find(delimiter);
-  int pre_found = -1;
-  while (found != std::string::npos) {
-    if (pre_found == -1) {
-      seq.push_back(in_str.substr(0, found));
-    } else {
-      seq.push_back(in_str.substr(pre_found + delimiter.length(),
-                                  found - delimiter.length() - pre_found));
-    }
-    pre_found = found;
-    found = in_str.find(delimiter, pre_found + delimiter.length());
-  }
-  seq.push_back(
-      in_str.substr(pre_found + 1, in_str.length() - (pre_found + 1)));
-  return seq;
-}
-std::vector<std::string> string_split(
-    std::string in_str, std::vector<std::string>& delimiter) {  // NOLINT
-  std::vector<std::string> in;
-  std::vector<std::string> out;
-  out.push_back(in_str);
-  for (auto del : delimiter) {
-    in = out;
-    out.clear();
-    for (auto s : in) {
-      auto out_s = string_split(s, del);
-      for (auto o : out_s) {
-        out.push_back(o);
-      }
-    }
-  }
-  return out;
-}
-
 class Data {
  public:
   Data(std::string file_name, int batch_size)
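
The hunk above drops the file's hand-rolled Timer and string_split helpers in favor of the shared utilities in paddle/fluid/inference/api/helper.h. A minimal sketch of the two split helpers, assuming only the signatures implied by the call sites later in this diff:

// Sketch only; signatures inferred from the call sites in this diff.
#include <string>
#include <vector>
#include "paddle/fluid/inference/api/helper.h"

void split_demo() {
  std::vector<std::string> fields;
  paddle::inference::split("a,b:c,d", ':', &fields);  // {"a,b", "c,d"}

  std::vector<float> values;
  paddle::inference::split_to_float("0.5,1.5", ',', &values);  // {0.5f, 1.5f}
}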
@@ -120,36 +65,24 @@ void Data::get_batch_data(
   week_fea.clear();
   time_fea.clear();
   while (_file.getline(buf, 10000)) {
-    std::string s = buf;
-    std::vector<std::string> deli_vec = {":"};
-    std::vector<std::string> data_vec = string_split(s, deli_vec);
+    std::vector<std::string> data_vec;
+    paddle::inference::split(buf, ':', &data_vec);
 
     std::vector<std::string> seq;
-    seq = string_split(data_vec[0], {"|"});
+    paddle::inference::split(data_vec[0], '|', &seq);
 
     for (auto link : seq) {
-      std::vector<std::string> data = string_split(link, ",");
       std::vector<float> vec;
-      for (int i = 0; i < data.size(); i++) {
-        vec.push_back(atof(data[i].c_str()));
-      }
+      paddle::inference::split_to_float(link, ',', &vec);
       fea.push_back(vec);
     }
-    std::vector<std::string> week_data;
-    std::vector<std::string> time_data;
 
-    week_data = string_split(data_vec[2], ",");
     std::vector<float> vec_w;
-    for (int i = 0; i < week_data.size(); i++) {
-      vec_w.push_back(atof(week_data[i].c_str()));
-    }
+    paddle::inference::split_to_float(data_vec[2], ',', &vec_w);
     week_fea.push_back(vec_w);
 
-    time_data = string_split(data_vec[1], ",");
     std::vector<float> vec_t;
-    for (int i = 0; i < time_data.size(); i++) {
-      vec_t.push_back(atof(time_data[i].c_str()));
-    }
+    paddle::inference::split_to_float(data_vec[1], ',', &vec_t);
     time_fea.push_back(vec_t);
 
     cum += seq.size();
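
For orientation, each dataset line parsed above has three ':'-separated fields; the field order (link features, then time, then week) follows the data_vec indices used in the loop. A small sketch with a hypothetical sample line:

#include <string>
#include <vector>
#include "paddle/fluid/inference/api/helper.h"

void parse_demo() {
  // Hypothetical line: ':' separates fields, '|' separates links,
  // ',' separates the floats inside each group.
  std::string line = "0.1,0.2|0.3,0.4:5,6:1,0";
  std::vector<std::string> data_vec;
  paddle::inference::split(line, ':', &data_vec);
  // data_vec[0] = "0.1,0.2|0.3,0.4"  -> two link feature vectors (fea)
  // data_vec[1] = "5,6"              -> time features (time_fea)
  // data_vec[2] = "1,0"              -> week features (week_fea)
}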
@@ -275,14 +208,13 @@ void single_test() {
   inputs.push_back(tensor_2);
   inputs.push_back(tensor_0);
 
-  Timer timer;
+  paddle::inference::Timer timer;
   timer.tic();
   for (int i = 0; i < FLAGS_repeat; i++) predictor->Run(inputs, &outputs);
 
-  LOG(INFO) << "batch_size = " << FLAGS_batch_size
-            << ", repeat = " << FLAGS_repeat
-            << ", sequence_length = " << seq_offset[seq_offset.size() - 1]
-            << ", latency: " << timer.toc() / FLAGS_repeat << "ms";
+  paddle::inference::PrintTime(FLAGS_batch_size, FLAGS_repeat, 1, 0,
+                               timer.toc() / FLAGS_repeat);
+  LOG(INFO) << "sequence_length = " << seq_offset[seq_offset.size() - 1];
 
   float* data_o = static_cast<float*>(outputs[0].data.data());
   VLOG(3) << "outputs[0].data.length() = " << outputs[0].data.length();
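
The timing block above follows a tic/toc pattern: start the clock, run the predictor FLAGS_repeat times, and report the average latency. A standalone sketch, assuming only that Timer::toc() returns milliseconds elapsed since tic() and that PrintTime takes (batch_size, repeat, num_threads, tid, latency) as the call above suggests; Expensive() is a hypothetical stand-in for predictor->Run():

#include "paddle/fluid/inference/api/helper.h"
#include "paddle/fluid/inference/api/timer.h"

void Expensive();  // hypothetical workload standing in for predictor->Run()

void benchmark(int repeat) {
  paddle::inference::Timer timer;
  timer.tic();                           // start the wall clock
  for (int i = 0; i < repeat; i++) Expensive();
  double avg_ms = timer.toc() / repeat;  // average latency per run, in ms
  paddle::inference::PrintTime(/*batch_size=*/1, repeat,
                               /*num_threads=*/1, /*tid=*/0, avg_ms);
}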