@@ -149,7 +149,7 @@ void TestInference(const std::string& dirname,
     state = paddle::platform::ProfilerState::kCPU;
   } else {
 #ifdef PADDLE_WITH_CUDA
-    state = paddle::platform::ProfilerState::kCUDA;
+    state = paddle::platform::ProfilerState::kAll;
     // The default device_id of paddle::platform::CUDAPlace is 0.
     // Users can get the device_id using:
     //   int device_id = place.GetDeviceId();
@@ -172,7 +172,7 @@ void TestInference(const std::string& dirname,
   }
   // Disable the profiler and print the timing information
   paddle::platform::DisableProfiler(paddle::platform::EventSortingKey::kDefault,
-                                    "load_program_profiler.txt");
+                                    "load_program_profiler");
   paddle::platform::ResetProfiler();

   // 3. Get the feed_target_names and fetch_target_names
@@ -237,7 +237,7 @@ void TestInference(const std::string& dirname,
     // Disable the profiler and print the timing information
     paddle::platform::DisableProfiler(
         paddle::platform::EventSortingKey::kDefault,
-        "run_inference_profiler.txt");
+        "run_inference_profiler");
     paddle::platform::ResetProfiler();
   }
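
For reference, below is a minimal sketch of the enable/profile/disable pattern that these hunks touch, assuming the paddle::platform profiler API visible in the diff (EnableProfiler, DisableProfiler, ResetProfiler). The include path and the ProfileInference() wrapper are illustrative placeholders, not part of the change.

// Minimal sketch (not the test itself): profile a section of code with the
// paddle::platform profiler, mirroring the calls changed in this diff.
// The include path and ProfileInference() wrapper are assumptions.
#include "paddle/fluid/platform/profiler.h"

void ProfileInference() {
  // kAll records both CPU and GPU events; kCPU records CPU events only.
  paddle::platform::ProfilerState state = paddle::platform::ProfilerState::kAll;
  paddle::platform::EnableProfiler(state);

  // ... run the code to be measured here, e.g. executor.Run(...) ...

  // Dump the timing report, sorted by the default key, under the given
  // profile name (the same name style used in the diff above).
  paddle::platform::DisableProfiler(paddle::platform::EventSortingKey::kDefault,
                                    "run_inference_profiler");
  paddle::platform::ResetProfiler();
}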