Skip to content

Commit fab02ef

Browse files
committed
remove performance module tests and refactor utility includes
1 parent 055a675 commit fab02ef

File tree

17 files changed

+531
-830
lines changed

17 files changed

+531
-830
lines changed

.gitmodules

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,3 +13,6 @@
1313
[submodule "3rdparty/libenvpp"]
1414
path = 3rdparty/libenvpp
1515
url = https://github.com/ph3at/libenvpp
16+
[submodule "3rdparty/benchmark"]
17+
path = 3rdparty/benchmark
18+
url = https://github.com/google/benchmark

3rdparty/benchmark

Submodule benchmark added at 27fc2bf

CMakeLists.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ endforeach()
4343
######################### External projects #########################
4444

4545
message( STATUS "PPC step: Setup external projects" )
46+
include(cmake/benchmark.cmake)
4647
include(cmake/gtest.cmake)
4748

4849
############################## Modules ##############################

cmake/benchmark.cmake

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
# Builds Google Benchmark (vendored as a git submodule under
# 3rdparty/benchmark) as an external project and installs it into this
# build tree, so project targets can link against it via
# ppc_link_benchmark() below.
include_guard()

include(ExternalProject)

ExternalProject_Add(
  ppc_benchmark
  SOURCE_DIR "${CMAKE_SOURCE_DIR}/3rdparty/benchmark"
  PREFIX "${CMAKE_CURRENT_BINARY_DIR}/ppc_benchmark"
  BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/ppc_benchmark/build"
  INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/ppc_benchmark/install"
  # Forward the outer build's toolchain and language settings so the
  # external build matches the main project exactly.
  CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
             -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
             -DCMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER}
             -DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER}
             -DCMAKE_CXX_STANDARD=${CMAKE_CXX_STANDARD}
             -DCMAKE_CXX_STANDARD_REQUIRED=${CMAKE_CXX_STANDARD_REQUIRED}
             -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
             # We only need the library itself: skip benchmark's own
             # tests (and their GTest dependency) and the libpfm
             # perf-counter backend; keep install on so the install
             # step below has something to do.
             -DBENCHMARK_ENABLE_TESTING=OFF
             -DBENCHMARK_ENABLE_GTEST_TESTS=OFF
             -DBENCHMARK_ENABLE_INSTALL=ON
             -DBENCHMARK_ENABLE_LIBPFM=OFF
  # Explicit build/install commands: pass $<CONFIG> so multi-config
  # generators (Visual Studio, Xcode) build the right configuration.
  BUILD_COMMAND
    "${CMAKE_COMMAND}" --build "${CMAKE_CURRENT_BINARY_DIR}/ppc_benchmark/build"
    --config $<CONFIG> --parallel
  INSTALL_COMMAND
    "${CMAKE_COMMAND}" --install
    "${CMAKE_CURRENT_BINARY_DIR}/ppc_benchmark/build" --config $<CONFIG>
    --prefix "${CMAKE_CURRENT_BINARY_DIR}/ppc_benchmark/install")
# Link Google Benchmark (built by the ppc_benchmark external project)
# into `target` and make sure the library is built first.
#
# NOTE(review): this uses ${CMAKE_BINARY_DIR} while the external project
# above installs under ${CMAKE_CURRENT_BINARY_DIR}; the two coincide
# only because benchmark.cmake is include()d from the top-level
# CMakeLists.txt — confirm if that ever changes.
function(ppc_link_benchmark target)
  # Headers are taken straight from the submodule's source tree.
  target_include_directories(
    ${target} PUBLIC ${CMAKE_SOURCE_DIR}/3rdparty/benchmark/include)
  # Build/install the external project before `target` links.
  add_dependencies(${target} ppc_benchmark)
  # FIX: benchmark's install honors GNUInstallDirs, which uses lib64
  # instead of lib on some distributions (e.g. Fedora/RHEL) — search
  # both so linking does not fail there.
  target_link_directories(${target} PUBLIC
    "${CMAKE_BINARY_DIR}/ppc_benchmark/install/lib"
    "${CMAKE_BINARY_DIR}/ppc_benchmark/install/lib64")
  target_link_libraries(${target} PUBLIC benchmark)
  # FIX: benchmark depends on Shlwapi on Windows; linking by bare name
  # bypasses the imported benchmark::benchmark target that would pull
  # it in automatically, so add it explicitly.
  if(WIN32)
    target_link_libraries(${target} PUBLIC shlwapi)
  endif()
endfunction()
Lines changed: 1 addition & 129 deletions
Original file line numberDiff line numberDiff line change
@@ -1,133 +1,5 @@
11
#pragma once
22

3-
#include <cstdint>
4-
#include <functional>
5-
#include <iomanip>
6-
#include <iostream>
7-
#include <memory>
8-
#include <sstream>
9-
#include <stdexcept>
10-
#include <string>
11-
12-
#include "task/include/task.hpp"
13-
#include "util/include/util.hpp"
14-
153
namespace ppc::performance {
16-
17-
inline double DefaultTimer() {
18-
return -1.0;
19-
}
20-
21-
struct PerfAttr {
22-
/// @brief Number of times the task is run for performance evaluation.
23-
uint64_t num_running = 5;
24-
/// @brief Timer function returning current time in seconds.
25-
/// @cond
26-
std::function<double()> current_timer = DefaultTimer;
27-
/// @endcond
28-
};
29-
30-
struct PerfResults {
31-
/// @brief Measured execution time in seconds.
32-
double time_sec = 0.0;
33-
enum class TypeOfRunning : uint8_t {
34-
kPipeline,
35-
kTaskRun,
36-
kNone,
37-
};
38-
TypeOfRunning type_of_running = TypeOfRunning::kNone;
39-
constexpr static double kMaxTime = 10.0;
40-
};
41-
42-
template <typename InType, typename OutType>
43-
class Perf {
44-
public:
45-
// Init performance analysis with an initialized task and initialized data
46-
explicit Perf(const ppc::task::TaskPtr<InType, OutType> &task_ptr) : task_(task_ptr) {
47-
task_ptr->GetStateOfTesting() = ppc::task::StateOfTesting::kPerf;
48-
}
49-
// Check performance of full task's pipeline: PreProcessing() ->
50-
// Validation() -> Run() -> PostProcessing()
51-
void PipelineRun(const PerfAttr &perf_attr) {
52-
perf_results_.type_of_running = PerfResults::TypeOfRunning::kPipeline;
53-
54-
CommonRun(perf_attr, [&] {
55-
task_->Validation();
56-
task_->PreProcessing();
57-
task_->Run();
58-
task_->PostProcessing();
59-
}, perf_results_);
60-
}
61-
// Check performance of task's Run() function
62-
void TaskRun(const PerfAttr &perf_attr) {
63-
perf_results_.type_of_running = PerfResults::TypeOfRunning::kTaskRun;
64-
65-
task_->Validation();
66-
task_->PreProcessing();
67-
CommonRun(perf_attr, [&] { task_->Run(); }, perf_results_);
68-
task_->PostProcessing();
69-
70-
task_->Validation();
71-
task_->PreProcessing();
72-
task_->Run();
73-
task_->PostProcessing();
74-
}
75-
// Print results for automation checkers
76-
void PrintPerfStatistic(const std::string &test_id) const {
77-
std::string type_test_name;
78-
if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kTaskRun) {
79-
type_test_name = "task_run";
80-
} else if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kPipeline) {
81-
type_test_name = "pipeline";
82-
} else {
83-
std::stringstream err_msg;
84-
err_msg << '\n' << "The type of performance check for the task was not selected.\n";
85-
throw std::runtime_error(err_msg.str().c_str());
86-
}
87-
88-
auto time_secs = perf_results_.time_sec;
89-
const auto max_time = ppc::util::GetPerfMaxTime();
90-
std::stringstream perf_res_str;
91-
if (time_secs < max_time) {
92-
perf_res_str << std::fixed << std::setprecision(10) << time_secs;
93-
std::cout << test_id << ":" << type_test_name << ":" << perf_res_str.str() << '\n';
94-
} else {
95-
std::stringstream err_msg;
96-
err_msg << '\n' << "Task execute time need to be: ";
97-
err_msg << "time < " << max_time << " secs." << '\n';
98-
err_msg << "Original time in secs: " << time_secs << '\n';
99-
perf_res_str << std::fixed << std::setprecision(10) << -1.0;
100-
std::cout << test_id << ":" << type_test_name << ":" << perf_res_str.str() << '\n';
101-
throw std::runtime_error(err_msg.str().c_str());
102-
}
103-
}
104-
/// @brief Retrieves the performance test results.
105-
/// @return The latest PerfResults structure.
106-
[[nodiscard]] PerfResults GetPerfResults() const {
107-
return perf_results_;
108-
}
109-
110-
private:
111-
PerfResults perf_results_;
112-
std::shared_ptr<ppc::task::Task<InType, OutType>> task_;
113-
static void CommonRun(const PerfAttr &perf_attr, const std::function<void()> &pipeline, PerfResults &perf_results) {
114-
auto begin = perf_attr.current_timer();
115-
for (uint64_t i = 0; i < perf_attr.num_running; i++) {
116-
pipeline();
117-
}
118-
auto end = perf_attr.current_timer();
119-
perf_results.time_sec = (end - begin) / static_cast<double>(perf_attr.num_running);
120-
}
121-
};
122-
123-
inline std::string GetStringParamName(PerfResults::TypeOfRunning type_of_running) {
124-
if (type_of_running == PerfResults::TypeOfRunning::kTaskRun) {
125-
return "task_run";
126-
}
127-
if (type_of_running == PerfResults::TypeOfRunning::kPipeline) {
128-
return "pipeline";
129-
}
130-
return "none";
4+
inline constexpr const char kTaskRunName[] = "task_run";
1315
}
132-
133-
} // namespace ppc::performance

0 commit comments

Comments
 (0)