|
1 | | -int zero = 0; |
2 | | -QApplication app = QApplication(zero, nullptr); |
3 | | -QWidget* active_window = QApplication::activeWindow(); |
| 1 | +#include <Window/file_pick_table_widget.h> |
| 2 | +#include <Window/main_window.h> |
4 | 3 |
|
5 | | -void fixed_length_contiguous(const Trajectory& trajectory) |
6 | | -{ |
7 | | - ankerl::nanobench::doNotOptimizeAway(trajectory.get_hotspot_fixed_length_contiguous(5)); |
8 | | -} |
/// Default output directory for generated benchmark files: "<cwd>/templates/"
/// (with a trailing separator, since generate_file_type concatenates filenames
/// onto it directly). Built via std::filesystem::path so the separator is
/// correct on every platform instead of hard-coding "\\".
/// May be overridden by a command-line argument in main().
std::string out_path = (std::filesystem::current_path() / "templates" / "").string();
9 | 5 |
|
| 6 | +/// \brief Generates an output file of the benchmark data of the given type. |
| 7 | +/// \param path path to the output file |
| 8 | +/// \param type_name name of the output file type |
| 9 | +/// \param mustache_template mustache template for the output file |
| 10 | +/// \param bench benchmark data |
| 11 | +/// \param template_path path to the mustache template file |
10 | 12 | void generate_file_type(const std::string& path, const std::string& type_name, const char* mustache_template, |
11 | 13 | ankerl::nanobench::Bench const& bench, std::filesystem::path template_path) |
12 | 14 | { |
13 | 15 | template_path = std::filesystem::path(path).append("mustache-template." + type_name); |
14 | 16 |
|
| 17 | + if (!parser::dir_exists(path)) |
| 18 | + std::filesystem::create_directory(path); |
| 19 | + |
15 | 20 | if (!parser::file_exists(template_path.string())) |
16 | | - throw std::runtime_error("Mustache template file does not exist"); |
| 21 | + //make new file |
| 22 | + { |
| 23 | + std::ofstream template_file(template_path.string()); |
| 24 | + template_file << mustache_template; |
| 25 | + template_file.close(); |
| 26 | + } |
17 | 27 |
|
18 | 28 | const std::string mustache_template_string = parser::get_file_raw(template_path.string()); |
19 | 29 |
|
20 | | - std::cout << "Generating " << type_name << " file..." << std::endl; |
21 | | - std::cout << mustache_template_string << std::endl; |
22 | 30 | std::ofstream render_out(path + bench.title() + "-visualization." + type_name); |
23 | 31 |
|
24 | | - ankerl::nanobench::render(template_path.string().c_str(), bench, render_out); |
| 32 | + ankerl::nanobench::render(mustache_template_string.c_str(), bench, render_out); |
25 | 33 | } |
26 | 34 |
|
27 | | -void generate_file_type(const std::string& path, const std::string& type_name, const char* mustache_template, |
28 | | - ankerl::nanobench::Bench const& bench) |
| 35 | +/// \brief Callback function for the benchmarking tool when a file is picked |
| 36 | +static inline file_pick_table_widget::t_file_pick_callback benchmark_callback = [&](const std::string& file_path) |
29 | 37 | { |
30 | | - generate_file_type(path, type_name, mustache_template, bench, parser::current_path_string); |
31 | | -} |
| 38 | + const std::vector<parser::parsed_trajectory> trajectories = parser::parse_config(file_path, ' '); |
32 | 39 |
|
33 | | -int main(const int argc, const char* argv[]) |
34 | | -{ |
35 | | - if (argc < 2) |
| 40 | + ankerl::nanobench::Bench bench; |
| 41 | + |
| 42 | + bench.performanceCounters(true); |
| 43 | + bench.title("benchmark"); |
| 44 | + |
| 45 | + std::cout << "Benchmarking..." << std::endl; |
| 46 | + |
| 47 | + for (const auto& parsed_trajectory : trajectories) |
36 | 48 | { |
37 | | - std::cerr << "Please provide a file path to the config file" << std::endl; |
38 | | - return 0; |
| 49 | + bench.warmup(10).minEpochIterations(100).run(parsed_trajectory.name, [&] |
| 50 | + { |
| 51 | + ankerl::nanobench::doNotOptimizeAway(parsed_trajectory.run_trajectory_function()); |
| 52 | + }); |
39 | 53 | } |
40 | | - const std::string file_path = argv[1]; |
41 | | - const std::string out_path = argc > 2 ? argv[2] : std::filesystem::current_path().string(); |
42 | | - try |
43 | | - { |
44 | | - const std::vector<parser::parsed_trajectory> trajectories = parser::parse_config(file_path, ' '); |
45 | 54 |
|
46 | | - ankerl::nanobench::Bench bench; |
| 55 | + generate_file_type(out_path, "json", ankerl::nanobench::templates::json(), bench, out_path); |
47 | 56 |
|
48 | | - bench.performanceCounters(true); |
49 | | - bench.title("benchmark"); |
| 57 | + generate_file_type(out_path, "html", ankerl::nanobench::templates::htmlBoxplot(), bench, out_path); |
50 | 58 |
|
51 | | - std::cout << "Benchmarking..." << std::endl; |
52 | | - for (const auto& parsed_trajectory : trajectories) |
53 | | - { |
54 | | - bench.warmup(10).minEpochIterations(100).run(parsed_trajectory.name, [&] |
55 | | - { |
56 | | - ankerl::nanobench::doNotOptimizeAway(parsed_trajectory.run_trajectory_function()); |
57 | | - }); |
58 | | - } |
| 59 | + generate_file_type(out_path, "csv", ankerl::nanobench::templates::csv(), bench, out_path); |
59 | 60 |
|
60 | | - generate_file_type(out_path, "json", ankerl::nanobench::templates::json(), bench, out_path); |
| 61 | + std::cout << "Output written to: " << out_path << std::endl; |
| 62 | +}; |
61 | 63 |
|
62 | | - generate_file_type(out_path, "html", ankerl::nanobench::templates::htmlBoxplot(), bench, out_path); |
| 64 | +int main(int argc, char* argv[]) |
| 65 | +{ |
| 66 | + Float::fixed_epsilon = 0.01f; |
| 67 | + if (argc > 2) |
| 68 | + { |
| 69 | + std::cerr << "Too many arguments provided" << std::endl; |
| 70 | + return 0; |
| 71 | + } |
63 | 72 |
|
64 | | - generate_file_type(out_path, "csv", ankerl::nanobench::templates::csv(), bench, out_path); |
| 73 | + if (argc == 2) |
| 74 | + out_path = parser::dir_exists(parser::current_path_string + "\\" + argv[1]) |
| 75 | + ? parser::current_path_string + "\\" + argv[1] |
| 76 | + : argv[1]; |
| 77 | + |
| 78 | + try |
| 79 | + { |
| 80 | + QApplication app(argc, nullptr); |
| 81 | + QWidget* active_window = QApplication::activeWindow(); |
| 82 | + main_window main_window(active_window, benchmark_callback, {"Benchmark", "Benchmarking tool"}, "Help"); |
| 83 | + main_window.show(); |
| 84 | + |
| 85 | + app.exec(); |
| 86 | + } |
| 87 | + catch (const parser::parsing_error& e) |
| 88 | + { |
| 89 | + std::cerr << "Something went wrong during the benchmark: " << e.what() << std::endl; |
65 | 90 | } |
66 | | - catch (const std::exception& e) |
| 91 | + catch (...) |
67 | 92 | { |
68 | | - std::cout << e.what() << std::endl; |
69 | | - return 1; |
| 93 | + std::cerr << "Something went wrong during the benchmark." << std::endl; |
70 | 94 | } |
71 | 95 | return 0; |
72 | 96 | } |
0 commit comments