
Commit 05abafc

Abhigyan Acherjee authored and guitargeek committed

[RF] Added benchmark plot scripts and update RooFitBinned benchmarks

1 parent 1fb0396 commit 05abafc

File tree

4 files changed: +138 -49 lines changed

root/roofit/roofit/CMakeLists.txt

Lines changed: 2 additions & 0 deletions
@@ -19,6 +19,8 @@ if(cuda)
 endif()

 file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/benchRooFitBackends_make_plot.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/compare_benchmarks.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/run_benchmarks.sh DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)

 RB_ADD_GBENCHMARK(benchCodeSquashAD
   benchCodeSquashAD.cxx

root/roofit/roofit/RooFitBinnedBenchmarks.cxx

Lines changed: 57 additions & 49 deletions
@@ -19,25 +19,27 @@ using namespace RooStats;
 using namespace HistFactory;

 namespace {
-constexpr bool verbose = false;
+constexpr bool verbose = false;

-// test matrix configuration
-const std::vector<int> nChannelsVector = {1, 2, 3};
-const std::vector<int> nBinsVector {5, 10, 15};
-const int nBinsForChannelScan = 10;
-const int nChannelsForBinScan = 1;
-const std::vector<int> nCPUVector {1, 2, 3};
+// test matrix configuration
+const std::vector<int> nChannelsVector = {1, 2, 3};
+const std::vector<int> nBinsVector{5, 10, 15};
+const int nBinsForChannelScan = 10;
+const int nChannelsForBinScan = 1;
+const std::vector<int> nCPUVector{1};

-constexpr auto evalBackend = RooFit::EvalBackend::Value::Cpu;
+////default evaluation backend
+std::string evalBackend = "cpu";

-auto const timeUnit = benchmark::kMillisecond;
+auto const timeUnit = benchmark::kMillisecond;

-void setupRooMsgService() {
-   RooMsgService::instance().setGlobalKillBelow(RooFit::FATAL);
-   RooMsgService::instance().getStream(1).removeTopic(RooFit::Minimization);
-   RooMsgService::instance().getStream(1).removeTopic(RooFit::NumIntegration);
-   RooMsgService::instance().getStream(1).removeTopic(RooFit::Eval);
-}
+void setupRooMsgService()
+{
+   RooMsgService::instance().setGlobalKillBelow(RooFit::FATAL);
+   RooMsgService::instance().getStream(1).removeTopic(RooFit::Minimization);
+   RooMsgService::instance().getStream(1).removeTopic(RooFit::NumIntegration);
+   RooMsgService::instance().getStream(1).removeTopic(RooFit::Eval);
+}

 } // namespace

@@ -48,9 +50,9 @@ Sample addVariations(Sample asample, int nnps, bool channel_crosstalk, int chann
       Double_t random = R->Rndm();
       double uncertainty_up = (1 + random) / sqrt(100);
       double uncertainty_down = (1 - random) / sqrt(100);
-      if(verbose) {
-         std::cout << "in channel " << channel << "nuisance +/- [" << uncertainty_up << "," << uncertainty_down << "]"
-            << std::endl;
+      if (verbose) {
+         std::cout << "in channel " << channel << "nuisance +/- [" << uncertainty_up << "," << uncertainty_down << "]"
+                   << std::endl;
       }
       std::string nuis_name = "norm_uncertainty_" + std::to_string(nuis);
       if (!channel_crosstalk) {
@@ -101,8 +103,8 @@ std::unique_ptr<RooStats::HistFactory::Channel> makeChannel(int channel, int nbi

 void buildBinnedTest(int n_channels = 1, int nbins = 10, int nnps = 1, const char *name_rootfile = "")
 {
-   if(verbose) {
-      std::cout << "in build binned test with output" << name_rootfile << std::endl;
+   if (verbose) {
+      std::cout << "in build binned test with output" << name_rootfile << std::endl;
    }
    Measurement meas("meas", "meas");
    meas.SetPOI("SignalStrength");
@@ -121,19 +123,20 @@ void buildBinnedTest(int n_channels = 1, int nbins = 10, int nnps = 1, const cha
    } else {
       ws = std::unique_ptr<RooWorkspace>{hist2workspace.MakeCombinedModel(meas)};
    }
-   for (RooAbsArg * arg : ws->components()) {
+   for (RooAbsArg *arg : ws->components()) {
       if (arg->IsA() == RooRealSumPdf::Class()) {
          arg->setAttribute("BinnedLikelihood");
-         if(verbose) std::cout << "component " << arg->GetName() << " is a binned likelihood" << std::endl;
+         if (verbose)
+            std::cout << "component " << arg->GetName() << " is a binned likelihood" << std::endl;
       }
    }
    ws->SetName("BinnedWorkspace");
    ws->writeToFile(name_rootfile);
 }

-//############## End of Base Algorithms ##############################
-//####################################################################
-//############## Start Of # Tests #############################
+// ############## End of Base Algorithms ##############################
+// ####################################################################
+// ############## Start Of # Tests #############################

 static void BM_RooFit_BinnedTestMigrad(benchmark::State &state)
 {
@@ -145,7 +148,8 @@ static void BM_RooFit_BinnedTestMigrad(benchmark::State &state)
    auto infile = std::make_unique<TFile>("workspace.root", "RECREATE");
    // if (infile->IsZombie()) {
    buildBinnedTest(chan, nbins, 2, "workspace.root");
-   if(verbose) std::cout << "Workspace for tests was created!" << std::endl;
+   if (verbose)
+      std::cout << "Workspace for tests was created!" << std::endl;
    //}
    infile.reset(TFile::Open("workspace.root"));
    RooWorkspace *w = static_cast<RooWorkspace *>(infile->Get("BinnedWorkspace"));
@@ -174,7 +178,8 @@ static void BM_RooFit_BinnedTestHesse(benchmark::State &state)
    TFile *infile = new TFile("workspace.root");
    // if (infile->IsZombie()) {
    buildBinnedTest(chan, nbins, 2, "workspace.root");
-   if(verbose) std::cout << "Workspace for tests was created!" << std::endl;
+   if (verbose)
+      std::cout << "Workspace for tests was created!" << std::endl;
    // }
    infile = TFile::Open("workspace.root");
    RooWorkspace *w = static_cast<RooWorkspace *>(infile->Get("BinnedWorkspace"));
@@ -211,7 +216,8 @@ static void BM_RooFit_BinnedTestMinos(benchmark::State &state)
    auto infile = std::make_unique<TFile>("workspace.root");
    // if (infile->IsZombie()) {
    buildBinnedTest(chan, nbins, 2, "workspace.root");
-   if(verbose) std::cout << "Workspace for tests was created!" << std::endl;
+   if (verbose)
+      std::cout << "Workspace for tests was created!" << std::endl;
    //}
    infile.reset(TFile::Open("workspace.root"));
    RooWorkspace *w = static_cast<RooWorkspace *>(infile->Get("BinnedWorkspace"));
@@ -235,11 +241,11 @@ static void BM_RooFit_BinnedTestMinos(benchmark::State &state)
    }
 }

-//############## Run # Tests ###############################
+// ############## Run # Tests ###############################

 static void ChanArguments(benchmark::internal::Benchmark *b)
 {
-   // channel scan
+   // channel scan
    for (int nChannels : nChannelsVector) {
       for (int nCPU : nCPUVector) {
          b->Args({nChannels, nBinsForChannelScan, nCPU});
@@ -254,24 +260,26 @@ static void ChanArguments(benchmark::internal::Benchmark *b)
    }
 }

-BENCHMARK(BM_RooFit_BinnedTestMigrad)
-   ->Apply(ChanArguments)
-   ->UseRealTime()
-   ->Unit(timeUnit)
-   ->Iterations(1);
-BENCHMARK(BM_RooFit_BinnedTestHesse)
-   ->Apply(ChanArguments)
-   ->UseRealTime()
-   ->Unit(timeUnit)
-   ->Iterations(1);
-BENCHMARK(BM_RooFit_BinnedTestMinos)
-   ->Apply(ChanArguments)
-   ->UseRealTime()
-   ->Unit(timeUnit)
-   ->Iterations(1);
+BENCHMARK(BM_RooFit_BinnedTestMigrad)->Apply(ChanArguments)->UseRealTime()->Unit(timeUnit)->Iterations(1);
+BENCHMARK(BM_RooFit_BinnedTestHesse)->Apply(ChanArguments)->UseRealTime()->Unit(timeUnit)->Iterations(1);
+BENCHMARK(BM_RooFit_BinnedTestMinos)->Apply(ChanArguments)->UseRealTime()->Unit(timeUnit)->Iterations(1);
+
+// ############## End Of Tests ########################################
+// ####################################################################
+// ############## RUN #################################################
+
+int main(int argc, char **argv)
+{

-//############## End Of Tests ########################################
-//####################################################################
-//############## RUN #################################################
+   benchmark::Initialize(&argc, argv);

-BENCHMARK_MAIN();
+   for (int i = 1; i < argc; ++i) {
+      if (std::string(argv[i]) == "-b") {
+         if (i + 1 < argc) {
+            // Set the evalBackend value from the next command-line argument
+            evalBackend = argv[i + 1];
+         }
+      }
+   }
+   benchmark::RunSpecifiedBenchmarks();
+}
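
Note: the new main() replaces BENCHMARK_MAIN() so the evaluation backend can be chosen at run time; after benchmark::Initialize(), the remaining arguments are scanned for a "-b <backend>" pair, which is stored in the evalBackend string. A minimal invocation sketch in bash (the binary name benchRooFitBinned and the backend names are taken from the run_benchmarks.sh script added below):

# run the binned benchmarks with the codegen backend and write Google Benchmark CSV results
./benchRooFitBinned -b codegen --benchmark_out_format=csv --benchmark_out=out_codegen.csv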

root/roofit/roofit/compare_benchmarks.py

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+import pandas as pd
+import csv
+import matplotlib.pyplot as plt
+import numpy as np
+from io import StringIO
+
+
+def read_csv(csv_file_path):
+    with open(csv_file_path, "r") as csvfile:
+        lines = [line for line in csvfile if ("BM_RooFit" in line or "name," in line)]
+    print("\n".join(lines))
+    return pd.read_csv(StringIO("\n".join(lines)))
+
+
+codegen_df = read_csv("out_codegen.csv")
+codegen_nograd_df = read_csv("out_codegen_ngrad.csv")
+legacy_df = read_csv("out_legacy.csv")
+cpu_df = read_csv("out_cpu.csv")
+
+# Plotting
+plt.figure(figsize=(10, 6))
+
+x = np.arange(len(codegen_df["name"].unique()))
+
+
+for i, benchmark in enumerate(codegen_df["name"].unique()):
+
+    codegen_time = codegen_df.loc[codegen_df["name"] == benchmark, "real_time"]
+    codegen_nograd_time = codegen_nograd_df.loc[codegen_nograd_df["name"] == benchmark, "real_time"]
+    cpu_time = cpu_df.loc[cpu_df["name"] == benchmark, "real_time"]
+    legacy_time = legacy_df.loc[legacy_df["name"] == benchmark, "real_time"]
+
+    plt.bar(x[i] - 0.10, codegen_time, width=0.15, align="center", label="codegen", color="lightblue")
+    plt.bar(x[i], codegen_nograd_time, width=0.15, align="edge", label="codegen_nograd", color="navy")
+    plt.bar(x[i] + 0.15, cpu_time, width=0.15, align="edge", label="cpu", color="cyan")
+    plt.bar(x[i] + 0.30, legacy_time, width=0.15, align="edge", label="legacy", color="gray")
+
+
+# Customize legend
+legend_labels = ["codegen", "codegen_nograd", "cpu", "legacy"]
+legend_colors = ["lightblue", "navy", "cyan", "gray"]
+legend_handles = [plt.Rectangle((0, 0), 1, 1, color=color) for color in legend_colors]
+plt.legend(legend_handles, legend_labels)
+
+plt.yscale("log")
+
+plt.xlabel("Benchmark")
+plt.ylabel("Time (milliseconds)")
+plt.title("Comparison of Benchmarks for Different Evaluation Backends")
+plt.xticks(x, rotation=90)
+plt.tight_layout()
+plt.savefig("comparision_plot.png")
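
The read_csv() helper above keeps only the CSV header row (the line containing "name,") and the "BM_RooFit" result rows, so any other lines Google Benchmark writes into the output file are dropped before the data reaches pandas. A quick bash sketch of the equivalent filter, assuming one of the CSV files produced by run_benchmarks.sh (below) is already present:

# print the same rows that compare_benchmarks.py feeds to pandas
grep -E 'BM_RooFit|name,' out_cpu.csv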

root/roofit/roofit/run_benchmarks.sh

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+# Function to run a benchmark script and wait until the CSV file is generated
+# to make it executable chmod +x run_benchmarks.sh
+#then run it using ./run_benchmarks.sh
+#!/bin/bash
+
+# Function to run the benchmark command and wait for CSV file to be generated
+run_benchmark() {
+    echo "Running benchmark: $1"
+    $1 &
+    local pid=$!
+    while [ ! -f $2 ]; do
+        sleep 1
+    done
+    wait $pid
+    echo "CSV file generated: $2"
+}
+
+# Run benchmarks
+run_benchmark "./benchRooFitBinned -b codegen --benchmark_out_format=csv --benchmark_out=out_codegen.csv" "out_codegen.csv"
+run_benchmark "./benchRooFitBinned -b codegen_no_grad --benchmark_out_format=csv --benchmark_out=out_codegen_ngrad.csv" "out_codegen_ngrad.csv"
+run_benchmark "./benchRooFitBinned -b legacy --benchmark_out_format=csv --benchmark_out=out_legacy.csv" "out_legacy.csv"
+run_benchmark "./benchRooFitBinned -b cpu --benchmark_out_format=csv --benchmark_out=out_cpu.csv" "out_cpu.csv"
+
+# Run Python script
+python3 compare_benchmarks.py
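
Putting the pieces together, the intended workflow appears to be: build the benchmark, run the driver script from the build directory (where the CMakeLists.txt change copies both scripts), and collect the comparison plot. A sketch in bash, assuming the benchmark binary benchRooFitBinned has been built into that same directory:

# run all four backends, then generate the comparison plot
chmod +x run_benchmarks.sh
./run_benchmarks.sh   # writes out_codegen.csv, out_codegen_ngrad.csv, out_legacy.csv, out_cpu.csv,
                      # then calls compare_benchmarks.py, which saves comparision_plot.png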
