Skip to content

Commit e071f8a

Browse files
authored
Extend AOD creation, extend analysis tools, Improve FTOF writing to AODs (#71)
* Computing T0 before filling fTOF tree with all FTOF tracks * Add more arguments to the analysis runner * Add possibility to append to an existing production
1 parent 32dde1a commit e071f8a

File tree

3 files changed

+121
-57
lines changed

3 files changed

+121
-57
lines changed

examples/aod/createO2tables.C

Lines changed: 33 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -206,7 +206,10 @@ int createO2tables(const char* inputFile = "delphes.root",
206206
// loop over tracks
207207
std::vector<TrackAlice3> tracks_for_vertexing;
208208
std::vector<o2::InteractionRecord> bcData;
209+
// Tracks used for the T0 evaluation
209210
std::vector<Track*> tof_tracks;
211+
std::vector<Track*> ftof_tracks;
212+
std::vector<std::pair<int, int>> ftof_tracks_indices;
210213
const int multiplicity = tracks->GetEntries();
211214

212215
// Build index array of tracks to randomize track writing order
@@ -321,25 +324,8 @@ int createO2tables(const char* inputFile = "delphes.root",
321324

322325
// check if has Forward TOF
323326
if (forward_tof_layer.hasTOF(*track)) {
324-
ftof.fIndexCollisions = ientry + eventOffset;
325-
ftof.fIndexTracks = fTrackCounter; // Index in the Track table
326-
327-
ftof.fFTOFLength = track->L * 0.1; // [cm]
328-
ftof.fFTOFSignal = track->TOuter * 1.e12; // [ps]
329-
330-
std::array<float, 5> deltat, nsigma;
331-
forward_tof_layer.makePID(*track, deltat, nsigma);
332-
ftof.fFTOFDeltaEl = deltat[0];
333-
ftof.fFTOFDeltaMu = deltat[1];
334-
ftof.fFTOFDeltaPi = deltat[2];
335-
ftof.fFTOFDeltaKa = deltat[3];
336-
ftof.fFTOFDeltaPr = deltat[4];
337-
ftof.fFTOFNsigmaEl = nsigma[0];
338-
ftof.fFTOFNsigmaMu = nsigma[1];
339-
ftof.fFTOFNsigmaPi = nsigma[2];
340-
ftof.fFTOFNsigmaKa = nsigma[3];
341-
ftof.fFTOFNsigmaPr = nsigma[4];
342-
FillTree(kFTOF);
327+
ftof_tracks.push_back(track);
328+
ftof_tracks_indices.push_back(std::pair<int, int>{ientry + eventOffset, fTrackCounter});
343329
}
344330

345331
// check if it is within the acceptance of the MID
@@ -362,6 +348,34 @@ int createO2tables(const char* inputFile = "delphes.root",
362348
fTrackCounter++;
363349
// fill histograms
364350
}
351+
352+
// Filling the fTOF tree after computing its T0
353+
std::array<float, 2> ftzero;
354+
355+
forward_tof_layer.eventTime(ftof_tracks, ftzero);
356+
for (int i = 0; i < ftof_tracks.size(); i++) {
357+
auto track = ftof_tracks[i];
358+
ftof.fIndexCollisions = ftof_tracks_indices[i].first;
359+
ftof.fIndexTracks = ftof_tracks_indices[i].second; // Index in the Track table
360+
361+
ftof.fFTOFLength = track->L * 0.1; // [cm]
362+
ftof.fFTOFSignal = track->TOuter * 1.e12; // [ps]
363+
364+
std::array<float, 5> deltat, nsigma;
365+
forward_tof_layer.makePID(*track, deltat, nsigma);
366+
ftof.fFTOFDeltaEl = deltat[0];
367+
ftof.fFTOFDeltaMu = deltat[1];
368+
ftof.fFTOFDeltaPi = deltat[2];
369+
ftof.fFTOFDeltaKa = deltat[3];
370+
ftof.fFTOFDeltaPr = deltat[4];
371+
ftof.fFTOFNsigmaEl = nsigma[0];
372+
ftof.fFTOFNsigmaMu = nsigma[1];
373+
ftof.fFTOFNsigmaPi = nsigma[2];
374+
ftof.fFTOFNsigmaKa = nsigma[3];
375+
ftof.fFTOFNsigmaPr = nsigma[4];
376+
FillTree(kFTOF);
377+
}
378+
365379
if (eventextra.fNentries[kTracks] != eventextra.fNentries[kTracksCov] || eventextra.fNentries[kTracks] != eventextra.fNentries[kTracksExtra]) {
366380
Printf("Issue with the counters");
367381
return 1;

examples/scripts/createO2tables.py

Lines changed: 57 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -98,26 +98,19 @@ def main(configuration_file,
9898
output_path,
9999
clean_delphes_files,
100100
create_luts,
101-
turn_off_vertexing):
101+
turn_off_vertexing,
102+
append_production):
103+
arguments = locals()
102104
global verbose_mode
103105
verbose_mode = verbose
104106
parser = configparser.RawConfigParser()
105107
parser.read(configuration_file)
106108

107109
run_cmd("./clean.sh > /dev/null 2>&1", check_status=False)
108110
# Dictionary of fetched options
109-
running_options = {
110-
"ARG configuration_file": configuration_file,
111-
"ARG config_entry": config_entry,
112-
"ARG njobs": njobs,
113-
"ARG nruns": nruns,
114-
"ARG nevents": nevents,
115-
"ARG verbose": verbose,
116-
"ARG qa": qa,
117-
"ARG output_path": output_path,
118-
"ARG clean_delphes_files": clean_delphes_files,
119-
"ARG create_luts": create_luts
120-
}
111+
running_options = {}
112+
for i in arguments:
113+
running_options["ARG "+i] = arguments[i]
121114

122115
def opt(entry, require=True):
123116
try:
@@ -211,18 +204,19 @@ def check_duplicate(option_name):
211204

212205
# Printing configuration
213206
msg(" --- running createO2tables.py", color=bcolors.HEADER)
214-
msg(" njobs =", njobs)
215-
msg(" nruns =", nruns)
216-
msg(" nevents =", nevents)
217-
msg(" lut path =", lut_path)
207+
msg(" n. jobs =", njobs)
208+
msg(" n. runs =", nruns)
209+
msg(" events per run =", nevents)
210+
msg(" tot. events =", "{:.0e}".format(nevents*nruns))
211+
msg(" LUT path =", f"'{lut_path}'")
218212
msg(" --- with detector configuration", color=bcolors.HEADER)
219-
msg(" bField =", bField, "[kG]")
213+
msg(" B field =", bField, "[kG]")
220214
msg(" sigmaT =", sigmaT, "[ns]")
221215
msg(" sigmaT0 =", sigmaT0, "[ns]")
222-
msg(" barrel_radius =", barrel_radius, "[cm]")
223-
msg(" barrel_half_length =", barrel_half_length, "[cm]")
216+
msg(" Barrel radius =", barrel_radius, "[cm]")
217+
msg(" Barrel half length =", barrel_half_length, "[cm]")
224218
if create_luts:
225-
msg(" minimum_track_radius =", minimum_track_radius, "[cm]")
219+
msg(" Minimum track radius =", minimum_track_radius, "[cm]")
226220
msg(" LUT =", lut_tag)
227221
msg(" etaMax =", etaMax)
228222

@@ -283,8 +277,20 @@ def set_config(config_file, config, value):
283277
"const double tof_sigmat =", f"{sigmaT}""\;/")
284278
set_config("createO2tables.C",
285279
"const double tof_sigmat0 =", f"{sigmaT0}""\;/")
286-
287280
run_list = range(nruns)
281+
if append_production:
282+
if output_path is None:
283+
fatal_msg("Output path is not defined, cannot append")
284+
last_preexisting_aod = [each for each in os.listdir(output_path)
285+
if each.endswith('.root') and "AODRun5" in each]
286+
if len(last_preexisting_aod) == 0:
287+
fatal_msg("Appending to a non existing production")
288+
last_preexisting_aod = sorted([int(each.replace("AODRun5.", "").replace(".root", ""))
289+
for each in last_preexisting_aod])[-1] + 1
290+
msg(f" Appending to production with {last_preexisting_aod} AODs",
291+
color=bcolors.BWARNING)
292+
run_list = range(last_preexisting_aod,
293+
last_preexisting_aod + nruns)
288294

289295
def configure_run(run_number):
290296
# Create executable that runs Generation, Delphes and analysis
@@ -422,11 +428,28 @@ def write_config(entry, prefix=""):
422428
if "ARG" not in i:
423429
write_config(i, prefix=" * ")
424430

431+
output_size = sum(os.path.getsize(os.path.join(output_path, f))
432+
for f in os.listdir(output_path)
433+
if os.path.isfile(os.path.join(output_path, f)))
434+
f.write("\n## Size of the output ##\n")
435+
f.write(f" - {output_size} bytes\n")
436+
f.write(f" - {output_size/1e6} MB\n")
437+
f.write(f" - {output_size/1e9} GB\n")
425438
run_cmd("echo >> " + summaryfile)
426439
run_cmd("echo + DelphesO2 Version + >> " + summaryfile)
427440
run_cmd("git rev-parse HEAD >> " + summaryfile, check_status=False)
441+
428442
if os.path.normpath(output_path) != os.getcwd():
429-
run_cmd(f"mv {summaryfile} {output_path}")
443+
if append_production:
444+
s = os.path.join(output_path, summaryfile)
445+
run_cmd(f"echo '' >> {s}")
446+
run_cmd(f"echo ' **' >> {s}")
447+
run_cmd(f"echo 'Appended production' >> {s}")
448+
run_cmd(f"echo ' **' >> {s}")
449+
run_cmd(f"echo '' >> {s}")
450+
run_cmd(f"cat {summaryfile} >> {s}")
451+
else:
452+
run_cmd(f"mv {summaryfile} {output_path}")
430453

431454
if qa:
432455
msg(" --- running test analysis", color=bcolors.HEADER)
@@ -441,7 +464,7 @@ def write_config(entry, prefix=""):
441464
parser.add_argument("--entry", "-e", type=str,
442465
default="DEFAULT",
443466
help="Entry in the configuration file, e.g. the INEL or CCBAR entries in the configuration file.")
444-
parser.add_argument("--output-path", "-o", type=str,
467+
parser.add_argument("--output-path", "--output_path", "-o", type=str,
445468
default=None,
446469
help="Output path, by default the current path is used as output.")
447470
parser.add_argument("--njobs", "-j", type=int,
@@ -463,10 +486,18 @@ def write_config(entry, prefix=""):
463486
parser.add_argument("--no-vertexing",
464487
action="store_true",
465488
help="Option turning off the vertexing.")
489+
parser.add_argument("--append", "-a",
490+
action="store_true",
491+
help="Option to append the results instead of starting over by shifting the AOD indexing. N.B. the user is responsible for the compatibility between appended AODs. Only works in conjunction with specifying an output path (option '-o')")
466492
parser.add_argument("--use-preexisting-luts", "-l",
467493
action="store_true",
468494
help="Option to use preexisting LUTs instead of creating new ones, in this case LUTs with the requested tag are fetched from the LUT path. By default new LUTs are created at each run.")
469495
args = parser.parse_args()
496+
# Check arguments
497+
if args.append and args.output_path is None:
498+
fatal_msg(
499+
"Asked to append production but did not specify output path (option '-o')")
500+
470501
main(configuration_file=args.configuration_file,
471502
config_entry=args.entry,
472503
njobs=args.njobs,
@@ -477,4 +508,5 @@ def write_config(entry, prefix=""):
477508
clean_delphes_files=args.clean_delphes,
478509
qa=args.qa,
479510
create_luts=not args.use_preexisting_luts,
480-
turn_off_vertexing=args.no_vertexing)
511+
turn_off_vertexing=args.no_vertexing,
512+
append_production=args.append)

examples/scripts/diagnostic_tools/doanalysis.py

Lines changed: 31 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -168,16 +168,24 @@ def main(mode,
168168
dpl_configuration_file=None,
169169
njobs=1,
170170
merge_output=False,
171-
only_merge=False):
171+
merge_only=False,
172+
shm_mem_size=16000000000,
173+
readers=1,
174+
extra_arguments=""):
172175
if len(input_file) == 1:
173176
input_file = input_file[0]
174177
else:
175178
input_file = input_file[0:n_max_files]
176-
msg("Running", f"'{mode}'", "analysis on",
177-
f"'{input_file}'", color=bcolors.BOKBLUE)
178-
msg("Maximum", n_max_files, "files with batch size",
179-
batch_size, "and", njobs, "jobs" if njobs > 1 else "job", color=bcolors.BOKBLUE)
180-
args = f"-b --shm-segment-size 16000000000 --readers 4"
179+
if not merge_only:
180+
msg("Running", f"'{mode}'", "analysis on",
181+
f"'{input_file}'", color=bcolors.BOKBLUE)
182+
msg("Maximum", n_max_files, "files with batch size",
183+
batch_size, "and", njobs, "jobs" if njobs > 1 else "job", color=bcolors.BOKBLUE)
184+
else:
185+
msg("Merging output of", f"'{mode}'",
186+
"analysis", color=bcolors.BOKBLUE)
187+
o2_arguments = f"-b --shm-segment-size {shm_mem_size} --readers {readers}"
188+
o2_arguments += extra_arguments
181189
if mode not in analyses:
182190
raise ValueError("Did not find analyses matching mode",
183191
mode, ", please choose in", ", ".join(analyses.keys()))
@@ -229,16 +237,16 @@ def build_list_of_files(file_list):
229237
run_list = []
230238
for i, j in enumerate(input_file_list):
231239
run_list.append(set_o2_analysis(an,
232-
o2_arguments=args,
240+
o2_arguments=o2_arguments,
233241
input_file=j,
234242
tag=tag,
235243
dpl_configuration_file=dpl_configuration_file))
236-
if not only_merge:
244+
if not merge_only:
237245
with multiprocessing.Pool(processes=njobs) as pool:
238246
pool.map(run_o2_analysis, run_list)
239-
msg("Analysis completed", color=bcolors.BOKGREEN)
240247

241-
if merge_output or only_merge:
248+
if merge_output or merge_only:
249+
msg("Merging results", color=bcolors.BOKBLUE)
242250
files_to_merge = []
243251
for i in input_file_list:
244252
p = os.path.dirname(os.path.abspath(i))
@@ -304,10 +312,17 @@ def build_list_of_files(file_list):
304312
help="Name of the dpl configuration file e.g. dpl-config_std.json")
305313
parser.add_argument("--merge_output", "--merge-output", "--merge",
306314
action="store_true", help="Flag to merge the output files into one")
315+
parser.add_argument("--readers", "-r",
316+
default=1, type=int,
317+
help="Number of parallel readers")
318+
parser.add_argument("--mem", "-m",
319+
default=16000000000, type=int,
320+
help="Size of the shared memory to allocate")
321+
parser.add_argument("--extra_arguments", "-e",
322+
default="", type=str,
323+
help="Extra arguments to feed to the workflow")
307324
parser.add_argument("--merge_only", "--merge-only", "--mergeonly",
308325
action="store_true", help="Flag to avoid running the analysis and to merge the output files into one")
309-
parser.add_argument("-b",
310-
action="store_true", help="Background mode")
311326
args = parser.parse_args()
312327
if args.verbose:
313328
verbose_mode = False,
@@ -322,4 +337,7 @@ def build_list_of_files(file_list):
322337
out_tag=args.tag,
323338
merge_output=args.merge_output,
324339
out_path=args.out_path,
325-
only_merge=args.merge_only)
340+
merge_only=args.merge_only,
341+
readers=args.readers,
342+
extra_arguments=args.extra_arguments,
343+
shm_mem_size=args.mem)

0 commit comments

Comments
 (0)