@@ -81,17 +81,31 @@ private static List<String> getAllDatasetNames() {
     }
 
     public static void main(String[] args) throws IOException {
+        // Add debug info at the very start
+        System.out.println("DEBUG: AutoBenchYAML starting execution");
+
         // Check for --output argument (required for this class)
        String outputPath = null;
        for (int i = 0; i < args.length - 1; i++) {
            if (args[i].equals("--output")) outputPath = args[i + 1];
        }
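+        // Note (editorial): the loop above reads the value from the argument right after the flag,
+        // so the caller must pass e.g. "--output results.json" (the path here is only illustrative).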
 
+        // Print all arguments for debugging
+        System.out.println("DEBUG: Command line arguments:");
+        for (int i = 0; i < args.length; i++) {
+            System.out.println(" Arg[" + i + "]: " + args[i]);
+        }
+
        if (outputPath == null) {
            logger.error("Error: --output argument is required for AutoBenchYAML");
+            System.err.println("Error: --output argument is required for AutoBenchYAML");
            System.exit(1);
        }
 
+        // Force System.out flush to ensure logs are written
+        System.out.println("DEBUG: Output path: " + outputPath);
+        System.out.flush();
+
        logger.info("Heap space available is {}", Runtime.getRuntime().maxMemory());
 
        // Filter out --output and its argument from the args
@@ -100,83 +114,173 @@ public static void main(String[] args) throws IOException {
                .filter(arg -> !arg.equals("--output") && !arg.equals(finalOutputPath))
                .toArray(String[]::new);
 
+        System.out.println("DEBUG: Filtered arguments: " + Arrays.toString(filteredArgs));
+        System.out.flush();
+
        // generate a regex that matches any regex in filteredArgs, or if filteredArgs is empty/null, match everything
        var regex = filteredArgs.length == 0 ? ".*" : Arrays.stream(filteredArgs).flatMap(s -> Arrays.stream(s.split("\\s"))).map(s -> "(?:" + s + ")").collect(Collectors.joining("|"));
        // compile regex and do substring matching using find
        var pattern = Pattern.compile(regex);
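+        // Example (illustrative dataset names, not from this change): passing "glove sift" yields the
+        // pattern "(?:glove)|(?:sift)", which is then matched as a substring of each dataset name below.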
 
+        System.out.println("DEBUG: Using regex pattern: " + regex);
+        System.out.flush();
+
        var datasetNames = getAllDatasetNames().stream().filter(dn -> pattern.matcher(dn).find()).collect(Collectors.toList());
 
+        System.out.println("DEBUG: Dataset names after filtering: " + datasetNames);
+        System.out.println("DEBUG: Dataset names size: " + datasetNames.size());
+        System.out.flush();
+
        logger.info("Executing the following datasets: {}", datasetNames);
        List<BenchResult> results = new ArrayList<>();
 
        // Process datasets from regex patterns
        if (!datasetNames.isEmpty()) {
+            System.out.println("DEBUG: Processing datasets from regex patterns");
+            System.out.flush();
            for (var datasetName : datasetNames) {
                logger.info("Loading dataset: {}", datasetName);
-                DataSet ds = DataSetLoader.loadDataSet(datasetName);
-                logger.info("Dataset loaded: {} with {} vectors", datasetName, ds.baseVectors.size());
+                System.out.println("DEBUG: Attempting to load dataset: " + datasetName);
+                System.out.flush();
+
+                try {
+                    DataSet ds = DataSetLoader.loadDataSet(datasetName);
+                    System.out.println("DEBUG: Dataset loaded successfully: " + datasetName);
+                    System.out.flush();
+
+                    logger.info("Dataset loaded: {} with {} vectors", datasetName, ds.baseVectors.size());
+                    System.out.println("DEBUG: Dataset has " + ds.baseVectors.size() + " vectors");
+                    System.out.flush();
 
-                if (datasetName.endsWith(".hdf5")) {
-                    datasetName = datasetName.substring(0, datasetName.length() - ".hdf5".length());
+                    if (datasetName.endsWith(".hdf5")) {
+                        datasetName = datasetName.substring(0, datasetName.length() - ".hdf5".length());
+                    }
+
+                    System.out.println("DEBUG: Getting default config for: " + datasetName);
+                    System.out.flush();
+
+                    MultiConfig config = MultiConfig.getDefaultConfig(datasetName);
+
+                    System.out.println("DEBUG: Got config, running benchmark");
+                    System.out.flush();
+
+                    logger.info("Using configuration: {}", config);
+
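+                    // Grid.runAllAndCollectResults (below) appears to run one benchmark per combination of
+                    // the construction/search parameters passed to it and return the collected BenchResults;
+                    // this description is inferred from the call site, not from the method's source.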
+                    results.addAll(Grid.runAllAndCollectResults(ds,
+                            config.construction.outDegree,
+                            config.construction.efConstruction,
+                            config.construction.neighborOverflow,
+                            config.construction.addHierarchy,
+                            config.construction.getFeatureSets(),
+                            config.construction.getCompressorParameters(),
+                            config.search.getCompressorParameters(),
+                            config.search.topKOverquery,
+                            config.search.useSearchPruning));
+
+                    System.out.println("DEBUG: Benchmark completed for dataset: " + datasetName);
+                    System.out.flush();
+                } catch (Exception e) {
+                    System.err.println("ERROR: Exception while processing dataset " + datasetName);
+                    e.printStackTrace();
+                    System.err.flush();
                }
-                MultiConfig config = MultiConfig.getDefaultConfig(datasetName);
-                logger.info("Using configuration: {}", config);
-
-                results.addAll(Grid.runAllAndCollectResults(ds,
-                        config.construction.outDegree,
-                        config.construction.efConstruction,
-                        config.construction.neighborOverflow,
-                        config.construction.addHierarchy,
-                        config.construction.getFeatureSets(),
-                        config.construction.getCompressorParameters(),
-                        config.search.getCompressorParameters(),
-                        config.search.topKOverquery,
-                        config.search.useSearchPruning));
            }
        }
 
        // Process YAML configuration files
        List<String> configNames = Arrays.stream(filteredArgs).filter(s -> s.endsWith(".yml")).collect(Collectors.toList());
        if (!configNames.isEmpty()) {
+            System.out.println("DEBUG: Processing YAML configuration files: " + configNames);
+            System.out.flush();
+
            for (var configName : configNames) {
                logger.info("Processing configuration file: {}", configName);
-                MultiConfig config = MultiConfig.getConfig(configName);
-                String datasetName = config.dataset;
-                logger.info("Configuration specifies dataset: {}", datasetName);
+                System.out.println("DEBUG: Processing configuration file: " + configName);
+                System.out.flush();
+
+                try {
+                    MultiConfig config = MultiConfig.getConfig(configName);
+                    String datasetName = config.dataset;
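+                    // The dataset to benchmark comes from the YAML config itself, not from the
+                    // command-line regex filters handled above.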
+                    logger.info("Configuration specifies dataset: {}", datasetName);
+                    System.out.println("DEBUG: Configuration specifies dataset: " + datasetName);
+                    System.out.flush();
 
-                logger.info("Loading dataset: {}", datasetName);
-                DataSet ds = DataSetLoader.loadDataSet(datasetName);
-                logger.info("Dataset loaded: {} with {} vectors", datasetName, ds.baseVectors.size());
-
-                results.addAll(Grid.runAllAndCollectResults(ds,
-                        config.construction.outDegree,
-                        config.construction.efConstruction,
-                        config.construction.neighborOverflow,
-                        config.construction.addHierarchy,
-                        config.construction.getFeatureSets(),
-                        config.construction.getCompressorParameters(),
-                        config.search.getCompressorParameters(),
-                        config.search.topKOverquery,
-                        config.search.useSearchPruning));
+                    logger.info("Loading dataset: {}", datasetName);
+                    System.out.println("DEBUG: Loading dataset from YAML config: " + datasetName);
+                    System.out.flush();
+
+                    DataSet ds = DataSetLoader.loadDataSet(datasetName);
+                    logger.info("Dataset loaded: {} with {} vectors", datasetName, ds.baseVectors.size());
+                    System.out.println("DEBUG: Dataset loaded from YAML config: " + datasetName + " with " + ds.baseVectors.size() + " vectors");
+                    System.out.flush();
+
+                    System.out.println("DEBUG: Running benchmark with YAML config");
+                    System.out.flush();
+
+                    results.addAll(Grid.runAllAndCollectResults(ds,
+                            config.construction.outDegree,
+                            config.construction.efConstruction,
+                            config.construction.neighborOverflow,
+                            config.construction.addHierarchy,
+                            config.construction.getFeatureSets(),
+                            config.construction.getCompressorParameters(),
+                            config.search.getCompressorParameters(),
+                            config.search.topKOverquery,
+                            config.search.useSearchPruning));
+
+                    System.out.println("DEBUG: Benchmark completed for YAML config: " + configName);
+                    System.out.flush();
+                } catch (Exception e) {
+                    System.err.println("ERROR: Exception while processing YAML config " + configName);
+                    e.printStackTrace();
+                    System.err.flush();
+                }
            }
+        } else {
+            System.out.println("DEBUG: No YAML configuration files to process");
+            System.out.flush();
        }
 
+        System.out.println("DEBUG: Benchmark processing completed. Results size: " + results.size());
+        System.out.flush();
+
        // Calculate summary statistics
-        SummaryStats stats = BenchmarkSummarizer.summarize(results);
-        logger.info("Benchmark summary: {}", stats.toString());
-
-        // Write results to JSON file
-        ObjectMapper mapper = new ObjectMapper();
-        File outputFile = new File(outputPath);
-        mapper.writerWithDefaultPrettyPrinter().writeValue(outputFile, results);
-        logger.info("Benchmark results written to {} (file exists: {})", outputPath, outputFile.exists());
-
-        // Double check that the file was created and log its size
-        if (outputFile.exists()) {
-            logger.info("Output file size: {} bytes", outputFile.length());
-        } else {
-            logger.error("Failed to create output file at {}", outputPath);
+        try {
+            System.out.println("DEBUG: Calculating summary statistics");
+            System.out.flush();
+
+            SummaryStats stats = BenchmarkSummarizer.summarize(results);
+            logger.info("Benchmark summary: {}", stats.toString());
+            System.out.println("DEBUG: Benchmark summary: " + stats.toString());
+            System.out.flush();
+
+            // Write results to JSON file
+            System.out.println("DEBUG: Writing results to JSON file: " + outputPath);
+            System.out.flush();
+
+            ObjectMapper mapper = new ObjectMapper();
+            File outputFile = new File(outputPath);
+            mapper.writerWithDefaultPrettyPrinter().writeValue(outputFile, results);
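+            // Jackson's writeValue serializes the List<BenchResult> as a pretty-printed JSON array
+            // at the path given by --output.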
+            logger.info("Benchmark results written to {} (file exists: {})", outputPath, outputFile.exists());
+            System.out.println("DEBUG: Benchmark results written to " + outputPath + " (file exists: " + outputFile.exists() + ")");
+            System.out.flush();
+
+            // Double check that the file was created and log its size
+            if (outputFile.exists()) {
+                logger.info("Output file size: {} bytes", outputFile.length());
+                System.out.println("DEBUG: Output file size: " + outputFile.length() + " bytes");
+            } else {
+                logger.error("Failed to create output file at {}", outputPath);
+                System.err.println("ERROR: Failed to create output file at " + outputPath);
+            }
+            System.out.flush();
+        } catch (Exception e) {
+            System.err.println("ERROR: Exception during final processing");
+            e.printStackTrace();
+            System.err.flush();
        }
+
+        System.out.println("DEBUG: AutoBenchYAML execution completed");
+        System.out.flush();
    }
}