2424#
2525###############################################################################
2626import argparse
27+ import csv
28+ import glob
2729import json
2830import logging
2931import os
@@ -331,7 +333,7 @@ def generate_reference_config(
331333 for obj in results :
332334 if obj .result_data .collection_result .status != ExecutionStatus .OK :
333335 logger .warning (
334- "Plugin: %s result status is %, skipping" ,
336+ "Plugin: %s result status is %s , skipping" ,
335337 obj .source ,
336338 obj .result_data .collection_result .status ,
337339 )
@@ -344,11 +346,13 @@ def generate_reference_config(
344346
345347 plugin = plugin_reg .plugins .get (obj .source )
346348
347- args = extract_analyzer_args_from_model (plugin , data_model , logger )
348- if not args :
349- continue
350- plugins [obj .source ] = {"analysis_args" : {}}
351- plugins [obj .source ]["analysis_args" ] = args .model_dump (exclude_none = True )
349+ if obj .source not in plugins :
350+ plugins [obj .source ] = {}
351+
352+ a_args = extract_analyzer_args_from_model (plugin , data_model , logger )
353+ if a_args :
354+ plugins [obj .source ]["analysis_args" ] = a_args .model_dump (exclude_none = True )
355+
352356 plugin_config .plugins = plugins
353357
354358 return plugin_config
@@ -422,3 +426,91 @@ def find_datamodel_and_result(base_path: str) -> list[Tuple[str, str]]:
422426 tuple_list .append ((datamodel_path , result_path ))
423427
424428 return tuple_list
429+
430+
def dump_results_to_csv(
    results: list[PluginResult],
    nodename: str,
    log_path: str,
    timestamp: str,
    logger: logging.Logger,
):
    """Dump node-scraper summary results to a csv file.

    One row is written per plugin result, into ``<log_path>/nodescraper.csv``.

    Args:
        results (list[PluginResult]): list of PluginResults
        nodename (str): node where results come from
        log_path (str): directory to write the csv file into
        timestamp (str): time when results were taken
        logger (logging.Logger): instance of logger
    """
    fieldnames = ["nodename", "plugin", "status", "timestamp", "message"]
    # os.path.join is portable and tolerates a trailing separator in log_path
    filename = os.path.join(log_path, "nodescraper.csv")
    all_rows = [
        {
            "nodename": nodename,
            "plugin": res.source,
            "status": res.status.name,
            "timestamp": timestamp,
            "message": res.message,
        }
        for res in results
    ]
    dump_to_csv(all_rows, filename, fieldnames, logger)
460+
461+
def dump_to_csv(all_rows: list, filename: str, fieldnames: list[str], logger: logging.Logger):
    """Dump data to a csv file.

    Args:
        all_rows (list): rows to be written (dicts keyed by fieldnames)
        filename (str): name of file to write to
        fieldnames (list[str]): header for csv file
        logger (logging.Logger): instance of logger
    """
    try:
        with open(filename, "w", newline="") as f:
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(all_rows)
    except Exception as exp:
        logger.error("Could not dump data to csv file: %s", exp)
    else:
        # Only claim success when the write actually completed; the original
        # logged "Data written" even after a failure.
        logger.info("Data written to csv file: %s", filename)
480+
481+
def generate_summary(search_path: str, output_path: str | None, logger: logging.Logger):
    """Concatenate csv files into 1 summary csv file.

    Recursively collects every ``nodescraper.csv`` under ``search_path`` and
    writes the combined rows to ``<output_path>/summary.csv``.

    Args:
        search_path (str): Path for previous runs
        output_path (str | None): Path for new summary csv file; defaults to
            the current working directory when not given
        logger (logging.Logger): instance of logger
    """

    fieldnames = ["nodename", "plugin", "status", "timestamp", "message"]
    all_rows = []

    pattern = os.path.join(search_path, "**", "nodescraper.csv")
    matched_files = glob.glob(pattern, recursive=True)

    if not matched_files:
        # lazy %-style args match the logging convention used elsewhere in this file
        logger.error("No nodescraper.csv files found under %s", search_path)
        return

    for filepath in matched_files:
        logger.info("Reading: %s", filepath)
        try:
            with open(filepath, newline="") as f:
                all_rows.extend(csv.DictReader(f))
        except OSError as exp:
            # One unreadable file should not abort the whole summary
            logger.error("Could not read %s: %s", filepath, exp)

    if not all_rows:
        logger.error("No data rows found in matched CSV files.")
        return

    if not output_path:
        output_path = os.getcwd()

    output_path = os.path.join(output_path, "summary.csv")
    dump_to_csv(all_rows, output_path, fieldnames, logger)
0 commit comments