@@ -122,20 +122,35 @@ def analyze_ledger_snapshot(ledger, date, output_rows, sema):
     :param output_rows: a list of strings in the form of csv output rows
     :param sema: a multiprocessing semaphore
     """
-    input_filename = None
-    input_paths = [input_dir / f'{ledger}_{date}_raw_data.csv' for input_dir in hlp.get_input_directories()]
-    for filename in input_paths:
-        if os.path.isfile(filename):
-            input_filename = filename
-            break
-    if input_filename:
-        logging.info(f'[*] {ledger} - {date}')
-
-        entries = get_entries(ledger, date, filename)
-        metrics_values = analyze_snapshot(entries)
-        del entries
-
-        row = hlp.get_output_row(ledger, date, metrics_values)
+    row = None
+
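+    # Reuse a row already written to the output file for this (ledger, date) pair, if one exists.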
+    try:
+        with open(hlp.get_output_filename()) as f:
+            csv_reader = csv.reader(f)
+            for line in csv_reader:
+                if line[0] == ledger and line[1] == date:
+                    row = line
+                    break
+    except FileNotFoundError:
+        pass
+
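+    # No previously computed row: locate the raw snapshot file and analyze it from scratch.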
+    if not row:
+        input_filename = None
+        input_paths = [input_dir / f'{ledger}_{date}_raw_data.csv' for input_dir in hlp.get_input_directories()]
+        for filename in input_paths:
+            if os.path.isfile(filename):
+                input_filename = filename
+                break
+        if input_filename:
+            logging.info(f'[*] {ledger} - {date}')
+
+            entries = get_entries(ledger, date, filename)
+            metrics_values = analyze_snapshot(entries)
+            del entries
+
+            row = hlp.get_output_row(ledger, date, metrics_values)
+
+    if row:
         output_rows.append(row)
 
     sema.release()  # Release the semaphore s.t. the loop in analyze() can continue
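
For context, the sema.release() above is what lets the spawning loop in analyze() admit the next worker process. Below is a minimal sketch of what that calling side could look like; the Manager-backed list for output_rows, the process cap, and the analyze() signature are assumptions for illustration, not part of this diff.

# Hypothetical caller: spawns one worker process per (ledger, date) pair,
# using the semaphore to cap how many workers run concurrently.
import multiprocessing as mp

def analyze(ledgers, dates, max_processes=4):
    manager = mp.Manager()
    output_rows = manager.list()        # shared between worker processes
    sema = mp.Semaphore(max_processes)  # limits the number of concurrent workers
    processes = []
    for ledger in ledgers:
        for date in dates:
            sema.acquire()  # blocks until a running worker calls sema.release()
            p = mp.Process(target=analyze_ledger_snapshot,
                           args=(ledger, date, output_rows, sema))
            p.start()
            processes.append(p)
    for p in processes:
        p.join()
    return list(output_rows)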