@@ -47,7 +47,7 @@ def send_csv_to_run_data(
4747 source_dir = pathlib .Path (source_folder )
4848
4949 if not source_dir .exists ():
50- LOGGER .error (f" { source_dir } does not exists" )
50+ LOGGER .error (T ( "coal.errors.file_system.file_not_found" ). format ( source_folder = source_dir ) )
5151 raise FileNotFoundError (f"{ source_dir } does not exist" )
5252
5353 with get_api_client ()[0 ] as api_client :
@@ -56,8 +56,8 @@ def send_csv_to_run_data(
5656 with open (csv_path ) as _f :
5757 dr = DictReader (_f )
5858 table_name = csv_path .name .replace (".csv" , "" )
59- LOGGER .info (f"Sending data to table CD_{ table_name } " )
60- LOGGER .debug (f" - Column list: { dr .fieldnames } " )
59+ LOGGER .info (T ( "coal.logs.run_data.sending_to_table" ). format ( table_name = f" CD_{ table_name } ") )
60+ LOGGER .debug (T ( "coal.logs.database.column_list" ). format ( columns = dr .fieldnames ) )
6161 data = []
6262
6363 for row in dr :
@@ -72,7 +72,7 @@ def send_csv_to_run_data(
7272 n_row [k ] = v
7373 data .append (n_row )
7474
75- LOGGER .info (f" - Sending { len (data )} rows" )
75+ LOGGER .info (T ( "coal.logs.database.row_count" ). format ( count = len (data )) )
7676 api_run .send_run_data (
7777 organization_id ,
7878 workspace_id ,
@@ -102,25 +102,25 @@ def send_store_to_run_data(
102102 source_dir = pathlib .Path (store_folder )
103103
104104 if not source_dir .exists ():
105- LOGGER .error (f" { source_dir } does not exists" )
105+ LOGGER .error (T ( "coal.errors.file_system.file_not_found" ). format ( source_folder = source_dir ) )
106106 raise FileNotFoundError (f"{ source_dir } does not exist" )
107107
108108 with get_api_client ()[0 ] as api_client :
109109 api_run = RunApi (api_client )
110110 _s = Store ()
111111 for table_name in _s .list_tables ():
112- LOGGER .info (f"Sending data to table CD_{ table_name } " )
112+ LOGGER .info (T ( "coal.logs.run_data.sending_to_table" ). format ( table_name = f" CD_{ table_name } ") )
113113 data = convert_table_as_pylist (table_name )
114114 if not len (data ):
115- LOGGER .info (" - No rows : skipping" )
115+ LOGGER .info (T ( "coal.logs.database.no_rows" ) )
116116 continue
117117 fieldnames = _s .get_table_schema (table_name ).names
118118 for row in data :
119119 for field in fieldnames :
120120 if row [field ] is None :
121121 del row [field ]
122- LOGGER .debug (f" - Column list: { fieldnames } " )
123- LOGGER .info (f" - Sending { len (data )} rows" )
122+ LOGGER .debug (T ( "coal.logs.database.column_list" ). format ( columns = fieldnames ) )
123+ LOGGER .info (T ( "coal.logs.database.row_count" ). format ( count = len (data )) )
124124 api_run .send_run_data (
125125 organization_id ,
126126 workspace_id ,
@@ -160,14 +160,14 @@ def load_csv_from_run_data(
160160 organization_id , workspace_id , runner_id , run_id , RunDataQuery (query = query )
161161 )
162162 if query_result .result :
163- LOGGER .info (f"Query returned { len (query_result .result )} rows" )
163+ LOGGER .info (T ( "coal.logs.database.query_results" ). format ( count = len (query_result .result )) )
164164 with open (target_dir / (file_name + ".csv" ), "w" ) as _f :
165165 headers = set ()
166166 for r in query_result .result :
167167 headers = headers | set (r .keys ())
168168 dw = DictWriter (_f , fieldnames = sorted (headers ))
169169 dw .writeheader ()
170170 dw .writerows (query_result .result )
171- LOGGER .info (f"Results saved as { target_dir / file_name } .csv" )
171+ LOGGER .info (T ( "coal.logs.database.saved_results" ). format ( file = f" { target_dir / file_name } .csv") )
172172 else :
173- LOGGER .info ("No results returned by the query" )
173+ LOGGER .info (T ( "coal.logs.database.no_results" ) )