Commit dc93110

Remove all use of rich coloring from logs
Waiting for a better system to adjust text, language and formatting
1 parent 860b5a4 commit dc93110

14 files changed: +49 -49 lines changed
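
The change is mechanical across the commit: Rich console markup such as [bold green]...[/] is stripped from every log message, leaving plain f-strings. Below is a minimal sketch of the before/after pattern, assuming Python's standard logging module with a Rich handler; the RichHandler wiring and the "demo" logger name are illustrative assumptions, not this repository's actual logging setup.

import logging

from rich.logging import RichHandler  # markup-aware handler: renders [style]...[/] tags as color

# Illustrative wiring only (assumption), not this repository's real configuration
logging.basicConfig(level="INFO", handlers=[RichHandler(markup=True)])
LOGGER = logging.getLogger("demo")

table_name = "Customers"

# Before this commit: colored under a markup-aware handler, but the tags
# leak into the output as literal text under any plain handler
LOGGER.info(f"Sending data to table [cyan bold]CD_{table_name}[/]")

# After this commit: a plain message, identical under every handler
LOGGER.info(f"Sending data to table CD_{table_name}")

Only the messages change: values that fed the coloring, such as the status_color_mapping entries in adx_send_scenariodata.py, are left in place even though they appear unused after this diff, consistent with the commit message's note that a better formatting system is still pending.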

cosmotech/coal/cli/commands/adx_send_scenariodata.py

Lines changed: 3 additions & 3 deletions

@@ -109,7 +109,7 @@ def adx_send_scenariodata(
                                    cluster_url=adx_uri,
                                    ingest_url=adx_ingest_uri)
     for k, v in queries.items():
-        LOGGER.info(f"Create table query: [bold green]{v}[/]")
+        LOGGER.info(f"Create table query: {v}")
         r: KustoResponseDataSet = adx_client.run_query(v)
         if r.errors_count == 0:
             LOGGER.info(f"Table {k} created successfully")
@@ -233,8 +233,8 @@ def insert_csv_files(files_data, adx_client: ADXQueriesWrapper, simulation_id, d
         status_color_mapping[IngestionStatus.FAILURE.value] = "red"
         status_color_mapping[IngestionStatus.SUCCESS.value] = "green"
         for _id, status in adx_client.check_ingestion_status(source_ids=list(ingestion_ids.keys())):
-            LOGGER.info(f"[bold cyan]{ingestion_ids[_id]}[/] - "
-                        f"[bold {status_color_mapping[status.value]}]{status.name}[/]")
+            LOGGER.info(f"{ingestion_ids[_id]} - "
+                        f"{status.name}")
     else:
         LOGGER.info("No wait for ingestion result")

cosmotech/coal/cli/commands/api/postgres_send_runner_metadata.py

Lines changed: 1 addition & 1 deletion

@@ -119,7 +119,7 @@ def postgres_send_runner_metadata(
         DO
             UPDATE SET name = EXCLUDED.name, last_run_id = EXCLUDED.last_run_id;
         """
-        LOGGER.info(f"creating table [cyan bold]{schema_table}[/]")
+        LOGGER.info(f"creating table {schema_table}")
         curs.execute(sql_create_table)
         conn.commit()
         LOGGER.info(f"adding/updating runner metadata")

cosmotech/coal/cli/commands/api/rds_send_csv.py

Lines changed: 1 addition & 1 deletion

@@ -80,7 +80,7 @@ def rds_send_csv(
     with open(csv_path) as _f:
         dr = DictReader(_f)
         table_name = csv_path.name.replace(".csv", "")
-        LOGGER.info(f"Sending data to table [cyan bold]CD_{table_name}[/]")
+        LOGGER.info(f"Sending data to table CD_{table_name}")
         LOGGER.debug(f" - Column list: {dr.fieldnames}")
         data = []

cosmotech/coal/cli/commands/api/rds_send_store.py

Lines changed: 1 addition & 1 deletion

@@ -80,7 +80,7 @@ def rds_send_store(
     api_run = RunApi(api_client)
     _s = Store()
     for table_name in _s.list_tables():
-        LOGGER.info(f"Sending data to table [cyan bold]CD_{table_name}[/]")
+        LOGGER.info(f"Sending data to table CD_{table_name}")
         data = convert_table_as_pylist(table_name)
         if not len(data):
             LOGGER.info(" - No rows : skipping")

cosmotech/coal/cli/commands/api/run_load_data.py

Lines changed: 2 additions & 2 deletions

@@ -82,8 +82,8 @@ def download_runner_data(organization_id: str, workspace_id: str, runner_id: str
             "varType": var_type,
             "isInherited": is_inherited
         })
-        LOGGER.debug(f" - [yellow]{param_id:<{max_name_size}}[/] [cyan]{var_type:<{max_type_size}}[/] "
-                     f"\"{value}\"{' [red bold]inherited[/]' if is_inherited else ''}")
+        LOGGER.debug(f" - {param_id:<{max_name_size}} {var_type:<{max_type_size}} "
+                     f"\"{value}\"{' inherited' if is_inherited else ''}")

     write_parameters(parameter_folder, parameters)

cosmotech/coal/cli/commands/api/runtemplate_load_handler.py

Lines changed: 7 additions & 7 deletions

@@ -60,8 +60,8 @@ def runtemplate_load_handler(workspace_id, organization_id, run_template_id, han
     try:
         r_data: Workspace = api_w.find_workspace_by_id(organization_id=organization_id, workspace_id=workspace_id)
     except ServiceException as e:
-        LOGGER.error(f"Workspace [green bold]{workspace_id}[/] was not found "
-                     f"in Organization [green bold]{organization_id}[/]")
+        LOGGER.error(f"Workspace {workspace_id} was not found "
+                     f"in Organization {organization_id}")
         LOGGER.debug(e.body)
         raise click.Abort()
     solution_id = r_data.solution.solution_id
@@ -72,17 +72,17 @@ def runtemplate_load_handler(workspace_id, organization_id, run_template_id, han
     template_path = root_path / run_template_id
     for handler_id in handler_list.split(','):
         handler_path: pathlib.Path = template_path / handler_id
-        LOGGER.info(f"Querying Handler [green bold]{handler_id}[/] for [green bold]{run_template_id}[/]")
+        LOGGER.info(f"Querying Handler {handler_id} for {run_template_id} ")
         try:
            rt_data = api_sol.download_run_template_handler(organization_id=organization_id,
                                                            solution_id=solution_id,
                                                            run_template_id=run_template_id,
                                                            handler_id=handler_id)
         except ServiceException as e:
             LOGGER.error(
-                f"Handler [green bold]{handler_id}[/] was not found "
-                f"for Run Template [green bold]{run_template_id}[/] "
-                f"in Solution [green bold]{solution_id}[/]")
+                f"Handler {handler_id} was not found "
+                f"for Run Template {run_template_id} "
+                f"in Solution {solution_id} ")
             LOGGER.debug(e.body)
             has_errors = True
             continue
@@ -93,7 +93,7 @@ def runtemplate_load_handler(workspace_id, organization_id, run_template_id, han
             with ZipFile(BytesIO(rt_data)) as _zip:
                 _zip.extractall(handler_path)
         except BadZipfile:
-            LOGGER.error(f"Handler [green bold]{handler_id}[/] is not a [blue]zip file[/]")
+            LOGGER.error(f"Handler {handler_id} is not a zip file ")
             has_errors = True
     if has_errors:
         LOGGER.error("Issues were met during run, please check the previous logs")

cosmotech/coal/cli/commands/api/scenariorun_load_data.py

Lines changed: 4 additions & 4 deletions

@@ -59,12 +59,12 @@ def download_scenario_data(
         for k in datasets.keys():
             if k in scenario_data.dataset_list:
                 shutil.copytree(dl.dataset_to_file(k, datasets[k]), dataset_folder, dirs_exist_ok=True)
-                LOGGER.debug(f" - [yellow]{dataset_folder}[/] ([green]{k}[/])")
+                LOGGER.debug(f" - {dataset_folder} ({k} )")
             if k in datasets_parameters_ids.keys():
                 param_dir = os.path.join(parameter_folder, datasets_parameters_ids[k])
                 pathlib.Path(param_dir).mkdir(exist_ok=True, parents=True)
                 shutil.copytree(dl.dataset_to_file(k, datasets[k]), param_dir, dirs_exist_ok=True)
-                LOGGER.debug(f" - [yellow]{datasets_parameters_ids[k]}[/] ([green]{k}[/])")
+                LOGGER.debug(f" - {datasets_parameters_ids[k]} ({k} )")
     else:
         LOGGER.info("No dataset write asked, skipping")

@@ -91,8 +91,8 @@ def download_scenario_data(
             "varType": var_type,
             "isInherited": is_inherited
         })
-        LOGGER.debug(f" - [yellow]{parameter_name:<{max_name_size}}[/] [cyan]{var_type:<{max_type_size}}[/] "
-                     f"\"{value}\"{' [red bold]inherited[/]' if is_inherited else ''}")
+        LOGGER.debug(f" - {parameter_name:<{max_name_size}} {var_type:<{max_type_size}} "
+                     f"\"{value}\"{' inherited' if is_inherited else ''}")
     write_parameters(parameter_folder, parameters, write_csv, write_json)

cosmotech/coal/cli/commands/legacy/generate_orchestrator.py

Lines changed: 7 additions & 7 deletions

@@ -89,13 +89,13 @@ def from_api(workspace_id, organization_id, run_template_id, output, describe):


 def generate_from_solution(sol: Solution, run_template_id, output: str, describe: bool = False):
-    LOGGER.info(f"Searching [green bold]{run_template_id}[/] in the solution")
+    LOGGER.info(f"Searching {run_template_id} in the solution")
     if _t := [t for t in sol.run_templates if t.id == run_template_id]:
         template: RunTemplate = _t[0]
     else:
-        LOGGER.error(f"Run template [green bold]{run_template_id}[/] was not found.")
+        LOGGER.error(f"Run template {run_template_id} was not found.")
         raise click.Abort()
-    LOGGER.info(f"Found [green bold]{run_template_id}[/] in the solution generating json file")
+    LOGGER.info(f"Found {run_template_id} in the solution generating json file")
     generate_from_template(template, output)
     if describe:
         f = Orchestrator()
@@ -110,7 +110,7 @@ def generate_from_template(template: RunTemplate, output: str):
     previous = None
     LOGGER.debug(template)
     if template.fetch_datasets is not False or template.fetch_scenario_parameters:
-        LOGGER.info("- [green]fetch_scenario_parameters[/] step found")
+        LOGGER.info("- fetch_scenario_parameters step found")
         _s = Step(id="fetch_scenario_parameters",
                   commandId="csm-orc fetch-scenariorun-data",
                   stop_library_load=True)
@@ -122,7 +122,7 @@ def run_template_phase(name, condition, source, _previous, default):
     template_is_active = template.get(condition) if template.get(condition) is not None else default
     if template_is_active:
         if template.get(source) == "cloud":
-            LOGGER.info(f"- [green]{name}_cloud[/] step found")
+            LOGGER.info(f"- {name}_cloud step found")
             _name = f"{name}_cloud"
             _step_dl_cloud = Step(id=_name,
                                   command="csm-orc",
@@ -168,7 +168,7 @@ def run_template_phase(name, condition, source, _previous, default):
             _step_dl_cloud.precedents = [_previous]
             _previous = _name
         _steps.append(_step_dl_cloud)
-        LOGGER.info(f"- [green]{name}[/] step found")
+        LOGGER.info(f"- {name} step found")
         _run_step = Step(id=name,
                          commandId="csm-orc run-step",
                          environment={
@@ -197,7 +197,7 @@ def run_template_phase(name, condition, source, _previous, default):
     previous, new_steps = run_template_phase("validator", "validate_data", "validator_source", previous, False)
     steps.extend(new_steps)
     if template.send_datasets_to_data_warehouse is True or template.send_input_parameters_to_data_warehouse is True:
-        LOGGER.info("- [green]send_to_adx[/] step found")
+        LOGGER.info("- send_to_adx step found")
         _send_to_adx_step = Step(id="send_to_adx",
                                  command="csm-orc",
                                  arguments=["send-to-adx"],

cosmotech/coal/cli/commands/legacy/init_local_parameter_folder.py

Lines changed: 4 additions & 4 deletions

@@ -146,13 +146,13 @@ def generate_parameters(
         write_json: bool,
         write_csv: bool
 ):
-    LOGGER.info(f"Searching [green bold]{run_template_id}[/] in the solution")
+    LOGGER.info(f"Searching {run_template_id} in the solution")
     if _t := [t for t in solution.run_templates if t.id == run_template_id]:
         template: RunTemplate = _t[0]
     else:
-        LOGGER.error(f"Run template [green bold]{run_template_id}[/] was not found.")
+        LOGGER.error(f"Run template {run_template_id} was not found.")
         raise click.Abort()
-    LOGGER.info(f"Found [green bold]{run_template_id}[/] in the solution generating json file")
+    LOGGER.info(f"Found {run_template_id} in the solution generating json file")
     parameter_groups = template.parameter_groups
     parameter_names = []
     for param_group in solution.parameter_groups:
@@ -173,7 +173,7 @@ def generate_parameters(
             "isInherited": False
         })
     if not (write_csv or write_json or dataset_parameters):
-        LOGGER.warning(f"No parameters to write for [green bold]{run_template_id}[/]")
+        LOGGER.warning(f"No parameters to write for {run_template_id} ")
         return 1
     output_folder_path = pathlib.Path(output_folder)
     output_folder_path.mkdir(parents=True, exist_ok=True)

cosmotech/coal/cli/commands/store/dump_to_postgresql.py

Lines changed: 8 additions & 8 deletions

@@ -89,16 +89,16 @@ def dump_to_postgresql(

     tables = list(_s.list_tables())
     if len(tables):
-        LOGGER.info(f"Sending tables to [green bold]{postgres_db}.{postgres_schema}[/]")
+        LOGGER.info(f"Sending tables to {postgres_db}.{postgres_schema} ")
         total_rows = 0
         _process_start = perf_counter()
         for table_name in tables:
            _s_time = perf_counter()
            target_table_name = f"{table_prefix}{table_name}"
-            LOGGER.info(f" - [yellow]{target_table_name}[/]:")
+            LOGGER.info(f" - {target_table_name} :")
            data = _s.get_table(table_name)
            if not len(data):
-                LOGGER.info(f" -> [cyan bold]0[/] rows (skipping)")
+                LOGGER.info(f" -> 0 rows (skipping)")
                continue
            _dl_time = perf_counter()
            rows = send_pyarrow_table_to_postgresql(data,
@@ -112,11 +112,11 @@ def dump_to_postgresql(
                                                    replace)
            total_rows += rows
            _up_time = perf_counter()
-            LOGGER.info(f" -> [cyan bold]{rows}[/] rows")
-            LOGGER.debug(f" -> Load from datastore took [blue]{_dl_time - _s_time:0.3}s[/]")
-            LOGGER.debug(f" -> Send to postgresql took [blue]{_up_time - _dl_time:0.3}s[/]")
+            LOGGER.info(f" -> {rows} rows")
+            LOGGER.debug(f" -> Load from datastore took {_dl_time - _s_time:0.3}s ")
+            LOGGER.debug(f" -> Send to postgresql took {_up_time - _dl_time:0.3}s ")
         _process_end = perf_counter()
-        LOGGER.info(f"Sent [cyan bold]{total_rows}[/] rows "
-                    f"in [blue]{_process_end - _process_start:0.3}s[/]")
+        LOGGER.info(f"Sent {total_rows} rows "
+                    f"in {_process_end - _process_start:0.3}s ")
     else:
         LOGGER.info("Data store is empty")
