Skip to content

Commit 5648325

Browse files
authored
chore: update pre-processing analytics logs (#1224)
1 parent 248e591 commit 5648325

File tree

4 files changed

+24
-27
lines changed

4 files changed

+24
-27
lines changed

functions-python/preprocessed_analytics/requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ geoalchemy2==0.14.7
1717
# Google specific packages for this function
1818
google-cloud-bigquery
1919
google-cloud-storage
20+
flask
2021

2122
# Additional packages for this function
2223
pandas

functions-python/preprocessed_analytics/src/main.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -6,12 +6,12 @@
66
import functions_framework
77
from flask import Response
88

9-
from shared.helpers.logger import Logger
9+
from shared.helpers.logger import init_logger
1010
from processors.base_analytics_processor import NoFeedDataException
1111
from processors.gbfs_analytics_processor import GBFSAnalyticsProcessor
1212
from processors.gtfs_analytics_processor import GTFSAnalyticsProcessor
1313

14-
logging.basicConfig(level=logging.INFO)
14+
init_logger()
1515

1616

1717
def get_compute_date(request: flask.Request) -> datetime:
@@ -33,29 +33,29 @@ def preprocess_analytics(request: flask.Request, processor_class) -> Response:
3333
"""
3434
Common logic to process analytics using the given processor class.
3535
"""
36-
Logger.init_logger()
37-
logging.info(f"{processor_class.__name__} Function triggered")
36+
logging.info("Function triggered: %s", processor_class.__name__)
3837
compute_date = get_compute_date(request)
39-
logging.info(f"Compute date: {compute_date}")
38+
logging.info("Compute date: %s", compute_date)
4039
try:
4140
processor = processor_class(compute_date)
4241
processor.run()
4342
except NoFeedDataException as e:
44-
logging.warning(f"No feed data found for date {compute_date}: {e}")
43+
logging.warning("No feed data found for date %s: %s", compute_date, e)
4544
return Response(f"No feed data found for date {compute_date}: {e}", status=404)
4645
except Exception as e:
4746
# Extracting the traceback details
4847
tb = traceback.format_exc()
48+
logging.error("Error processing %s analytics: %s", processor_class.__name__, e)
4949
logging.error(
50-
f"Error processing {processor_class.__name__} analytics: {e}\nTraceback:\n{tb}"
50+
"Error trace processing %s analytics: %s", processor_class.__name__, tb
5151
)
5252
return Response(
5353
f"Error processing analytics for date {compute_date}: {e}", status=500
5454
)
5555

56-
return Response(
57-
f"Successfully processed analytics for date: {compute_date}", status=200
58-
)
56+
message = f"Successfully processed analytics for date: {compute_date}"
57+
logging.info(message)
58+
return Response(message, status=200)
5959

6060

6161
@functions_framework.http

functions-python/preprocessed_analytics/src/processors/base_analytics_processor.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ def _load_json(self, file_name: str) -> Tuple[List[Dict], storage.Blob]:
6161
)
6262
except Exception as e:
6363
logging.warning(
64-
f"Unable to convert data to DataFrame using Pandas: {e}"
64+
"Unable to convert data to DataFrame using Pandas: %s", e
6565
)
6666
return json.loads(json_data), blob
6767
return [], blob
@@ -72,13 +72,13 @@ def _save_blob(blob: storage.Blob, data: List[Dict]) -> None:
7272
# Convert the data to JSON format
7373
json_data = pd.DataFrame(data).to_json(orient="records", date_format="iso")
7474
except Exception as e:
75-
logging.warning(f"Unable to convert data to JSON using Pandas: {e}")
75+
logging.warning("Unable to convert data to JSON using Pandas: %s", e)
7676
json_data = json.dumps(data, default=str)
7777

7878
# Save the JSON file to the specified GCS bucket
7979
blob.upload_from_string(json_data, content_type="application/json")
8080
blob.make_public()
81-
logging.info(f"{blob.name} saved to bucket")
81+
logging.info("%s saved to bucket", blob.name)
8282

8383
def _save_json(self, file_name: str, data: List[Dict]) -> None:
8484
# Save the JSON file to the specified GCS bucket
@@ -90,7 +90,7 @@ def aggregate_summary_files(
9090
) -> None:
9191
blobs = self.analytics_bucket.list_blobs(prefix="summary/summary_")
9292
for blob in blobs:
93-
logging.info(f"Aggregating data from {blob.name}")
93+
logging.info("Aggregating data from %s", blob.name)
9494
summary_data, _ = self._load_json(blob.name)
9595
for key, new_data in summary_data.items():
9696
if key in metrics_file_data:
@@ -129,7 +129,7 @@ def save_analytics(self) -> None:
129129
file_name = f"analytics_{self.run_date.strftime('%Y-%m-%d')}.json"
130130
self._save_json(file_name, self.data)
131131
self.save()
132-
logging.info(f"Analytics saved to bucket as {file_name}")
132+
logging.info("Analytics saved to bucket as %s", file_name)
133133

134134
@with_db_session
135135
def run(self, db_session: Session) -> None:
@@ -139,16 +139,16 @@ def run(self, db_session: Session) -> None:
139139
self.save_summary()
140140
self.save_analytics()
141141
self.update_analytics_files()
142-
logging.info(f"Finished running analytics for date: {self.run_date}")
142+
logging.info("Finished running analytics for date: %s", self.run_date)
143143

144144
def _get_data(self, db_session: Session):
145145
query = self.get_latest_data(db_session)
146146
all_results = query.all()
147147
if len(all_results) == 0:
148148
raise NoFeedDataException("No feed data found")
149-
logging.info(f"Loaded {len(all_results)} feeds to process")
149+
logging.info("Loaded %s feeds to process", len(all_results))
150150
unique_feeds = {result[0].stable_id: result for result in all_results}
151-
logging.info(f"Nb of unique feeds loaded: {len(unique_feeds)}")
151+
logging.info("Nb of unique feeds loaded: %s", len(unique_feeds))
152152
return [(result[0], result[1]) for result in unique_feeds.values()]
153153

154154
def update_analytics_files(self) -> None:
@@ -189,4 +189,4 @@ def update_analytics_files(self) -> None:
189189
)
190190

191191
except Exception as e:
192-
logging.error(f"Error updating analytics files: {e}")
192+
logging.error("Error updating analytics files: %s", e)

functions-python/preprocessed_analytics/tests/test_main.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -58,24 +58,21 @@ def test_process_analytics_gbfs_success(self, mock_process_analytics, mock_run):
5858
)
5959
self.assertEqual(response.status_code, 200)
6060

61-
@patch("main.Logger.init_logger")
6261
@patch("main.GTFSAnalyticsProcessor.run")
63-
def test_process_analytics_gtfs_error(self, mock_run, _):
62+
def test_process_analytics_gtfs_error(self, mock_run):
6463
mock_run.side_effect = Exception("Test error")
6564
response = preprocess_analytics_gtfs(self.mock_request)
6665
self.assertEqual(response.status_code, 500)
6766

68-
@patch("main.Logger.init_logger")
6967
@patch("main.GBFSAnalyticsProcessor.run")
70-
def test_process_analytics_gbfs_error(self, mock_run, _):
68+
def test_process_analytics_gbfs_error(self, mock_run):
7169
mock_run.side_effect = Exception("Test error")
7270
response = preprocess_analytics_gbfs(self.mock_request)
7371
self.assertEqual(response.status_code, 500)
7472

75-
@patch("main.Logger.init_logger")
7673
@patch("main.GTFSAnalyticsProcessor.run")
7774
@patch("main.GTFSAnalyticsProcessor.__init__")
78-
def test_process_analytics_success(self, mock_init, mock_run, _):
75+
def test_process_analytics_success(self, mock_init, mock_run):
7976
mock_run.return_value = None
8077
mock_init.return_value = None
8178
response = preprocess_analytics(self.mock_request, GTFSAnalyticsProcessor)
@@ -84,9 +81,8 @@ def test_process_analytics_success(self, mock_init, mock_run, _):
8481
"Successfully processed analytics for date:", response.data.decode()
8582
)
8683

87-
@patch("main.Logger.init_logger")
8884
@patch("main.GTFSAnalyticsProcessor.run")
89-
def test_process_analytics_failure(self, mock_run, _):
85+
def test_process_analytics_failure(self, mock_run):
9086
mock_run.side_effect = Exception("Processing error")
9187
response = preprocess_analytics(self.mock_request, GTFSAnalyticsProcessor)
9288
self.assertEqual(response.status_code, 500)

0 commit comments

Comments (0)