
Commit 84ae115

fix formatting
1 parent 1efcb92 commit 84ae115

9 files changed: +259, -148 lines


src/main.py

Lines changed: 4 additions & 2 deletions
@@ -1,5 +1,5 @@
-import logging
 import asyncio
+import logging
 from contextlib import asynccontextmanager
 
 import uvicorn
@@ -50,14 +50,16 @@
 
 prometheus_scheduler = PrometheusScheduler()
 
+
 @repeat_every(
     seconds=prometheus_scheduler.service_config.get("metrics_schedule", 30),
     logger=logger,
-    raise_exceptions=False
+    raise_exceptions=False,
 )
 async def schedule_metrics_calculation():
     await prometheus_scheduler.calculate()
 
+
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     task = asyncio.create_task(schedule_metrics_calculation())

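Note: the hunk above reformats a pattern that pairs fastapi-utils' repeat_every decorator with a FastAPI lifespan context manager to run periodic metrics calculation alongside the app. A minimal, self-contained sketch of that pattern follows; the StubScheduler class and the fixed 30-second interval are placeholders, not the service's actual PrometheusScheduler or its service_config lookup.

import asyncio
import logging
from contextlib import asynccontextmanager

from fastapi import FastAPI
from fastapi_utils.tasks import repeat_every

logger = logging.getLogger(__name__)


class StubScheduler:
    """Placeholder standing in for the service's PrometheusScheduler."""

    async def calculate(self) -> None:
        logger.info("calculating metrics batch")


scheduler = StubScheduler()


@repeat_every(seconds=30, logger=logger, raise_exceptions=False)
async def schedule_metrics_calculation() -> None:
    await scheduler.calculate()


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Kick off the periodic task at startup, mirroring the diff above.
    task = asyncio.create_task(schedule_metrics_calculation())
    yield
    # The original file's shutdown handling is not shown in the diff; a real
    # lifespan would typically cancel or await outstanding tasks here.
    task.cancel()


app = FastAPI(lifespan=lifespan)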
src/service/data/datasources/data_source.py

Lines changed: 9 additions & 5 deletions
@@ -1,4 +1,3 @@
-import asyncio
 import logging
 import os
 from concurrent.futures import ThreadPoolExecutor
@@ -90,11 +89,12 @@ async def get_dataframe_with_batch_size(
         start_row = max(0, available_rows - batch_size)
         n_rows = min(batch_size, available_rows)
 
-        input_data, output_data, metadata = await model_data.data(start_row=start_row, n_rows=n_rows)
+        input_data, output_data, metadata = await model_data.data(
+            start_row=start_row, n_rows=n_rows
+        )
 
         input_names, output_names, metadata_names = await model_data.column_names()
 
-
         # Combine the data into a single dataframe
         df_data = {}
 
@@ -121,7 +121,9 @@ async def get_dataframe_with_batch_size(
                 f"Error creating dataframe for model={model_id}: {str(e)}"
             )
 
-    async def get_organic_dataframe(self, model_id: str, batch_size: int) -> pd.DataFrame:
+    async def get_organic_dataframe(
+        self, model_id: str, batch_size: int
+    ) -> pd.DataFrame:
         """
         Get a dataframe with only organic data (not synthetic).
 
@@ -208,7 +210,9 @@ async def has_metadata(self, model_id: str) -> bool:
         try:
             return await self.get_metadata(model_id) is not None
         except Exception as e:
-            logger.error(f"Error checking if metadata exists for model={model_id}: {str(e)}")
+            logger.error(
+                f"Error checking if metadata exists for model={model_id}: {str(e)}"
+            )
             return False
 
     # DATAFRAME QUERIES

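Note: the get_dataframe_with_batch_size hunk reads only the most recent batch_size rows, via start_row = max(0, available_rows - batch_size) and n_rows = min(batch_size, available_rows). A small standalone illustration of that windowing arithmetic; the tail_window helper and the row counts are made up for the example.

def tail_window(available_rows: int, batch_size: int) -> tuple[int, int]:
    """Return (start_row, n_rows) selecting the most recent batch_size rows."""
    start_row = max(0, available_rows - batch_size)
    n_rows = min(batch_size, available_rows)
    return start_row, n_rows


# More rows than the batch: read the last 100 of 250.
assert tail_window(250, 100) == (150, 100)
# Fewer rows than the batch: read everything from row 0.
assert tail_window(40, 100) == (0, 40)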
src/service/payloads/metrics/base_metric_request.py

Lines changed: 3 additions & 1 deletion
@@ -1,14 +1,16 @@
 from abc import abstractmethod
 from typing import Dict, Optional
+
 from pydantic import BaseModel, ConfigDict
 
 
 class BaseMetricRequest(BaseModel):
     """
     Abstract base class for metric requests.
     """
+
     # To allow extra fields to be set on instances
-    model_config = ConfigDict(extra='allow')
+    model_config = ConfigDict(extra="allow")
 
     model_id: str
     metric_name: str

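Note: the model_config = ConfigDict(extra="allow") line reformatted above is Pydantic v2's mechanism for letting request instances carry fields beyond the declared ones. A minimal sketch of that behavior; the ExampleRequest class and the threshold field are illustrative, not the service's actual BaseMetricRequest.

from pydantic import BaseModel, ConfigDict


class ExampleRequest(BaseModel):
    # Allow extra, undeclared fields to be set on instances.
    model_config = ConfigDict(extra="allow")

    model_id: str
    metric_name: str


req = ExampleRequest(model_id="m1", metric_name="spd", threshold=0.1)
# The undeclared field is kept rather than rejected.
print(req.threshold)    # 0.1
print(req.model_extra)  # {'threshold': 0.1}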
src/service/payloads/metrics/request_reconciler.py

Lines changed: 3 additions & 1 deletion
@@ -24,7 +24,9 @@ async def reconcile(request: BaseMetricRequest, data_source: DataSource) -> None
             request: The metric request to reconcile
             data_source: The data source to use for reconciliation
         """
-        storage_metadata: StorageMetadata = await data_source.get_metadata(request.model_id)
+        storage_metadata: StorageMetadata = await data_source.get_metadata(
+            request.model_id
+        )
         RequestReconciler.reconcile_with_metadata(request, storage_metadata)
 
     @staticmethod

src/service/payloads/service/schema_item.py

Lines changed: 4 additions & 2 deletions
@@ -1,6 +1,8 @@
-from src.service.payloads.values.data_type import DataType
 from dataclasses import dataclass
 
+from src.service.payloads.values.data_type import DataType
+
+
 @dataclass
 class SchemaItem:
     type: DataType
@@ -24,4 +26,4 @@ def get_column_index(self) -> int:
         return self.column_index
 
     def set_column_index(self, column_index: int) -> None:
-        self.column_index = column_index
+        self.column_index = column_index

src/service/prometheus/prometheus_publisher.py

Lines changed: 4 additions & 3 deletions
@@ -15,7 +15,8 @@
 # Must start with letter, underscore, or colon
 # Can contain letters, numbers, underscores, and colons
 # Lowercase only (shall we allow uppercase?)
-PROMETHEUS_METRIC_NAME_REGEX = re.compile(r'^[a-z_:][a-z0-9_:]*$')
+PROMETHEUS_METRIC_NAME_REGEX = re.compile(r"^[a-z_:][a-z0-9_:]*$")
+
 
 class PrometheusPublisher:
     def __init__(self, registry: CollectorRegistry = REGISTRY) -> None:
@@ -68,7 +69,7 @@ def remove_gauge(self, name: str, id: uuid.UUID) -> None:
 
         if full_name in self._gauges:
            gauge = self._gauges[full_name]
-
+
            # IMPORTANT: Accessing private attributes of prometheus_client.Gauge
            # This is necessary because the prometheus_client library does not provide
            # public methods to:
@@ -184,7 +185,7 @@ def _get_full_metric_name(self, metric_name: str) -> str:
                f"underscore, or colon, and contain only lowercase letters, "
                f"numbers, underscores, and colons."
            )
-
+
        return full_name
 
    @staticmethod

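Note: the regex reformatted in prometheus_publisher.py restricts metric names to a lowercase subset of Prometheus's naming rules. A short sketch of how such a check might be applied; the validate_metric_name helper is illustrative, not the module's actual API.

import re

# Same pattern as in the diff: must start with a lowercase letter, underscore,
# or colon, followed by lowercase letters, digits, underscores, or colons.
PROMETHEUS_METRIC_NAME_REGEX = re.compile(r"^[a-z_:][a-z0-9_:]*$")


def validate_metric_name(name: str) -> str:
    """Return the name unchanged, or raise if it is not a valid metric name."""
    if not PROMETHEUS_METRIC_NAME_REGEX.match(name):
        raise ValueError(
            f"Invalid metric name '{name}': must start with a lowercase letter, "
            f"underscore, or colon, and contain only lowercase letters, "
            f"numbers, underscores, and colons."
        )
    return name


print(validate_metric_name("trustyai_spd_total"))  # passes
# validate_metric_name("2fast")  # would raise ValueError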