From 3000f8771dafc2b359b531c99d00c7fff013c6bb Mon Sep 17 00:00:00 2001 From: Flora Date: Fri, 9 May 2025 09:39:28 +0200 Subject: [PATCH] Change timestamp naming Signed-off-by: Flora --- README.md | 18 ++-- RELEASE_NOTES.md | 2 +- src/frequenz/client/reporting/_client.py | 84 +++++++++---------- src/frequenz/client/reporting/cli/__main__.py | 20 ++--- 4 files changed, 62 insertions(+), 62 deletions(-) diff --git a/README.md b/README.md index 31b0f3f..27db690 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,7 @@ Please also refer to [examples](https://github.com/frequenz-floss/frequenz-clien ```bash # Choose the version you want to install -VERSION=0.16.0 +VERSION=0.17.0 pip install frequenz-client-reporting==$VERSION ``` @@ -63,8 +63,8 @@ data = [ microgrid_id=1, component_id=100, metrics=[Metric.AC_ACTIVE_POWER, Metric.AC_REACTIVE_POWER], - start_dt=datetime.fromisoformat("2024-05-01T00:00:00"), - end_dt=datetime.fromisoformat("2024-05-02T00:00:00"), + start_time=datetime.fromisoformat("2024-05-01T00:00:00"), + end_time=datetime.fromisoformat("2024-05-02T00:00:00"), resampling_period=timedelta(seconds=1), ) ] @@ -79,8 +79,8 @@ data = [ microgrid_id=1, sensor_id=100, metrics=[Metric.SENSOR_IRRADIANCE], - start_dt=datetime.fromisoformat("2024-05-01T00:00:00"), - end_dt=datetime.fromisoformat("2024-05-02T00:00:00"), + start_time=datetime.fromisoformat("2024-05-01T00:00:00"), + end_time=datetime.fromisoformat("2024-05-02T00:00:00"), resampling_period=timedelta(seconds=1), ) ] @@ -106,8 +106,8 @@ data = [ client.list_microgrid_components_data( microgrid_components=microgrid_components, metrics=[Metric.AC_ACTIVE_POWER, Metric.AC_REACTIVE_POWER], - start_dt=datetime.fromisoformat("2024-05-01T00:00:00"), - end_dt=datetime.fromisoformat("2024-05-02T00:00:00"), + start_time=datetime.fromisoformat("2024-05-01T00:00:00"), + end_time=datetime.fromisoformat("2024-05-02T00:00:00"), resampling_period=timedelta(seconds=1), include_states=False, # Set to True to include state 
data include_bounds=False, # Set to True to include metric bounds data @@ -134,8 +134,8 @@ data = [ client.receive_microgrid_sensors_data( microgrid_sensors=microgrid_sensors, metrics=[Metric.SENSOR_IRRADIANCE], - start_dt=datetime.fromisoformat("2024-05-01T00:00:00"), - end_dt=datetime.fromisoformat("2024-05-02T00:00:00"), + start_time=datetime.fromisoformat("2024-05-01T00:00:00"), + end_time=datetime.fromisoformat("2024-05-02T00:00:00"), resampling_period=timedelta(seconds=1), include_states=False, # Set to True to include state data ) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index f80f12e..1d7a602 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -6,7 +6,7 @@ ## Upgrading - +* Change 'start_dt' and 'end_dt' to 'start_time' and 'end_time' respectively. The 'start' and 'end' parameters of 'receive_aggregated_data' were also renamed to 'start_time' and 'end_time'. ## New Features diff --git a/src/frequenz/client/reporting/_client.py b/src/frequenz/client/reporting/_client.py index ac3b1c1..87d593c 100644 --- a/src/frequenz/client/reporting/_client.py +++ b/src/frequenz/client/reporting/_client.py @@ -240,8 +240,8 @@ async def list_single_component_data( microgrid_id: int, component_id: int, metrics: Metric | list[Metric], - start_dt: datetime | None, - end_dt: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period: timedelta | None, include_states: bool = False, include_bounds: bool = False, @@ -252,8 +252,8 @@ async def list_single_component_data( microgrid_id: The microgrid ID. component_id: The component ID. metrics: The metric name or list of metric names. - start_dt: start datetime, if None, the earliest available data will be used - end_dt: end datetime, if None starts streaming indefinitely from start_dt + start_time: start datetime, if None, the earliest available data will be used + end_time: end datetime, if None starts streaming indefinitely from start_time resampling_period: The period for resampling the data. include_states: Whether to include the state data. include_bounds: Whether to include the bound data. 
@@ -266,8 +266,8 @@ async def list_single_component_data( async for batch in self._list_microgrid_components_data_batch( microgrid_components=[(microgrid_id, [component_id])], metrics=[metrics] if isinstance(metrics, Metric) else metrics, - start_dt=start_dt, - end_dt=end_dt, + start_time=start_time, + end_time=end_time, resampling_period=resampling_period, include_states=include_states, include_bounds=include_bounds, @@ -281,8 +281,8 @@ async def list_microgrid_components_data( *, microgrid_components: list[tuple[int, list[int]]], metrics: Metric | list[Metric], - start_dt: datetime | None, - end_dt: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period: timedelta | None, include_states: bool = False, include_bounds: bool = False, @@ -293,8 +293,8 @@ async def list_microgrid_components_data( microgrid_components: List of tuples where each tuple contains microgrid ID and corresponding component IDs. metrics: The metric name or list of metric names. - start_dt: start datetime, if None, the earliest available data will be used - end_dt: end datetime, if None starts streaming indefinitely from start_dt + start_time: start datetime, if None, the earliest available data will be used + end_time: end datetime, if None starts streaming indefinitely from start_time resampling_period: The period for resampling the data. include_states: Whether to include the state data. include_bounds: Whether to include the bound data. 
@@ -310,8 +310,8 @@ async def list_microgrid_components_data( async for batch in self._list_microgrid_components_data_batch( microgrid_components=microgrid_components, metrics=[metrics] if isinstance(metrics, Metric) else metrics, - start_dt=start_dt, - end_dt=end_dt, + start_time=start_time, + end_time=end_time, resampling_period=resampling_period, include_states=include_states, include_bounds=include_bounds, @@ -326,8 +326,8 @@ async def _list_microgrid_components_data_batch( *, microgrid_components: list[tuple[int, list[int]]], metrics: list[Metric], - start_dt: datetime | None, - end_dt: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period: timedelta | None, include_states: bool = False, include_bounds: bool = False, @@ -337,8 +337,8 @@ async def _list_microgrid_components_data_batch( Args: microgrid_components: A list of tuples of microgrid IDs and component IDs. metrics: A list of metrics. - start_dt: start datetime, if None, the earliest available data will be used - end_dt: end datetime, if None starts streaming indefinitely from start_dt + start_time: start datetime, if None, the earliest available data will be used + end_time: end datetime, if None starts streaming indefinitely from start_time resampling_period: The period for resampling the data. include_states: Whether to include the state data. include_bounds: Whether to include the bound data. 
@@ -357,8 +357,8 @@ def dt2ts(dt: datetime) -> PBTimestamp: return ts time_filter = PBTimeFilter( - start=dt2ts(start_dt) if start_dt else None, - end=dt2ts(end_dt) if end_dt else None, + start=dt2ts(start_time) if start_time else None, + end=dt2ts(end_time) if end_time else None, ) incl_states = ( @@ -437,8 +437,8 @@ async def receive_single_sensor_data( microgrid_id: int, sensor_id: int, metrics: Metric | list[Metric], - start_dt: datetime | None, - end_dt: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period: timedelta | None, include_states: bool = False, ) -> AsyncIterator[MetricSample]: @@ -448,8 +448,8 @@ async def receive_single_sensor_data( microgrid_id: The microgrid ID. sensor_id: The sensor ID. metrics: The metric name or list of metric names. - start_dt: start datetime, if None, the earliest available data will be used. - end_dt: end datetime, if None starts streaming indefinitely from start_dt. + start_time: start datetime, if None, the earliest available data will be used. + end_time: end datetime, if None starts streaming indefinitely from start_time. resampling_period: The period for resampling the data. include_states: Whether to include the state data. 
@@ -461,8 +461,8 @@ async def receive_single_sensor_data( receiver = await self._receive_microgrid_sensors_data_batch( microgrid_sensors=[(microgrid_id, [sensor_id])], metrics=[metrics] if isinstance(metrics, Metric) else metrics, - start_dt=start_dt, - end_dt=end_dt, + start_time=start_time, + end_time=end_time, resampling_period=resampling_period, include_states=include_states, ) @@ -476,8 +476,8 @@ async def receive_microgrid_sensors_data( *, microgrid_sensors: list[tuple[int, list[int]]], metrics: Metric | list[Metric], - start_dt: datetime | None, - end_dt: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period: timedelta | None, include_states: bool = False, ) -> AsyncIterator[MetricSample]: @@ -487,8 +487,8 @@ async def receive_microgrid_sensors_data( microgrid_sensors: List of tuples where each tuple contains microgrid ID and corresponding sensor IDs. metrics: The metric name or list of metric names. - start_dt: start datetime, if None, the earliest available data will be used. - end_dt: end datetime, if None starts streaming indefinitely from start_dt. + start_time: start datetime, if None, the earliest available data will be used. + end_time: end datetime, if None starts streaming indefinitely from start_time. resampling_period: The period for resampling the data. include_states: Whether to include the state data. 
@@ -503,8 +503,8 @@ async def receive_microgrid_sensors_data( receiver = await self._receive_microgrid_sensors_data_batch( microgrid_sensors=microgrid_sensors, metrics=[metrics] if isinstance(metrics, Metric) else metrics, - start_dt=start_dt, - end_dt=end_dt, + start_time=start_time, + end_time=end_time, resampling_period=resampling_period, include_states=include_states, ) @@ -519,8 +519,8 @@ async def _receive_microgrid_sensors_data_batch( *, microgrid_sensors: list[tuple[int, list[int]]], metrics: list[Metric], - start_dt: datetime | None, - end_dt: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period: timedelta | None, include_states: bool = False, ) -> AsyncIterator[SensorsDataBatch]: @@ -529,8 +529,8 @@ async def _receive_microgrid_sensors_data_batch( Args: microgrid_sensors: A list of tuples of microgrid IDs and sensor IDs. metrics: A list of metrics. - start_dt: start datetime, if None, the earliest available data will be used. - end_dt: end datetime, if None starts streaming indefinitely from start_dt. + start_time: start datetime, if None, the earliest available data will be used. + end_time: end datetime, if None starts streaming indefinitely from start_time. resampling_period: The period for resampling the data. include_states: Whether to include the state data. 
@@ -548,8 +548,8 @@ def dt2ts(dt: datetime) -> PBTimestamp: return ts time_filter = PBTimeFilter( - start=dt2ts(start_dt) if start_dt else None, - end=dt2ts(end_dt) if end_dt else None, + start=dt2ts(start_time) if start_time else None, + end=dt2ts(end_time) if end_time else None, ) incl_states = ( @@ -619,8 +619,8 @@ async def receive_aggregated_data( microgrid_id: int, metric: Metric, aggregation_formula: str, - start: datetime | None, - end: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period: timedelta, ) -> AsyncIterator[MetricSample]: """Iterate over aggregated data for a single metric using GrpcStreamBroadcaster. @@ -630,8 +630,8 @@ async def receive_aggregated_data( microgrid_id: The microgrid ID. metric: The metric name. aggregation_formula: The aggregation formula. - start: start datetime, if None, the earliest available data will be used - end: end datetime, if None starts streaming indefinitely from start + start_time: start datetime, if None, the earliest available data will be used + end_time: end datetime, if None starts streaming indefinitely from start_time resampling_period: The period for resampling the data. 
Yields: @@ -655,8 +655,8 @@ def dt2ts(dt: datetime) -> PBTimestamp: return ts time_filter = PBTimeFilter( - start=dt2ts(start) if start else None, - end=dt2ts(end) if end else None, + start=dt2ts(start_time) if start_time else None, + end=dt2ts(end_time) if end_time else None, ) stream_filter = PBAggregatedStreamRequest.AggregationStreamFilter( diff --git a/src/frequenz/client/reporting/cli/__main__.py b/src/frequenz/client/reporting/cli/__main__.py index 31df586..a5b5779 100644 --- a/src/frequenz/client/reporting/cli/__main__.py +++ b/src/frequenz/client/reporting/cli/__main__.py @@ -91,8 +91,8 @@ def main() -> None: microgrid_id=args.mid, component_id=args.cid, metric_names=args.metrics, - start_dt=args.start, - end_dt=args.end, + start_time=args.start, + end_time=args.end, resampling_period_s=args.resampling_period_s, states=args.states, bounds=args.bounds, @@ -109,8 +109,8 @@ async def run( # noqa: DOC502 microgrid_id: int, component_id: list[str], metric_names: list[str], - start_dt: datetime | None, - end_dt: datetime | None, + start_time: datetime | None, + end_time: datetime | None, resampling_period_s: int | None, states: bool, bounds: bool, @@ -124,8 +124,8 @@ async def run( # noqa: DOC502 microgrid_id: microgrid ID component_id: component ID metric_names: list of metric names - start_dt: start datetime, if None, the earliest available data will be used - end_dt: end datetime, if None starts streaming indefinitely from start_dt + start_time: start datetime, if None, the earliest available data will be used + end_time: end datetime, if None starts streaming indefinitely from start_time resampling_period_s: The period for resampling the data. 
states: include states in the output bounds: include bounds in the output @@ -161,8 +161,8 @@ async def data_iter() -> AsyncIterator[MetricSample]: async for sample in client.list_microgrid_components_data( microgrid_components=microgrid_components, metrics=metrics, - start_dt=start_dt, - end_dt=end_dt, + start_time=start_time, + end_time=end_time, resampling_period=resampling_period, include_states=states, include_bounds=bounds, @@ -176,8 +176,8 @@ async def data_iter() -> AsyncIterator[MetricSample]: microgrid_id=microgrid_id, metric=metric, aggregation_formula=formula, - start=start_dt, - end=end_dt, + start_time=start_time, + end_time=end_time, resampling_period=resampling_period, ): yield sample