18 changes: 9 additions & 9 deletions README.md
@@ -30,7 +30,7 @@ Please also refer to [examples](https://github.com/frequenz-floss/frequenz-clien

```bash
# Choose the version you want to install
-VERSION=0.16.0
+VERSION=0.17.0
pip install frequenz-client-reporting==$VERSION
```

@@ -63,8 +63,8 @@ data = [
microgrid_id=1,
component_id=100,
metrics=[Metric.AC_ACTIVE_POWER, Metric.AC_REACTIVE_POWER],
-start_dt=datetime.fromisoformat("2024-05-01T00:00:00"),
-end_dt=datetime.fromisoformat("2024-05-02T00:00:00"),
+start_time=datetime.fromisoformat("2024-05-01T00:00:00"),
+end_time=datetime.fromisoformat("2024-05-02T00:00:00"),
resampling_period=timedelta(seconds=1),
)
]
@@ -79,8 +79,8 @@ data = [
microgrid_id=1,
sensor_id=100,
metrics=[Metric.SENSOR_IRRADIANCE],
-start_dt=datetime.fromisoformat("2024-05-01T00:00:00"),
-end_dt=datetime.fromisoformat("2024-05-02T00:00:00"),
+start_time=datetime.fromisoformat("2024-05-01T00:00:00"),
+end_time=datetime.fromisoformat("2024-05-02T00:00:00"),
resampling_period=timedelta(seconds=1),
)
]
@@ -106,8 +106,8 @@ data = [
client.list_microgrid_components_data(
microgrid_components=microgrid_components,
metrics=[Metric.AC_ACTIVE_POWER, Metric.AC_REACTIVE_POWER],
-start_dt=datetime.fromisoformat("2024-05-01T00:00:00"),
-end_dt=datetime.fromisoformat("2024-05-02T00:00:00"),
+start_time=datetime.fromisoformat("2024-05-01T00:00:00"),
+end_time=datetime.fromisoformat("2024-05-02T00:00:00"),
resampling_period=timedelta(seconds=1),
include_states=False, # Set to True to include state data
include_bounds=False, # Set to True to include metric bounds data
@@ -134,8 +134,8 @@ data = [
client.receive_microgrid_sensors_data(
microgrid_sensors=microgrid_sensors,
metrics=[Metric.SENSOR_IRRADIANCE],
-start_dt=datetime.fromisoformat("2024-05-01T00:00:00"),
-end_dt=datetime.fromisoformat("2024-05-02T00:00:00"),
+start_time=datetime.fromisoformat("2024-05-01T00:00:00"),
+end_time=datetime.fromisoformat("2024-05-02T00:00:00"),
resampling_period=timedelta(seconds=1),
include_states=False, # Set to True to include state data
)
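Put together, a call against the renamed API looks like the following minimal sketch. The client construction is illustrative only: the server URL, the auth key, the constructor keywords, and the `Metric` import path are assumptions, not taken from this diff.

```python
import asyncio
from datetime import datetime, timedelta

from frequenz.client.common.metric import Metric  # import path assumed
from frequenz.client.reporting import ReportingApiClient


async def main() -> None:
    # Placeholder connection details; constructor keywords are assumed.
    client = ReportingApiClient(server_url="grpc://example.com:50051", auth_key="...")

    # One day of per-second samples, using the renamed
    # start_time/end_time keywords (formerly start_dt/end_dt).
    data = [
        sample
        async for sample in client.list_single_component_data(
            microgrid_id=1,
            component_id=100,
            metrics=[Metric.AC_ACTIVE_POWER, Metric.AC_REACTIVE_POWER],
            start_time=datetime.fromisoformat("2024-05-01T00:00:00"),
            end_time=datetime.fromisoformat("2024-05-02T00:00:00"),
            resampling_period=timedelta(seconds=1),
        )
    ]
    print(f"Received {len(data)} samples")


asyncio.run(main())
```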
2 changes: 1 addition & 1 deletion RELEASE_NOTES.md
@@ -6,7 +6,7 @@

## Upgrading

-<!-- Here goes notes on how to upgrade from previous versions, including deprecations and what they should be replaced with -->
+* Rename the `start_dt` and `end_dt` arguments to `start_time` and `end_time`, respectively.

## New Features

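For code pinned to this client, the upgrade is a mechanical keyword rename; a before/after sketch (method and values purely illustrative):

```python
from datetime import datetime

start = datetime.fromisoformat("2024-05-01T00:00:00")
end = datetime.fromisoformat("2024-05-02T00:00:00")

# 0.16.x keywords (no longer accepted):
#   client.list_single_component_data(..., start_dt=start, end_dt=end, ...)
# 0.17.0 keywords, same values:
#   client.list_single_component_data(..., start_time=start, end_time=end, ...)
```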
84 changes: 42 additions & 42 deletions src/frequenz/client/reporting/_client.py
@@ -240,8 +240,8 @@ async def list_single_component_data(
microgrid_id: int,
component_id: int,
metrics: Metric | list[Metric],
-start_dt: datetime | None,
-end_dt: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period: timedelta | None,
include_states: bool = False,
include_bounds: bool = False,
@@ -252,8 +252,8 @@ async def list_single_component_data(
microgrid_id: The microgrid ID.
component_id: The component ID.
metrics: The metric name or list of metric names.
-start_dt: start datetime, if None, the earliest available data will be used
-end_dt: end datetime, if None starts streaming indefinitely from start_dt
+start_time: start datetime, if None, the earliest available data will be used
+end_time: end datetime, if None starts streaming indefinitely from start_time
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.
@@ -266,8 +266,8 @@ async def list_single_component_data(
async for batch in self._list_microgrid_components_data_batch(
microgrid_components=[(microgrid_id, [component_id])],
metrics=[metrics] if isinstance(metrics, Metric) else metrics,
-start_dt=start_dt,
-end_dt=end_dt,
+start_time=start_time,
+end_time=end_time,
resampling_period=resampling_period,
include_states=include_states,
include_bounds=include_bounds,
@@ -281,8 +281,8 @@ async def list_microgrid_components_data(
*,
microgrid_components: list[tuple[int, list[int]]],
metrics: Metric | list[Metric],
-start_dt: datetime | None,
-end_dt: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period: timedelta | None,
include_states: bool = False,
include_bounds: bool = False,
@@ -293,8 +293,8 @@ async def list_microgrid_components_data(
microgrid_components: List of tuples where each tuple contains
microgrid ID and corresponding component IDs.
metrics: The metric name or list of metric names.
-start_dt: start datetime, if None, the earliest available data will be used
-end_dt: end datetime, if None starts streaming indefinitely from start_dt
+start_time: start datetime, if None, the earliest available data will be used
+end_time: end datetime, if None starts streaming indefinitely from start_time
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.
@@ -310,8 +310,8 @@ async def list_microgrid_components_data(
async for batch in self._list_microgrid_components_data_batch(
microgrid_components=microgrid_components,
metrics=[metrics] if isinstance(metrics, Metric) else metrics,
-start_dt=start_dt,
-end_dt=end_dt,
+start_time=start_time,
+end_time=end_time,
resampling_period=resampling_period,
include_states=include_states,
include_bounds=include_bounds,
@@ -326,8 +326,8 @@ async def _list_microgrid_components_data_batch(
*,
microgrid_components: list[tuple[int, list[int]]],
metrics: list[Metric],
-start_dt: datetime | None,
-end_dt: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period: timedelta | None,
include_states: bool = False,
include_bounds: bool = False,
@@ -337,8 +337,8 @@ async def _list_microgrid_components_data_batch(
Args:
microgrid_components: A list of tuples of microgrid IDs and component IDs.
metrics: A list of metrics.
-start_dt: start datetime, if None, the earliest available data will be used
-end_dt: end datetime, if None starts streaming indefinitely from start_dt
+start_time: start datetime, if None, the earliest available data will be used
+end_time: end datetime, if None starts streaming indefinitely from start_time
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.
@@ -357,8 +357,8 @@ def dt2ts(dt: datetime) -> PBTimestamp:
return ts

time_filter = PBTimeFilter(
-start=dt2ts(start_dt) if start_dt else None,
-end=dt2ts(end_dt) if end_dt else None,
+start=dt2ts(start_time) if start_time else None,
+end=dt2ts(end_time) if end_time else None,
)

incl_states = (
@@ -437,8 +437,8 @@ async def receive_single_sensor_data(
microgrid_id: int,
sensor_id: int,
metrics: Metric | list[Metric],
-start_dt: datetime | None,
-end_dt: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period: timedelta | None,
include_states: bool = False,
) -> AsyncIterator[MetricSample]:
@@ -448,8 +448,8 @@ async def receive_single_sensor_data(
microgrid_id: The microgrid ID.
sensor_id: The sensor ID.
metrics: The metric name or list of metric names.
-start_dt: start datetime, if None, the earliest available data will be used.
-end_dt: end datetime, if None starts streaming indefinitely from start_dt.
+start_time: start datetime, if None, the earliest available data will be used.
+end_time: end datetime, if None starts streaming indefinitely from start_time.
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.

@@ -461,8 +461,8 @@ async def receive_single_sensor_data(
receiver = await self._receive_microgrid_sensors_data_batch(
microgrid_sensors=[(microgrid_id, [sensor_id])],
metrics=[metrics] if isinstance(metrics, Metric) else metrics,
-start_dt=start_dt,
-end_dt=end_dt,
+start_time=start_time,
+end_time=end_time,
resampling_period=resampling_period,
include_states=include_states,
)
@@ -476,8 +476,8 @@ async def receive_microgrid_sensors_data(
*,
microgrid_sensors: list[tuple[int, list[int]]],
metrics: Metric | list[Metric],
-start_dt: datetime | None,
-end_dt: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period: timedelta | None,
include_states: bool = False,
) -> AsyncIterator[MetricSample]:
@@ -487,8 +487,8 @@ async def receive_microgrid_sensors_data(
microgrid_sensors: List of tuples where each tuple contains
microgrid ID and corresponding sensor IDs.
metrics: The metric name or list of metric names.
-start_dt: start datetime, if None, the earliest available data will be used.
-end_dt: end datetime, if None starts streaming indefinitely from start_dt.
+start_time: start datetime, if None, the earliest available data will be used.
+end_time: end datetime, if None starts streaming indefinitely from start_time.
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.

@@ -503,8 +503,8 @@ async def receive_microgrid_sensors_data(
receiver = await self._receive_microgrid_sensors_data_batch(
microgrid_sensors=microgrid_sensors,
metrics=[metrics] if isinstance(metrics, Metric) else metrics,
-start_dt=start_dt,
-end_dt=end_dt,
+start_time=start_time,
+end_time=end_time,
resampling_period=resampling_period,
include_states=include_states,
)
@@ -519,8 +519,8 @@ async def _receive_microgrid_sensors_data_batch(
*,
microgrid_sensors: list[tuple[int, list[int]]],
metrics: list[Metric],
-start_dt: datetime | None,
-end_dt: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period: timedelta | None,
include_states: bool = False,
) -> AsyncIterator[SensorsDataBatch]:
@@ -529,8 +529,8 @@ async def _receive_microgrid_sensors_data_batch(
Args:
microgrid_sensors: A list of tuples of microgrid IDs and sensor IDs.
metrics: A list of metrics.
-start_dt: start datetime, if None, the earliest available data will be used.
-end_dt: end datetime, if None starts streaming indefinitely from start_dt.
+start_time: start datetime, if None, the earliest available data will be used.
+end_time: end datetime, if None starts streaming indefinitely from start_time.
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.

@@ -548,8 +548,8 @@ def dt2ts(dt: datetime) -> PBTimestamp:
return ts

time_filter = PBTimeFilter(
-start=dt2ts(start_dt) if start_dt else None,
-end=dt2ts(end_dt) if end_dt else None,
+start=dt2ts(start_time) if start_time else None,
+end=dt2ts(end_time) if end_time else None,
)

incl_states = (
@@ -619,8 +619,8 @@ async def receive_aggregated_data(
microgrid_id: int,
metric: Metric,
aggregation_formula: str,
-start: datetime | None,
-end: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period: timedelta,
) -> AsyncIterator[MetricSample]:
"""Iterate over aggregated data for a single metric using GrpcStreamBroadcaster.
@@ -630,8 +630,8 @@ async def receive_aggregated_data(
microgrid_id: The microgrid ID.
metric: The metric name.
aggregation_formula: The aggregation formula.
-start: start datetime, if None, the earliest available data will be used
-end: end datetime, if None starts streaming indefinitely from start
+start_time: start datetime, if None, the earliest available data will be used
+end_time: end datetime, if None starts streaming indefinitely from start_time
resampling_period: The period for resampling the data.

Yields:
@@ -655,8 +655,8 @@ def dt2ts(dt: datetime) -> PBTimestamp:
return ts

time_filter = PBTimeFilter(
-start=dt2ts(start) if start else None,
-end=dt2ts(end) if end else None,
+start=dt2ts(start_time) if start_time else None,
+end=dt2ts(end_time) if end_time else None,
)

stream_filter = PBAggregatedStreamRequest.AggregationStreamFilter(
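Each of the methods above builds its gRPC time filter the same way: datetimes are converted to protobuf timestamps, and a `None` endpoint is passed through so the server treats the range as open-ended. A standalone sketch of that pattern, using the standard protobuf `Timestamp` in place of the client's `PBTimestamp` alias:

```python
from datetime import datetime

from google.protobuf.timestamp_pb2 import Timestamp


def dt2ts(dt: datetime) -> Timestamp:
    """Convert a datetime to a protobuf Timestamp."""
    ts = Timestamp()
    ts.FromDatetime(dt)
    return ts


# No start_time means "from the earliest available data";
# no end_time means "stream indefinitely from start_time".
start_time: datetime | None = datetime.fromisoformat("2024-05-01T00:00:00")
end_time: datetime | None = None

start = dt2ts(start_time) if start_time else None
end = dt2ts(end_time) if end_time else None
print(start, end)
```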
20 changes: 10 additions & 10 deletions src/frequenz/client/reporting/cli/__main__.py
@@ -91,8 +91,8 @@ def main() -> None:
microgrid_id=args.mid,
component_id=args.cid,
metric_names=args.metrics,
-start_dt=args.start,
-end_dt=args.end,
+start_time=args.start,
+end_time=args.end,
resampling_period_s=args.resampling_period_s,
states=args.states,
bounds=args.bounds,
@@ -109,8 +109,8 @@ async def run( # noqa: DOC502
microgrid_id: int,
component_id: list[str],
metric_names: list[str],
-start_dt: datetime | None,
-end_dt: datetime | None,
+start_time: datetime | None,
+end_time: datetime | None,
resampling_period_s: int | None,
states: bool,
bounds: bool,
@@ -124,8 +124,8 @@ async def run( # noqa: DOC502
microgrid_id: microgrid ID
component_id: component ID
metric_names: list of metric names
-start_dt: start datetime, if None, the earliest available data will be used
-end_dt: end datetime, if None starts streaming indefinitely from start_dt
+start_time: start datetime, if None, the earliest available data will be used
+end_time: end datetime, if None starts streaming indefinitely from start_time
resampling_period_s: The period for resampling the data.
states: include states in the output
bounds: include bounds in the output
@@ -161,8 +161,8 @@ async def data_iter() -> AsyncIterator[MetricSample]:
async for sample in client.list_microgrid_components_data(
microgrid_components=microgrid_components,
metrics=metrics,
-start_dt=start_dt,
-end_dt=end_dt,
+start_time=start_time,
+end_time=end_time,
resampling_period=resampling_period,
include_states=states,
include_bounds=bounds,
@@ -176,8 +176,8 @@ async def data_iter() -> AsyncIterator[MetricSample]:
microgrid_id=microgrid_id,
metric=metric,
aggregation_formula=formula,
-start=start_dt,
-end=end_dt,
+start_time=start_time,
+end_time=end_time,
resampling_period=resampling_period,
):
yield sample
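The aggregated-data path in `data_iter()` uses the same renamed keywords; a hedged sketch of calling `receive_aggregated_data` directly follows. The client construction, the `Metric` import path, and the `"avg(#100)"` formula string are assumptions for illustration, not taken from this diff.

```python
import asyncio
from datetime import datetime, timedelta

from frequenz.client.common.metric import Metric  # import path assumed
from frequenz.client.reporting import ReportingApiClient


async def main() -> None:
    # Placeholder connection details; constructor keywords are assumed.
    client = ReportingApiClient(server_url="grpc://example.com:50051", auth_key="...")

    # Stream 15-minute aggregates for one day; "avg(#100)" is a hypothetical
    # formula averaging component 100.
    async for sample in client.receive_aggregated_data(
        microgrid_id=1,
        metric=Metric.AC_ACTIVE_POWER,
        aggregation_formula="avg(#100)",
        start_time=datetime.fromisoformat("2024-05-01T00:00:00"),
        end_time=datetime.fromisoformat("2024-05-02T00:00:00"),
        resampling_period=timedelta(minutes=15),
    ):
        print(sample)


asyncio.run(main())
```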