Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,9 @@ client = ReportingApiClient(server_url=SERVER_URL, key=API_KEY)
```

Besides the microgrid_id, component_ids, metrics, start time, and end time,
you can also set the sampling period for resampling using the `resolution` parameter.
For example, to resample data every 15 minutes, use a `resolution` of 900 seconds. The default is 1 second.
you can also set the sampling period for resampling using the `resampling_period`
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Just noting that the SDK uses sampling_period, but I think the term you chose is more accurate, so I would keep it.

parameter. For example, to resample data every 15 minutes, use a `resampling_period`
of `timedelta(minutes=15)`.

### Query metrics for a single microgrid and component:

Expand All @@ -64,7 +65,7 @@ data = [
metrics=[Metric.AC_ACTIVE_POWER, Metric.AC_REACTIVE_POWER],
start_dt=datetime.fromisoformat("2024-05-01T00:00:00"),
end_dt=datetime.fromisoformat("2024-05-02T00:00:00"),
resolution=1,
resampling_period=timedelta(seconds=1),
)
]
```
Expand All @@ -91,7 +92,7 @@ data = [
metrics=[Metric.AC_ACTIVE_POWER, Metric.AC_REACTIVE_POWER],
start_dt=datetime.fromisoformat("2024-05-01T00:00:00"),
end_dt=datetime.fromisoformat("2024-05-02T00:00:00"),
resolution=1,
resampling_period=timedelta(seconds=1),
states=False, # Set to True to include state data
bounds=False, # Set to True to include metric bounds data
)
Expand Down
1 change: 1 addition & 0 deletions RELEASE_NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
## Upgrading

* Enforce keyword arguments in 'run' function of 'main' module
* Change the `resolution` parameter (`int`, seconds) to `resampling_period` (`timedelta`)

## New Features

Expand Down
23 changes: 17 additions & 6 deletions src/frequenz/client/reporting/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import argparse
import asyncio
from datetime import datetime
from datetime import datetime, timedelta
from pprint import pprint
from typing import AsyncIterator

Expand Down Expand Up @@ -57,7 +57,12 @@ def main() -> None:
help="End datetime in YYYY-MM-DDTHH:MM:SS format",
required=True,
)
parser.add_argument("--resolution", type=int, help="Resolution", default=None)
parser.add_argument(
"--resampling_period_s",
type=int,
help="Resampling period in seconds (integer, rounded to avoid subsecond precision issues).",
default=None,
)
parser.add_argument("--psize", type=int, help="Page size", default=1000)
parser.add_argument(
"--format", choices=["iter", "csv", "dict"], help="Output format", default="csv"
Expand All @@ -76,7 +81,7 @@ def main() -> None:
metric_names=args.metrics,
start_dt=args.start,
end_dt=args.end,
resolution=args.resolution,
resampling_period_s=args.resampling_period_s,
states=args.states,
bounds=args.bounds,
service_address=args.url,
Expand All @@ -94,7 +99,7 @@ async def run(
metric_names: list[str],
start_dt: datetime,
end_dt: datetime,
resolution: int,
resampling_period_s: int | None,
states: bool,
bounds: bool,
service_address: str,
Expand All @@ -109,7 +114,7 @@ async def run(
metric_names: list of metric names
start_dt: start datetime
end_dt: end datetime
resolution: resampling resolution in sec
resampling_period_s: The period for resampling the data.
states: include states in the output
bounds: include bounds in the output
service_address: service address
Expand All @@ -131,13 +136,19 @@ def data_iter() -> AsyncIterator[MetricSample]:
Returns:
Iterator over single metric samples
"""
resampling_period = (
timedelta(seconds=resampling_period_s)
if resampling_period_s is not None
else None
)

return client.list_single_component_data(
microgrid_id=microgrid_id,
component_id=component_id,
metrics=metrics,
start_dt=start_dt,
end_dt=end_dt,
resolution=resolution,
resampling_period=resampling_period,
include_states=states,
include_bounds=bounds,
)
Expand Down
26 changes: 16 additions & 10 deletions src/frequenz/client/reporting/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from collections import namedtuple
from collections.abc import AsyncIterator, Iterable, Iterator
from dataclasses import dataclass
from datetime import datetime
from datetime import datetime, timedelta
from typing import cast

import grpc.aio as grpcaio
Expand Down Expand Up @@ -160,7 +160,7 @@ async def list_single_component_data(
metrics: Metric | list[Metric],
start_dt: datetime,
end_dt: datetime,
resolution: int | None,
resampling_period: timedelta | None,
include_states: bool = False,
include_bounds: bool = False,
) -> AsyncIterator[MetricSample]:
Expand All @@ -172,7 +172,7 @@ async def list_single_component_data(
metrics: The metric name or list of metric names.
start_dt: The start date and time.
end_dt: The end date and time.
resolution: The resampling resolution for the data, represented in seconds.
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.

Expand All @@ -186,7 +186,7 @@ async def list_single_component_data(
metrics=[metrics] if isinstance(metrics, Metric) else metrics,
start_dt=start_dt,
end_dt=end_dt,
resolution=resolution,
resampling_period=resampling_period,
include_states=include_states,
include_bounds=include_bounds,
):
Expand All @@ -201,7 +201,7 @@ async def list_microgrid_components_data(
metrics: Metric | list[Metric],
start_dt: datetime,
end_dt: datetime,
resolution: int | None,
resampling_period: timedelta | None,
include_states: bool = False,
include_bounds: bool = False,
) -> AsyncIterator[MetricSample]:
Expand All @@ -213,7 +213,7 @@ async def list_microgrid_components_data(
metrics: The metric name or list of metric names.
start_dt: The start date and time.
end_dt: The end date and time.
resolution: The resampling resolution for the data, represented in seconds.
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.

Expand All @@ -230,7 +230,7 @@ async def list_microgrid_components_data(
metrics=[metrics] if isinstance(metrics, Metric) else metrics,
start_dt=start_dt,
end_dt=end_dt,
resolution=resolution,
resampling_period=resampling_period,
include_states=include_states,
include_bounds=include_bounds,
):
Expand All @@ -246,7 +246,7 @@ async def _list_microgrid_components_data_batch(
metrics: list[Metric],
start_dt: datetime,
end_dt: datetime,
resolution: int | None,
resampling_period: timedelta | None,
include_states: bool = False,
include_bounds: bool = False,
) -> AsyncIterator[ComponentsDataBatch]:
Expand All @@ -260,7 +260,7 @@ async def _list_microgrid_components_data_batch(
metrics: A list of metrics.
start_dt: The start date and time.
end_dt: The end date and time.
resolution: The resampling resolution for the data, represented in seconds.
resampling_period: The period for resampling the data.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.

Expand Down Expand Up @@ -299,7 +299,13 @@ def dt2ts(dt: datetime) -> PBTimestamp:

stream_filter = PBReceiveMicrogridComponentsDataStreamRequest.StreamFilter(
time_filter=time_filter,
resampling_options=PBResamplingOptions(resolution=resolution),
resampling_options=PBResamplingOptions(
resolution=(
round(resampling_period.total_seconds())
if resampling_period is not None
else None
)
),
include_options=include_options,
)

Expand Down
Loading