Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion RELEASE_NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,11 @@
* States can now be requested via the client and are provided through the flat iterator.
They can be identified via their category `state`, `warning` and `error`, respectively.
Each individual state is provided as its own sample.
* Bounds can now be requested via the client and are provided through the flat iterator.
They can be identified via their category `{metric}_bound_{i}_{lower,upper}`.
Each individual bound is provided as its own sample.

* Support for states is also added to the CLI tool via the `--states` flag.
* Support for states and bounds is also added to the CLI tool via the `--states` and `--bounds` flags, respectively.

## Bug Fixes

Expand Down
9 changes: 9 additions & 0 deletions src/frequenz/client/reporting/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,11 @@ def main() -> None:
action="store_true",
help="Include states in the output",
)
parser.add_argument(
"--bounds",
action="store_true",
help="Include bounds in the output",
)
parser.add_argument(
"--start",
type=datetime.fromisoformat,
Expand Down Expand Up @@ -72,6 +77,7 @@ def main() -> None:
args.end,
args.resolution,
states=args.states,
bounds=args.bounds,
service_address=args.url,
key=args.key,
fmt=args.format,
Expand All @@ -88,6 +94,7 @@ async def run(
end_dt: datetime,
resolution: int,
states: bool,
bounds: bool,
service_address: str,
key: str,
fmt: str,
Expand All @@ -102,6 +109,7 @@ async def run(
end_dt: end datetime
resolution: resampling resolution in sec
states: include states in the output
bounds: include bounds in the output
service_address: service address
key: API key
fmt: output format
Expand Down Expand Up @@ -129,6 +137,7 @@ def data_iter() -> AsyncIterator[MetricSample]:
end_dt=end_dt,
resolution=resolution,
include_states=states,
include_bounds=bounds,
)

if fmt == "iter":
Expand Down
31 changes: 31 additions & 0 deletions src/frequenz/client/reporting/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,23 @@ def __iter__(self) -> Iterator[MetricSample]:
metric=met,
value=value,
)
for i, bound in enumerate(msample.bounds):
if bound.lower:
yield MetricSample(
timestamp=ts,
microgrid_id=mid,
component_id=cid,
metric=f"{met}_bound_{i}_lower",
value=bound.lower,
)
if bound.upper:
yield MetricSample(
timestamp=ts,
microgrid_id=mid,
component_id=cid,
metric=f"{met}_bound_{i}_upper",
value=bound.upper,
)
for state in cdata.states:
ts = state.sampled_at.ToDatetime()
for name, category in {
Expand Down Expand Up @@ -145,6 +162,7 @@ async def list_single_component_data(
end_dt: datetime,
resolution: int | None,
include_states: bool = False,
include_bounds: bool = False,
) -> AsyncIterator[MetricSample]:
"""Iterate over the data for a single metric.

Expand All @@ -156,6 +174,7 @@ async def list_single_component_data(
end_dt: The end date and time.
resolution: The resampling resolution for the data, represented in seconds.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.

Yields:
A named tuple with the following fields:
Expand All @@ -169,6 +188,7 @@ async def list_single_component_data(
end_dt=end_dt,
resolution=resolution,
include_states=include_states,
include_bounds=include_bounds,
):
for entry in batch:
yield entry
Expand All @@ -183,6 +203,7 @@ async def list_microgrid_components_data(
end_dt: datetime,
resolution: int | None,
include_states: bool = False,
include_bounds: bool = False,
) -> AsyncIterator[MetricSample]:
"""Iterate over the data for multiple microgrids and components.

Expand All @@ -194,6 +215,7 @@ async def list_microgrid_components_data(
end_dt: The end date and time.
resolution: The resampling resolution for the data, represented in seconds.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.

Yields:
A named tuple with the following fields:
Expand All @@ -210,6 +232,7 @@ async def list_microgrid_components_data(
end_dt=end_dt,
resolution=resolution,
include_states=include_states,
include_bounds=include_bounds,
):
for entry in batch:
yield entry
Expand All @@ -225,6 +248,7 @@ async def _list_microgrid_components_data_batch(
end_dt: datetime,
resolution: int | None,
include_states: bool = False,
include_bounds: bool = False,
) -> AsyncIterator[ComponentsDataBatch]:
"""Iterate over the component data batches in the stream.

Expand All @@ -238,6 +262,7 @@ async def _list_microgrid_components_data_batch(
end_dt: The end date and time.
resolution: The resampling resolution for the data, represented in seconds.
include_states: Whether to include the state data.
include_bounds: Whether to include the bound data.

Yields:
A ComponentsDataBatch object of microgrid components data.
Expand All @@ -262,7 +287,13 @@ def dt2ts(dt: datetime) -> PBTimestamp:
if include_states
else PBIncludeOptions.FilterOption.FILTER_OPTION_EXCLUDE
)
incl_bounds = (
PBIncludeOptions.FilterOption.FILTER_OPTION_INCLUDE
if include_bounds
else PBIncludeOptions.FilterOption.FILTER_OPTION_EXCLUDE
)
include_options = PBIncludeOptions(
bounds=incl_bounds,
states=incl_states,
)

Expand Down
Loading