4 changes: 2 additions & 2 deletions _docs/blueprints.md
@@ -20,7 +20,7 @@ This blueprint will provide the data source for the UK Carbon Intensity as provi

#### Default

[Install blueprint)](https://my.home-assistant.io/redirect/blueprint_import/?blueprint_url=https%3A%2F%2Fbottlecapdave.github.io%2FHomeAssistant-TargetTimeframes%2Fblueprints%2Ftarget_timeframes_octopus_energy.yaml) | [Source](./blueprints/target_timeframes_octopus_energy.yaml)
[Install blueprint](https://my.home-assistant.io/redirect/blueprint_import/?blueprint_url=https%3A%2F%2Fbottlecapdave.github.io%2FHomeAssistant-TargetTimeframes%2Fblueprints%2Ftarget_timeframes_octopus_energy.yaml) | [Source](./blueprints/target_timeframes_octopus_energy.yaml)

This blueprint will provide the data source for Octopus Energy rates as provided by the [Octopus Energy](https://github.com/BottlecapDave/HomeAssistant-OctopusEnergy) integration. This is for accounts that don't have the [free electricity sensor](https://bottlecapdave.github.io/HomeAssistant-OctopusEnergy/entities/octoplus/#free-electricity-session-events) available.

@@ -46,4 +46,4 @@ This blueprint will provide the data source for Octopus Energy rates as provided

!!! warning

This automation will run when any of the underlying entities update. This make take a while initially. If you want the data available immediately, then you'll need to run the automation manually.
This automation will run when any of the underlying entities update. This make take a while initially. If you want the data available immediately, then you'll need to run the automation manually.
17 changes: 10 additions & 7 deletions custom_components/target_timeframes/entities/__init__.py
@@ -33,17 +33,17 @@ def apply_offset(date_time: datetime, offset: str, inverse = False):

return date_time + timedelta(hours=hours, minutes=minutes, seconds=seconds)

def is_target_timeframe_complete_in_period(current_date: datetime, applicable_time_periods: list | None, target_timeframes: list | None):
if applicable_time_periods is None or target_timeframes is None or len(applicable_time_periods) < 1 or len(target_timeframes) < 1:
def is_target_timeframe_complete_in_period(current_date: datetime, start_time: datetime, end_time: datetime, target_timeframes: list | None):
if target_timeframes is None or len(target_timeframes) < 1:
return False

return (
applicable_time_periods[0]["start"] <= target_timeframes[0]["start"] and
applicable_time_periods[-1]["end"] >= target_timeframes[-1]["end"] and
start_time <= target_timeframes[0]["start"] and
end_time >= target_timeframes[-1]["end"] and
target_timeframes[-1]["end"] <= current_date
)

def get_fixed_applicable_time_periods(current_date: datetime, target_start_time: str, target_end_time: str, time_period_values: list, is_rolling_target = True):
def get_start_and_end_times(current_date: datetime, target_start_time: str, target_end_time: str, start_time_not_in_past = True):
if (target_start_time is not None):
target_start = parse_datetime(current_date.strftime(f"%Y-%m-%dT{target_start_time}:00%z"))
else:
@@ -65,7 +65,7 @@ def get_fixed_applicable_time_periods(current_date: datetime, target_start_time:
target_end = target_end + timedelta(days=1)

# If our start date has passed, reset it to current_date to avoid picking a slot in the past
if (is_rolling_target == True and target_start < current_date and current_date < target_end):
if (start_time_not_in_past == True and target_start < current_date and current_date < target_end):
_LOGGER.debug(f'Rolling target and {target_start} is in the past. Setting start to {current_date}')
target_start = current_date

@@ -74,6 +74,9 @@ def get_fixed_applicable_time_periods(current_date: datetime, target_start_time:
target_start = target_start + timedelta(days=1)
target_end = target_end + timedelta(days=1)

return (target_start, target_end)

def get_fixed_applicable_time_periods(target_start: datetime, target_end: datetime, time_period_values: list):
_LOGGER.debug(f'Finding rates between {target_start} and {target_end}')

# Retrieve the rates that are applicable for our target rate
@@ -434,4 +437,4 @@ def should_evaluate_target_timeframes(current_date: datetime, target_timeframes:

return ((evaluation_mode == CONFIG_TARGET_TARGET_TIMES_EVALUATION_MODE_ALL_IN_PAST and all_rates_in_past) or
(evaluation_mode == CONFIG_TARGET_TARGET_TIMES_EVALUATION_MODE_ALL_IN_FUTURE_OR_PAST and (one_rate_in_past == False or all_rates_in_past)) or
(evaluation_mode == CONFIG_TARGET_TARGET_TIMES_EVALUATION_MODE_ALWAYS))
(evaluation_mode == CONFIG_TARGET_TARGET_TIMES_EVALUATION_MODE_ALWAYS))
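The refactor above splits window calculation out of `get_fixed_applicable_time_periods`: `get_start_and_end_times` now resolves the target window (optionally clamping the start to the current time), and the period filtering receives that window directly, which also lets `is_target_timeframe_complete_in_period` take an explicit start and end instead of deriving them from the filtered periods. A minimal sketch of how the two helpers compose, using simplified stand-in implementations — the HH:MM parsing and the list filter below are illustrative assumptions, not the integration's real logic:

```python
from datetime import datetime, timedelta, timezone

# Simplified stand-ins for the refactored helpers; the real implementations
# live in custom_components/target_timeframes/entities/__init__.py.
def get_start_and_end_times(current_date, target_start_time, target_end_time, start_time_not_in_past=True):
    # Assumes HH:MM strings; the integration parses these against the local timezone.
    target_start = current_date.replace(hour=int(target_start_time[:2]),
                                        minute=int(target_start_time[3:]),
                                        second=0, microsecond=0)
    target_end = current_date.replace(hour=int(target_end_time[:2]),
                                      minute=int(target_end_time[3:]),
                                      second=0, microsecond=0)
    if target_end <= target_start:
        target_end += timedelta(days=1)
    # Rolling behaviour: clamp the start to "now" so slots in the past are skipped.
    if start_time_not_in_past and target_start < current_date < target_end:
        target_start = current_date
    # If the whole window has already passed, roll it forward a day.
    if target_end < current_date:
        target_start += timedelta(days=1)
        target_end += timedelta(days=1)
    return target_start, target_end

def get_fixed_applicable_time_periods(target_start, target_end, time_period_values):
    # Only periods that fall inside the pre-computed window are applicable.
    return [p for p in time_period_values if p["start"] >= target_start and p["end"] <= target_end]

now = datetime(2024, 6, 1, 10, 15, tzinfo=timezone.utc)
periods = [
    {"start": now.replace(hour=h, minute=0), "end": now.replace(hour=h, minute=30), "value": v}
    for h, v in [(9, 0.30), (11, 0.10), (12, 0.25)]
]

start, end = get_start_and_end_times(now, "09:00", "17:00", start_time_not_in_past=True)
print(get_fixed_applicable_time_periods(start, end, periods))
# The 09:00 period is excluded because the rolling start was clamped to 10:15.
```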
20 changes: 12 additions & 8 deletions custom_components/target_timeframes/entities/target_timeframe.py
@@ -48,6 +48,7 @@
create_weighting,
extract_config,
get_fixed_applicable_time_periods,
get_start_and_end_times,
get_target_time_period_info,
is_target_timeframe_complete_in_period,
should_evaluate_target_timeframes
@@ -166,22 +167,24 @@ async def async_update(self):
if CONFIG_TARGET_MAX_VALUE in self._config:
max_rate = self._config[CONFIG_TARGET_MAX_VALUE]

target_start, target_end = get_start_and_end_times(current_local_date, start_time, end_time, True)
applicable_time_periods = get_fixed_applicable_time_periods(
current_local_date,
start_time,
end_time,
self._data_source_data,
is_rolling_target
target_start,
target_end,
self._data_source_data
)

is_target_timeframe_complete = is_rolling_target == False and is_target_timeframe_complete_in_period(current_local_date, applicable_time_periods, self._target_timeframes)
# Make sure we haven't already completed for the current target timeframe
applicable_target_start, applicable_target_end = get_start_and_end_times(current_local_date, start_time, end_time, False)
is_target_timeframe_complete = is_rolling_target == False and is_target_timeframe_complete_in_period(current_local_date, applicable_target_start, applicable_target_end, self._target_timeframes)

if applicable_time_periods is not None and is_target_timeframe_complete == False:
number_of_slots = math.ceil(target_hours * 2)
weighting = create_weighting(self._config[CONFIG_TARGET_WEIGHTING] if CONFIG_TARGET_WEIGHTING in self._config else None, number_of_slots)

proposed_target_timeframes = None
if (self._config[CONFIG_TARGET_TYPE] == CONFIG_TARGET_TYPE_CONTINUOUS):
self._target_timeframes = calculate_continuous_times(
proposed_target_timeframes = calculate_continuous_times(
applicable_time_periods,
target_hours,
find_highest_values,
@@ -192,7 +195,7 @@
hours_mode = self._config[CONFIG_TARGET_HOURS_MODE]
)
elif (self._config[CONFIG_TARGET_TYPE] == CONFIG_TARGET_TYPE_INTERMITTENT):
self._target_timeframes = calculate_intermittent_times(
proposed_target_timeframes = calculate_intermittent_times(
applicable_time_periods,
target_hours,
find_highest_values,
@@ -204,6 +207,7 @@
else:
_LOGGER.error(f"Unexpected target type: {self._config[CONFIG_TARGET_TYPE]}")

self._target_timeframes = proposed_target_timeframes
self._attributes["target_times"] = self._target_timeframes
self._attributes["target_times_last_evaluated"] = current_date
_LOGGER.debug(f"calculated rates: {self._target_timeframes}")
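Two behavioural points in this hunk are easy to miss: the applicable periods are built from the rolling window (`get_start_and_end_times(..., True)`) while the completeness check uses the fixed window (`..., False`), and the calculated result is now held in `proposed_target_timeframes` before being written back to `self._target_timeframes` once, after the type branch resolves. A small self-contained sketch of that assignment pattern, with a fake slot picker standing in for `calculate_continuous_times`/`calculate_intermittent_times` and plain strings standing in for the `CONFIG_TARGET_TYPE_*` constants:

```python
from datetime import datetime, timedelta, timezone

# Fake slot picker so the sketch runs on its own: it simply takes the cheapest
# 30-minute slots, which is not the real calculate_continuous_times /
# calculate_intermittent_times logic.
def pick_cheapest_slots(periods, target_hours):
    number_of_slots = int(target_hours * 2)
    return sorted(periods, key=lambda p: p["value"])[:number_of_slots]

def evaluate(target_type, applicable_time_periods, target_hours):
    proposed_target_timeframes = None
    if target_type == "continuous":
        proposed_target_timeframes = pick_cheapest_slots(applicable_time_periods, target_hours)
    elif target_type == "intermittent":
        proposed_target_timeframes = pick_cheapest_slots(applicable_time_periods, target_hours)
    else:
        print(f"Unexpected target type: {target_type}")
    # Mirrors the diff: the stored timeframes are written once, after the
    # branch has resolved, from the single proposed_target_timeframes variable.
    return proposed_target_timeframes

now = datetime(2024, 6, 1, tzinfo=timezone.utc)
periods = [
    {"start": now + timedelta(minutes=30 * i), "end": now + timedelta(minutes=30 * (i + 1)), "value": v}
    for i, v in enumerate([0.30, 0.10, 0.05, 0.25])
]
print([p["value"] for p in evaluate("intermittent", periods, 1)])  # [0.05, 0.1]
```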
12 changes: 6 additions & 6 deletions tests/unit/__init__.py
@@ -8,15 +8,15 @@
_LOGGER = logging.getLogger(__name__)

def create_data_source_data(period_from: datetime, period_to: datetime, expected_values: list):
rates = []
values = []
current_valid_from = period_from
current_valid_to = None

rate_index = 0
while current_valid_to is None or current_valid_to < period_to:
current_valid_to = current_valid_from + timedelta(minutes=30)

rates.append({
values.append({
"start": current_valid_from,
"end": current_valid_to,
"value": expected_values[rate_index],
@@ -28,19 +28,19 @@ def create_data_source_data(period_from: datetime, period_to: datetime, expected
if (rate_index > (len(expected_values) - 1)):
rate_index = 0

return rates
return values

def get_start(rate):
return rate["start"]

def values_to_thirty_minute_increments(items: list, period_from: datetime, period_to: datetime):
"""Process the collection of rates to ensure they're in 30 minute periods"""
"""Process the collection of values to ensure they're in 30 minute periods"""
starting_period_from = period_from
results = []

items.sort(key=get_start)

# We need to normalise our data into 30 minute increments so that all of our rates across all tariffs are the same and it's
# We need to normalise our data into 30 minute increments so that all of our values across all tariffs are the same and it's
# easier to calculate our target rate sensors
for item in items:
value = float(item["value"])
@@ -53,7 +53,7 @@ def values_to_thirty_minute_increments(items: list, period_from: datetime, perio
else:
start = starting_period_from

# Some rates don't have end dates, so we should treat this as our period to target
# Some values don't have end dates, so we should treat this as our period to target
if "end" in item and item["end"] is not None:
target_date = as_utc(parse_datetime(item["end"]))

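For context on the test-helper rename (`rates` → `values`), here is a runnable sketch of how the helper is typically used in the tests. The body below is a simplified stand-in, not the exact implementation from `tests/unit/__init__.py`: it builds 30-minute slots between the two dates and cycles through the expected values.

```python
from datetime import datetime, timedelta, timezone

# Simplified stand-in for the renamed test helper; the real implementation
# lives in tests/unit/__init__.py.
def create_data_source_data(period_from, period_to, expected_values):
    values = []
    current_start = period_from
    value_index = 0
    while current_start < period_to:
        current_end = current_start + timedelta(minutes=30)
        values.append({"start": current_start, "end": current_end, "value": expected_values[value_index]})
        current_start = current_end
        value_index = (value_index + 1) % len(expected_values)  # cycle through the expected values
    return values

period_from = datetime(2024, 6, 1, 0, 0, tzinfo=timezone.utc)
period_to = period_from + timedelta(hours=2)
data = create_data_source_data(period_from, period_to, [0.10, 0.20])
assert len(data) == 4                                   # four 30-minute slots over two hours
assert [d["value"] for d in data] == [0.10, 0.20, 0.10, 0.20]
```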