# The upstream API caps the time span of a single aggregated-data request.
MAX_HOURLY_INTERVAL_DAYS = 63  # API limitation for hourly aggregated data
MAX_DAILY_INTERVAL_DAYS = 100  # API limitation for daily aggregated data

# Multiplier applied to raw API values when the key appears as a substring
# of the property name (presumably m³ → litres for WaterVolume — confirm
# against the API units).
UNIT_CONVERSIONS = {
    "WaterVolume": 1000,
}
37+
3438
3539def align_to_hour (dt : datetime ) -> datetime :
3640 aligned = dt .replace (minute = 0 , second = 0 , microsecond = 0 )
@@ -164,19 +168,31 @@ def _calculate_time_range(
164168
165169 _LOGGER .debug (f"Importing data from { start_time } to { end_time } for { statistic_id } " )
166170 return start_time , end_time
167-
168- def _process_api_values (self , values : list , adjust_hour : bool = False ) -> list [dict ]:
171+
172+ def _process_api_values (self , values : list , property_name : str , adjust_hour : bool = False ) -> list [dict ]:
169173 statistics_data = []
174+
170175 for value in values :
171176 if "DateTime" in value and "Value" in value and value ["Value" ] is not None :
172177 dt = datetime .fromisoformat (value ["DateTime" ])
173178 if dt .tzinfo is None :
174179 dt = dt .replace (tzinfo = timezone .utc )
180+
175181 if adjust_hour :
176182 dt = dt - timedelta (hours = 1 )
177- statistics_data .append ({"start" : dt , "value" : value ["Value" ]})
183+
184+ val = value ["Value" ]
185+
186+ for key , factor in UNIT_CONVERSIONS .items ():
187+ if key in property_name :
188+ val *= factor
189+ break
190+
191+ statistics_data .append ({"start" : dt , "value" : val })
192+
178193 return statistics_data
179194
195+
180196 async def _fetch_aggregated_data (
181197 self ,
182198 device : CoCoDevice ,
@@ -200,7 +216,9 @@ async def _fetch_aggregated_data(
200216 )
201217
202218 if data and "Values" in data :
203- statistics_data .extend (self ._process_api_values (data ["Values" ], adjust_hour ))
219+ statistics_data .extend (
220+ self ._process_api_values (data ["Values" ], property_name , adjust_hour )
221+ )
204222
205223 current_start = chunk_end
206224
0 commit comments