@@ -4,12 +4,12 @@ import fetchStatistics from "./fetch-statistics";
 import fetchStates from "./fetch-states";
 import {
   TimestampRange,
-  History,
   isEntityIdAttrConfig,
   EntityConfig,
   isEntityIdStateConfig,
   isEntityIdStatisticsConfig,
   HistoryInRange,
+  EntityState,
 } from "../types";
 
 export function mapValues<T, S>(
@@ -25,13 +25,14 @@ async function fetchSingleRange(
   significant_changes_only: boolean,
   minimal_response: boolean
 ): Promise<HistoryInRange> {
-  const start = new Date(startT);
+  const start = new Date(startT - 1);
+  endT = Math.min(endT, Date.now());
   const end = new Date(endT);
-  let historyInRange: HistoryInRange;
+  let history: EntityState[];
   if (isEntityIdStatisticsConfig(entity)) {
-    historyInRange = await fetchStatistics(hass, entity, [start, end]);
+    history = await fetchStatistics(hass, entity, [start, end]);
   } else {
-    historyInRange = await fetchStates(
+    history = await fetchStates(
       hass,
       entity,
       [start, end],
@@ -40,25 +41,19 @@ async function fetchSingleRange(
     );
   }
 
-  const { history, range } = historyInRange;
   /*
-  home assistant will "invent" a datapoint at startT with the previous known value, except if there is actually one at startT.
-  To avoid these duplicates, the "fetched range" is capped to end at the last known point instead of endT.
-  This ensures that the next fetch will start with a duplicate of the last known datapoint, which can then be removed.
-  On top of that, in order to ensure that the last known point is extended to endT, I duplicate the last datapoint
-  and set its date to endT.
+  home assistant will "invent" a datapoint at startT with the previous
+  known value, except if there is actually one at startT.
+  To avoid these duplicates, the fetched range starts at startT-1,
+  but the first point is marked to be deleted (fake_boundary_datapoint).
+  Deletion occurs when merging the fetched range into the cached history.
   */
+  let range: [number, number] = [startT, endT];
   if (history.length) {
-    const last = history[history.length - 1];
-    const dup = JSON.parse(JSON.stringify(last));
-    history[0].duplicate_datapoint = true;
-    dup.duplicate_datapoint = true;
-    dup.last_updated = Math.min(+end, Date.now());
-    history.push(dup);
+    history[0].fake_boundary_datapoint = true;
   }
-  Math.min(+end, Date.now());
   return {
-    range: [range[0], Math.min(range[1], Date.now())],
+    range,
     history,
   };
 }
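
The comment above is the heart of this change: the request now starts just before the window (startT - 1), so the point Home Assistant invents at the start of the request falls outside the declared range, gets flagged, and is dropped when the fetched slice is merged into the cache. Below is a minimal sketch of that merge, using a reduced, hypothetical state shape with only the fields this diff touches (the real EntityState type lives in ../types and is not shown here); the timestamps and values are made up for illustration.

// Sketch only: a stand-in for EntityState with just the fields used in this diff.
type StateSketch = {
  timestamp: number;
  state: string;
  fake_boundary_datapoint?: boolean;
};

// Cached history already covers up to t = 100; a later fetch for [100, 200]
// actually requests from 99, so Home Assistant repeats the value known there.
const cached: StateSketch[] = [
  { timestamp: 0, state: "on" },
  { timestamp: 80, state: "off" },
];
const fetched: StateSketch[] = [
  { timestamp: 99, state: "off", fake_boundary_datapoint: true }, // invented boundary point
  { timestamp: 150, state: "on" },
];

// The same two filters Cache.add() applies: drop flagged boundary points
// (the very first point is always kept) and collapse repeated timestamps.
const merged = [...cached, ...fetched]
  .sort((a, b) => a.timestamp - b.timestamp)
  .filter((x, i) => i === 0 || !x.fake_boundary_datapoint)
  .filter((x, i, h) => h[i - 1]?.timestamp !== x.timestamp);
// merged timestamps: 0, 80, 150 — the invented point at 99 is gone.
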
@@ -75,8 +70,22 @@ export function getEntityKey(entity: EntityConfig) {
 }
 export default class Cache {
   ranges: Record<string, TimestampRange[]> = {};
-  histories: Record<string, History> = {};
+  histories: Record<string, EntityState[]> = {};
   busy = Promise.resolve(); // mutex
+
+  add(entity: EntityConfig, states: EntityState[], range: [number, number]) {
+    const entityKey = getEntityKey(entity);
+    let h = (this.histories[entityKey] ??= []);
+    h.push(...states);
+    h.sort((a, b) => a.timestamp - b.timestamp);
+    h = h.filter((x, i) => i == 0 || !x.fake_boundary_datapoint);
+    h = h.filter((_, i) => h[i - 1]?.timestamp !== h[i].timestamp);
+    this.histories[entityKey] = h;
+    this.ranges[entityKey] ??= [];
+    this.ranges[entityKey].push(range);
+    this.ranges[entityKey] = compactRanges(this.ranges[entityKey]);
+  }
+
   clearCache() {
     this.ranges = {};
     this.histories = {};
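
The new add() method centralises the merge that getHistory previously did inline (see the next hunk). It relies on the module's compactRanges helper, which this diff does not show. The sketch below is only an assumption about what that helper does, named compactRangesSketch to make clear it is a hypothetical stand-in and not the real implementation: it merges overlapping or touching [start, end] ranges so the cache can tell which spans are already fetched.

type Range = [number, number];

// Assumed behaviour of compactRanges: sort, then merge ranges that overlap or touch.
function compactRangesSketch(ranges: Range[]): Range[] {
  const sorted = [...ranges].sort((a, b) => a[0] - b[0]);
  const out: Range[] = [];
  for (const [start, end] of sorted) {
    const last = out[out.length - 1];
    if (last && start <= last[1] + 1) last[1] = Math.max(last[1], end);
    else out.push([start, end]);
  }
  return out;
}

// compactRangesSketch([[0, 100], [100, 200], [300, 400]]) -> [[0, 200], [300, 400]]
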
@@ -112,47 +121,34 @@ export default class Cache {
             minimal_response
           );
           if (fetchedHistory === null) continue;
-          let h = (this.histories[entityKey] ??= []);
-          h.push(...fetchedHistory.history);
-          h.sort((a, b) => a.last_updated - b.last_updated);
-          h = h.filter(
-            (x, i) => i == 0 || i == h.length - 1 || !x.duplicate_datapoint
-          );
-          h = h.filter(
-            (_, i) => h[i].last_updated !== h[i + 1]?.last_updated
-          );
-          this.histories[entityKey] = h;
-          this.ranges[entityKey].push(fetchedHistory.range);
-          this.ranges[entityKey] = compactRanges(this.ranges[entityKey]);
+          this.add(entity, fetchedHistory.history, fetchedHistory.range);
         }
       });
 
       await Promise.all(promises);
     }));
   }
 
-  private removeOutsideRange(range: TimestampRange) {
+  removeOutsideRange(range: TimestampRange) {
     this.ranges = mapValues(this.ranges, (ranges) =>
       subtractRanges(ranges, [
         [Number.NEGATIVE_INFINITY, range[0] - 1],
         [range[1] + 1, Number.POSITIVE_INFINITY],
       ])
     );
     this.histories = mapValues(this.histories, (history) => {
-      let first: History[0] | undefined;
-      let last: History[0] | undefined;
+      let first: EntityState | undefined;
+      let last: EntityState | undefined;
       const newHistory = history.filter((datum) => {
-        if (datum.last_updated <= range[0]) first = datum;
-        else if (!last && datum.last_updated >= range[1]) last = datum;
+        if (datum.timestamp <= range[0]) first = datum;
+        else if (!last && datum.timestamp >= range[1]) last = datum;
         else return true;
         return false;
       });
       if (first) {
-        first.last_updated = range[0];
         newHistory.unshift(first);
       }
       if (last) {
-        last.last_updated = range[1];
         newHistory.push(last);
       }
       return newHistory;
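
Note that removeOutsideRange also stops clamping the surviving boundary points to the edges of the window (the two deleted last_updated assignments): they now keep their real timestamps. A reduced sketch of the pruning, using the same hypothetical state shape as above, shows which points survive; pruneSketch simply mirrors the filter in the hunk above.

type PointSketch = { timestamp: number; state: string };

// Keep everything inside [range[0], range[1]], plus the most recent point at or
// before the window start and the first point at or after the window end,
// both with their original timestamps.
function pruneSketch(history: PointSketch[], range: [number, number]): PointSketch[] {
  let first: PointSketch | undefined;
  let last: PointSketch | undefined;
  const kept = history.filter((datum) => {
    if (datum.timestamp <= range[0]) first = datum;
    else if (!last && datum.timestamp >= range[1]) last = datum;
    else return true;
    return false;
  });
  if (first) kept.unshift(first);
  if (last) kept.push(last);
  return kept;
}

// pruneSketch with points at t = 0, 40, 60, 120 and range [50, 100]
// keeps t = 40, 60 and 120.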