@@ -32,7 +32,7 @@
 from utils import classattributes
 
 
-local_cache = []
+local_cache = set()
 
 
 class TimeSeries(object):
@@ -56,7 +56,7 @@ def parse_tags(self, filtersMap):
             'Single ts identifiers', ', '.join(ident)))
         found = False
         for filtersDict in filtersMap:
-            if all((value in filtersDict.values()) for value in ident):
+            if set(filtersDict.values()) == set(ident):
                 logger.trace(MSG['ReceivAttrValues'].format(
                     'filtersKeys', ', '.join(filtersDict.keys())))
                 if len(self.columnInfo.keys) == 1:
@@ -68,17 +68,15 @@ def parse_tags(self, filtersMap):
                     break
         # detected zimon key, do we need refresh local TOPO?
         if not found:
-            already_reported = False
-            for cache_item in local_cache:
-                if set(cache_item) == set(ident):
-                    logger.trace(MSG['NewKeyAlreadyReported'].format(ident))
-                    already_reported = True
-                    break
-            if not already_reported:
+            cache_size = len(local_cache)
+            local_cache.add(frozenset(ident))
+            updated_size = len(local_cache)
+            if updated_size > cache_size:
                 logger.trace(MSG['NewKeyDetected'].format(ident))
-                local_cache.append(ident)
                 md = MetadataHandler()
                 Thread(name='AdHocMetaDataUpdate', target=md.update).start()
+            else:
+                logger.trace(MSG['NewKeyAlreadyReported'].format(ident))
 
         for _key, _values in tagsDict.items():
             if len(_values) > 1:
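Note on the hunk above: `set.union()` returns a new set rather than mutating `local_cache`, and a plain list such as `ident` is not hashable, so the rewritten dedupe stores each identifier as a `frozenset` via `set.add()` and drops the now-invalid `local_cache.append(ident)`. A minimal standalone sketch of the size-compare idiom (the sample identifier values are made up):

    local_cache = set()

    def seen_before(ident):
        before = len(local_cache)
        local_cache.add(frozenset(ident))  # add() mutates in place; frozenset is hashable
        return len(local_cache) == before  # size unchanged -> key was already cached

    assert seen_before(['gpfs01', 'node-1']) is False  # first sighting grows the set
    assert seen_before(['node-1', 'gpfs01']) is True   # same members, any order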
@@ -357,10 +356,8 @@ def _collect(self):
             for value, columnInfo in zip(row.values, res.columnInfos):
                 columnValues[columnInfo][row.tstamp] = value
 
-        timeseries = []
         for columnInfo, dps in columnValues.items():
             ts = TimeSeries(columnInfo, dps, self.filtersMap)
-            timeseries.append(ts)
             if self.metrics.get(columnInfo.keys[0].metric) is not None:
                 self.logger.trace(MSG['MetricInResults'].format(
                     columnInfo.keys[0].metric))
@@ -370,7 +367,7 @@ def _collect(self):
                 self.logger.warning(MSG['MetricNotInResults'].format(
                     columnInfo.keys[0].metric))
                 mt = MetricTimeSeries(columnInfo.keys[0].metric, '')
-                mt.timeseries = timeseries
+                mt.timeseries.append(ts)
                 self.metrics[columnInfo.keys[0].metric] = mt
             # self.logger.info(f'rows data {str(columnValues)}')
 
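Note on the two `_collect` hunks above: previously every `MetricTimeSeries` was handed the same accumulated `timeseries` list, so each metric object aliased every column's series; appending only the current `ts` keeps the per-metric lists independent. A small sketch of the aliasing pitfall (the `Holder` class is illustrative, not the real `MetricTimeSeries`):

    class Holder:
        def __init__(self):
            self.timeseries = []

    shared, a, b = ['ts1'], Holder(), Holder()
    a.timeseries = shared
    shared.append('ts2')
    b.timeseries = shared
    assert a.timeseries is b.timeseries   # both alias one growing list
    c, d = Holder(), Holder()
    c.timeseries.append('ts1')            # per-instance append instead
    d.timeseries.append('ts2')
    assert c.timeseries == ['ts1'] and d.timeseries == ['ts2']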
@@ -432,16 +429,16 @@ def validate_group_tags(self):
 
         # check groupBy settings
         if self.request.grouptags:
-            filter_keys = self.md.metaData.getAllFilterKeysForSensor(
-                self.sensor)
+            filter_keys = set()
+            for filter in self.filtersMap:
+                filter_keys.update(filter.keys())
             if not filter_keys:
                 self.logger.error(MSG['GroupByErr'])
                 raise cherrypy.HTTPError(
-                    400, MSG['AttrNotValid'].format('filter'))
-            groupKeys = self.request.grouptags
-            if not all(key in filter_keys for key in groupKeys):
-                self.logger.error(MSG['AttrNotValid'].format('groupBy'))
+                    400, MSG['AttrNotValid'].format('groupBy key'))
+            if not set(self.request.grouptags).issubset(filter_keys):
+                self.logger.error(MSG['AttrNotValid'].format('groupBy key'))
                 self.logger.error(MSG['ReceivAttrValues'].format(
-                    'groupBy', ", ".join(filter_keys)))
+                    'groupBy keys', ", ".join(filter_keys)))
                 raise cherrypy.HTTPError(
-                    400, MSG['AttrNotValid'].format('filter'))
+                    400, MSG['AttrNotValid'].format('groupBy key'))
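Note on the validation hunk above: the valid keys are now derived from `self.filtersMap` itself, and `set.issubset()` replaces the `all(...)` scan, so every requested groupBy tag must be a known filter key. A minimal sketch with made-up filter maps:

    filtersMap = [{'node': 'node-1', 'gpfs_fs_name': 'fs1'},
                  {'node': 'node-2', 'gpfs_fs_name': 'fs2'}]
    filter_keys = set()
    for f in filtersMap:
        filter_keys.update(f.keys())                 # union of all known filter keys
    assert set(['node']).issubset(filter_keys)       # valid groupBy request
    assert not set(['pool']).issubset(filter_keys)   # unknown key -> HTTP 400 above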