Skip to content

Commit 5bf6975

Browse files
authored
Merge pull request #330 from Helene/scrape_job_params
Add support for prometheus scrape job params config
2 parents 6ed391f + 9131efe commit 5bf6975

File tree

3 files changed

+199
-24
lines changed

3 files changed

+199
-24
lines changed

source/collector.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -152,6 +152,10 @@ def __init__(self, sensor: str, period: str):
152152
self.filtersMap = self._get_all_filters()
153153
self.labels = self._get_sensor_labels()
154154

155+
@property
156+
def md(self):
157+
return MetadataHandler()
158+
155159
def cleanup_metrics_values(self) -> None:
156160
for name in self.metrics.keys():
157161
self.metrics[name].timeseries = []
@@ -162,8 +166,7 @@ def setup_static_metrics_data(self, include_metrics: Optional[list] = None,
162166
include_metrics = include_metrics or []
163167
exclude_metrics = exclude_metrics or []
164168

165-
md = MetadataHandler()
166-
metrics = md.metaData.getSensorMetricNames(self.sensor)
169+
metrics = self.md.metaData.getSensorMetricNames(self.sensor)
167170
if len(metrics) < 1:
168171
return # this should not happen
169172

@@ -178,9 +181,8 @@ def setup_static_metrics_data(self, include_metrics: Optional[list] = None,
178181

179182
def _setup_static_metrics_data(self, metric_names: List[str]):
180183
mDict = {}
181-
md = MetadataHandler()
182-
spec = md.metricsDesc
183-
metricsTypes = md.metaData.metricsType
184+
spec = self.md.metricsDesc
185+
metricsTypes = self.md.metaData.metricsType
184186

185187
for name in metric_names:
186188
mtype = 'gauge'
@@ -195,12 +197,10 @@ def _setup_static_metrics_data(self, metric_names: List[str]):
195197
self.metrics = mDict
196198

197199
def _get_all_filters(self):
198-
md = MetadataHandler()
199-
return md.metaData.getAllFilterMapsForSensor(self.sensor)
200+
return self.md.metaData.getAllFilterMapsForSensor(self.sensor)
200201

201202
def _get_sensor_labels(self):
202-
md = MetadataHandler()
203-
return md.metaData.getSensorLabels(self.sensor)
203+
return self.md.metaData.getSensorLabels(self.sensor)
204204

205205

206206
@classattributes(dict(metricsaggr=None, filters=None, grouptags=None,

source/prometheus.py

Lines changed: 19 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ def format_response(self, data) -> [str]:
7575
return resp
7676

7777
@execution_time()
78-
def metrics(self, export_sensors: Optional[list] = None):
78+
def metrics(self, export_sensors: Optional[list] = None, filters: Optional[dict] = None):
7979
export_sensors = export_sensors or []
8080
resp = []
8181

@@ -84,18 +84,18 @@ def metrics(self, export_sensors: Optional[list] = None):
8484
respList = self.format_response(collector.cached_metrics)
8585
resp.extend(respList)
8686
elif len(export_sensors) > 0:
87-
resp = self._metrics(export_sensors)
87+
resp = self._metrics(export_sensors, filters)
8888
else:
8989
resp = self._metrics(self.static_sensors_list)
9090

9191
return resp
9292

93-
def _metrics(self, export_sensors: list):
93+
def _metrics(self, export_sensors: list, filters: Optional[dict] = None):
9494
resp = []
9595
collectors = []
9696

9797
for sensor in export_sensors:
98-
collector = self.build_collector(sensor)
98+
collector = self.build_collector(sensor, filters)
9999
collectors.append(collector)
100100

101101
for collector in collectors:
@@ -121,27 +121,30 @@ def initialize_cache_collectors(self):
121121
name=thread_name).subscribe()
122122

123123
@cond_execution_time(enabled=analytics.inspect)
124-
def build_collector(self, sensor) -> SensorCollector:
124+
def build_collector(self, sensor: str, filters: Optional[dict] = None) -> SensorCollector:
125125

126126
period = self.md.getSensorPeriod(sensor)
127127
if period < 1:
128128
self.logger.error(MSG['SensorDisabled'].format(sensor))
129129
raise cherrypy.HTTPError(400, MSG['SensorDisabled'].format(sensor))
130130

131-
attrs = {}
131+
attrs = {'sensor': sensor, 'period': period}
132132

133-
if self.raw_data:
134-
attrs = {'sensor': sensor, 'period': period,
135-
'nsamples': period, 'rawData': True}
136-
elif "counter" in self.TOPO.getSensorMetricTypes(sensor).values():
137-
attrs = {'sensor': sensor, 'period': period,
138-
'nsamples': period, 'rawData': True}
133+
if self.raw_data or "counter" in self.TOPO.getSensorMetricTypes(sensor).values():
134+
attrs.update({'nsamples': period, 'rawData': True})
139135
self.logger.debug(MSG['SensorForceRawData'].format(sensor))
140136
else:
141-
attrs = {'sensor': sensor, 'period': period,
142-
'nsamples': 1}
137+
attrs.update({'nsamples': 1})
138+
if filters:
139+
for key, value in filters.items():
140+
if isinstance(value, list):
141+
filters[key] = "|".join(value)
142+
self.logger.debug(f"Collector filters: {filters}")
143+
attrs['filters'] = filters
144+
143145
request = QueryPolicy(**attrs)
144146
collector = SensorCollector(sensor, period, self.logger, request)
147+
collector.validate_query_filters()
145148

146149
# self.logger.trace(f'request instance {str(request.__dict__)}')
147150
# self.logger.trace(f'Created Collector instance {str(collector.__dict__)}')
@@ -154,6 +157,7 @@ def GET(self, **params):
154157
resp = []
155158

156159
self.logger.trace(f"Request headers:{str(cherrypy.request.headers)}")
160+
self.logger.info(f"Request params:{str(params)}")
157161
conn = cherrypy.request.headers.get('Host').split(':')
158162
if len(conn) == 2 and int(conn[1]) != int(self.port):
159163
self.logger.error(MSG['EndpointNotSupportedForPort'].
@@ -163,7 +167,7 @@ def GET(self, **params):
163167
if self.endpoints and self.endpoints.get(cherrypy.request.script_name,
164168
None):
165169
sensor = self.endpoints[cherrypy.request.script_name]
166-
resp = self.metrics([sensor])
170+
resp = self.metrics([sensor], params)
167171
cherrypy.response.headers['Content-Type'] = 'text/plain'
168172
resString = '\n'.join(resp) + '\n'
169173
return resString

tests/test_collector.py

Lines changed: 171 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,171 @@
1+
import logging
2+
import os
3+
import json
4+
from unittest import mock
5+
from source.bridgeLogger import configureLogging
6+
from source.queryHandler.Topo import Topo
7+
from source.collector import QueryPolicy, SensorCollector
8+
from nose2.tools.such import helper as assert_helper
9+
from nose2.tools.decorators import with_setup
10+
11+
12+
def my_setup():
13+
global path, topo, logger, prometheus_attrs, pfilters, pquery_filters, wrong_pfilters
14+
path = os.getcwd()
15+
topoStrFile = os.path.join(path, "tests", "test_data", 'topoStr.json')
16+
with open(topoStrFile) as f:
17+
topoStr = json.load(f)
18+
topo = Topo(topoStr)
19+
logger = configureLogging(path, None)
20+
prometheus_attrs = {'sensor': 'GPFSFilesystem', 'period': 300,
21+
'nsamples': 300, 'rawData': True}
22+
pfilters = {'node': 'scale-16', 'gpfs_fs_name': 'afmCacheFS'}
23+
pquery_filters = [f"{k}={v}" for k, v in pfilters.items()]
24+
wrong_pfilters = {'node': 'scale-16', 'gpfs_filesystem_name': 'afmCacheFS'}
25+
26+
27+
@with_setup(my_setup)
28+
def test_case01():
29+
with mock.patch('source.collector.QueryPolicy.md') as md:
30+
md_instance = md.return_value
31+
md_instance.includeDiskData.return_value = False
32+
md_instance.logger = logging.getLogger(__name__)
33+
request = QueryPolicy(**prometheus_attrs)
34+
query = request.get_zimon_query()
35+
query.includeDiskData = md_instance.includeDiskData.return_value
36+
queryString = 'get -j {0} {1} group {2} bucket_size {3} {4}'.format(
37+
'', '-z', prometheus_attrs.get('sensor'),
38+
prometheus_attrs.get('period'),
39+
f"last {prometheus_attrs.get('period')}")
40+
queryString += '\n'
41+
assert "group" in query.__str__()
42+
assert "last" in query.__str__()
43+
assert "from" not in query.__str__()
44+
assert queryString == query.__str__()
45+
46+
47+
@with_setup(my_setup)
48+
def test_case02():
49+
prometheus_attrs.update({'nsamples': 1, 'rawData': False})
50+
with mock.patch('source.collector.QueryPolicy.md') as md:
51+
md_instance = md.return_value
52+
md_instance.includeDiskData.return_value = False
53+
md_instance.logger = logging.getLogger(__name__)
54+
request = QueryPolicy(**prometheus_attrs)
55+
query = request.get_zimon_query()
56+
query.includeDiskData = md_instance.includeDiskData.return_value
57+
queryString = 'get -j {0} {1} group {2} bucket_size {3} {4}'.format(
58+
'', '', prometheus_attrs.get('sensor'),
59+
prometheus_attrs.get('period'),
60+
f"last {prometheus_attrs.get('nsamples')}")
61+
queryString += '\n'
62+
assert "group" in query.__str__()
63+
assert "last" in query.__str__()
64+
assert "from" not in query.__str__()
65+
66+
67+
@with_setup(my_setup)
68+
def test_case03():
69+
prometheus_attrs.update({'filters': pfilters})
70+
with mock.patch('source.collector.QueryPolicy.md') as md:
71+
md_instance = md.return_value
72+
md_instance.includeDiskData.return_value = False
73+
md_instance.logger = logging.getLogger(__name__)
74+
request = QueryPolicy(**prometheus_attrs)
75+
query = request.get_zimon_query()
76+
query.includeDiskData = md_instance.includeDiskData.return_value
77+
queryString = 'get -j {0} {1} group {2} bucket_size {3} {4}'.format(
78+
'', '-z', prometheus_attrs.get('sensor'),
79+
prometheus_attrs.get('period'),
80+
f"last {prometheus_attrs.get('period')}")
81+
queryString += ' from ' + ",".join(pquery_filters)
82+
queryString += '\n'
83+
assert "group" in query.__str__()
84+
assert "last" in query.__str__()
85+
assert "from" in query.__str__()
86+
assert queryString == query.__str__()
87+
88+
89+
@with_setup(my_setup)
90+
def test_case04():
91+
prometheus_attrs.update({'filters': pfilters})
92+
prometheus_attrs.update({'nsamples': 1, 'rawData': False})
93+
with mock.patch('source.collector.QueryPolicy.md') as md:
94+
md_instance = md.return_value
95+
md_instance.includeDiskData.return_value = False
96+
md_instance.logger = logging.getLogger(__name__)
97+
request = QueryPolicy(**prometheus_attrs)
98+
query = request.get_zimon_query()
99+
query.includeDiskData = md_instance.includeDiskData.return_value
100+
queryString = 'get -j {0} {1} group {2} bucket_size {3} {4}'.format(
101+
'', '', prometheus_attrs.get('sensor'),
102+
prometheus_attrs.get('period'),
103+
f"last {prometheus_attrs.get('nsamples')}")
104+
queryString += ' from ' + ",".join(pquery_filters)
105+
queryString += '\n'
106+
assert "group" in query.__str__()
107+
assert "last" in query.__str__()
108+
assert "from" in query.__str__()
109+
assert queryString == query.__str__()
110+
111+
112+
@with_setup(my_setup)
113+
@mock.patch('source.collector.QueryPolicy.md')
114+
@mock.patch('source.collector.SensorTimeSeries.md')
115+
@mock.patch('source.collector.SensorCollector.md')
116+
def test_case05(col_md, sts_md, md):
117+
sensor = prometheus_attrs.get('sensor')
118+
period = prometheus_attrs.get('period')
119+
logger = logging.getLogger(__name__)
120+
prometheus_attrs.update({'filters': pfilters})
121+
prometheus_attrs.update({'nsamples': 1, 'rawData': False})
122+
123+
md_instance = md.return_value
124+
md_instance.includeDiskData.return_value = False
125+
md_instance.logger.return_value = logger
126+
md_instance.metaData = topo
127+
# md_instance1 = sts_md.return_value
128+
# md_instance2 = col_md.return_value
129+
request = QueryPolicy(**prometheus_attrs)
130+
collector = SensorCollector(sensor, period, logger, request)
131+
collector.md = md_instance
132+
collector.labels = collector._get_sensor_labels()
133+
collector.filtersMap = collector._get_all_filters()
134+
assert collector.sensor == sensor
135+
assert collector.period == period
136+
assert collector.request == request
137+
assert collector.labels == topo.getSensorLabels(sensor)
138+
assert len(collector.filtersMap) > 0
139+
assert all(i in collector.labels for i in pfilters.keys())
140+
141+
142+
@with_setup(my_setup)
143+
@mock.patch('source.collector.QueryPolicy.md')
144+
@mock.patch('source.collector.SensorTimeSeries.md')
145+
@mock.patch('source.collector.SensorCollector.md')
146+
def test_case06(col_md, sts_md, md):
147+
sensor = prometheus_attrs.get('sensor')
148+
period = prometheus_attrs.get('period')
149+
logger = logging.getLogger(__name__)
150+
prometheus_attrs.update({'filters': wrong_pfilters})
151+
prometheus_attrs.update({'nsamples': 1, 'rawData': False})
152+
153+
md_instance = md.return_value
154+
md_instance.includeDiskData.return_value = False
155+
md_instance.logger.return_value = logger
156+
md_instance.metaData = topo
157+
# md_instance1 = sts_md.return_value
158+
# md_instance2 = col_md.return_value
159+
request = QueryPolicy(**prometheus_attrs)
160+
collector = SensorCollector(sensor, period, logger, request)
161+
collector.md = md_instance
162+
collector.labels = collector._get_sensor_labels()
163+
collector.filtersMap = collector._get_all_filters()
164+
assert collector.sensor == sensor
165+
assert collector.period == period
166+
assert collector.request == request
167+
assert collector.labels == topo.getSensorLabels(sensor)
168+
assert len(collector.filtersMap) > 0
169+
assert not all(i in collector.labels for i in wrong_pfilters.keys())
170+
with assert_helper.assertRaises(Exception):
171+
assert collector.validate_query_filters()

0 commit comments

Comments (0)