
Commit 17f0060

prometheus exporter initial version
Signed-off-by: hwassman <[email protected]>
1 parent 97df4a5 commit 17f0060

File tree: 5 files changed, +386 -6 lines changed

source/confParser.py

Lines changed: 1 addition & 1 deletion

@@ -46,7 +46,7 @@ def checkTLSsettings(args):
 
 
 def checkApplicationPort(args):
-    if not args.get('port') or not args.get('apiKeyValue'):
+    if not args.get('port', None) and not args.get('prometheus', None):
        return False, MSG['MissingParm']
    return True, ''
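
The reworked check passes when at least one of the two listening ports is configured: the OpenTSDB API port ('port') or the new Prometheus exporter port ('prometheus'). Previously both 'port' and 'apiKeyValue' were required. A minimal behaviour sketch, with MSG stubbed out for illustration:

    # Behaviour of the updated checkApplicationPort(); MSG is stubbed here.
    MSG = {'MissingParm': 'missing mandatory parameter'}

    def checkApplicationPort(args):
        # At least one of 'port' (OpenTSDB API) or 'prometheus' (exporter) must be set.
        if not args.get('port', None) and not args.get('prometheus', None):
            return False, MSG['MissingParm']
        return True, ''

    print(checkApplicationPort({'port': 4242}))        # (True, '')
    print(checkApplicationPort({'prometheus': 9250}))  # (True, '')
    print(checkApplicationPort({}))                    # (False, 'missing mandatory parameter')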

source/config.ini

Lines changed: 5 additions & 2 deletions

@@ -1,4 +1,4 @@
-##################### Grafana Connection Defaults ############################
+##################### OpenTSDB API Connection Defaults ########################
 [connection]
 # port number the bridge listening on for Grafana data requests
 # 4242 - for HTTP connections
@@ -9,7 +9,7 @@ port = 4242
 # Protocol (http, https)
 protocol = http
 
-#################################### Grafana Generic OAuth ####################
+#################################### OpenTSDB API SSL OAuth ##################
 [tls]
 # Directory path of tls key and cert file location
 #tlsKeyPath = /etc/bridge_cert/certs
@@ -20,6 +20,9 @@ protocol = http
 # Name of tls certificate file
 #tlsCertFile = cert.pem
 
+##################### Prometheus Connection Defaults ##########################
+# prometheus = 9250
+
 #################################### GPFS Server ##############################
 [server]
 # The ip address to bind to, empty will bind to all interfaces
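
The new prometheus value is the port on which the Prometheus exporter endpoint listens; it ships commented out, so the exporter stays disabled by default. For illustration only, a sketch of picking the option up with the standard-library configparser once the line is uncommented (the bridge's own source/confParser.py may load config.ini differently):

    # Illustration only: locate a 'prometheus' option anywhere in config.ini.
    import configparser

    cp = configparser.ConfigParser()
    cp.read('config.ini')

    prometheus_port = None
    for section in cp.sections():
        if cp.has_option(section, 'prometheus'):
            prometheus_port = cp.getint(section, 'prometheus')
            break

    print(prometheus_port)  # e.g. 9250 once 'prometheus = 9250' is uncommented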

source/opentsdb.py

Lines changed: 6 additions & 1 deletion

@@ -32,9 +32,10 @@
 class OpenTsdbApi(object):
     exposed = True
 
-    def __init__(self, logger, mdHandler):
+    def __init__(self, logger, mdHandler, port):
         self.logger = logger
         self.__md = mdHandler
+        self.port = port
 
     @property
     def md(self):
@@ -273,6 +274,10 @@ def GET(self, **params):
         """
         resp = []
 
+        conn = cherrypy.request.headers.get('Host').split(':')
+        if int(conn[1]) != int(self.port):
+            raise cherrypy.HTTPError(400, MSG[400])
+
         # /api/suggest
         if 'suggest' in cherrypy.request.script_name:
             resp = self.suggest(params)
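
The added guard ties the OpenTSDB-compatible handler to its own listener: if the port in the request's Host header differs from the port the handler was created with, the bridge answers with HTTP 400. A standalone sketch of the same comparison, assuming a Host header that carries an explicit port (the committed code relies on that as well):

    # Standalone sketch of the Host-header port guard in OpenTsdbApi.GET().
    def host_matches_port(host_header: str, expected_port: int) -> bool:
        conn = host_header.split(':')  # 'bridge.example.com:4242' -> ['bridge.example.com', '4242']
        return int(conn[1]) == int(expected_port)

    print(host_matches_port('bridge.example.com:4242', 4242))  # True  -> request is served
    print(host_matches_port('bridge.example.com:9250', 4242))  # False -> bridge raises HTTP 400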

source/prometheus.py (new file)

Lines changed: 297 additions & 0 deletions

@@ -0,0 +1,297 @@
'''
##############################################################################
# Copyright 2023 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################

Created on Oct 30, 2023

@author: HWASSMAN
'''

import cherrypy
import copy
import json
from messages import ERR, MSG
from typing import Optional
from cherrypy.process.plugins import Monitor
from time import time
from collector import SensorCollector, QueryPolicy
from utils import execution_time


class PrometheusExporter(object):
    exposed = True

    def __init__(self, logger, mdHandler, port):
        self.logger = logger
        self.__md = mdHandler
        self.port = port
        self.static_sensors_list = ['CPU', 'Memory', 'GPFSFileset']
        self.cache_strategy = False
        self.caching_collectors = []
        if self.cache_strategy:
            self.initialize_cache_collectors()

    @property
    def md(self):
        return self.__md

    @property
    def qh(self):
        return self.__md.qh

    @property
    def TOPO(self):
        return self.__md.metaData

    def format_response(self, data) -> [str]:
        resp = []
        for name, metric in data.items():
            header = metric.str_descfmt()
            resp.extend(header)
            for sts in metric.timeseries:
                for _key, _value in sts.dps.items():
                    sts_resp = SingleTimeSeriesResponse(name, _key, _value, sts.tags)
                    self.logger.trace(f'sts_resp.str_expfmt output: {sts_resp.str_expfmt()}')
                    resp.extend(sts_resp.str_expfmt())
        return resp

    @execution_time()
    def metrics(self, export_sensors: Optional[list] = None):
        export_sensors = export_sensors or []
        resp = []

        if self.cache_strategy and self.caching_collectors:
            for collector in self.caching_collectors:
                respList = self.format_response(collector.cached_metrics)
                resp.extend(respList)
        elif len(export_sensors) > 0:
            resp = self._metrics(export_sensors)
        else:
            resp = self._metrics(self.static_sensors_list)

        return resp

    def _metrics(self, export_sensors: list):
        resp = []
        collectors = []

        for sensor in export_sensors:
            collector = self.build_collector(sensor)
            collectors.append(collector)

        for collector in collectors:
            collector.start_collect()

        for collector in collectors:
            collector.thread.join()

        for collector in collectors:
            self.logger.trace('Finished custom thread %r.' % collector.thread.name)
            respList = self.format_response(collector.metrics)
            resp.extend(respList)
        return resp

    def initialize_cache_collectors(self):
        for sensor in self.static_sensors_list:
            collector = self.build_collector(sensor)
            self.caching_collectors.append(collector)
            thread_name = 'Monitor_' + sensor
            Monitor(cherrypy.engine,
                    collector.collect,
                    frequency=collector.period,
                    name=thread_name).subscribe()

    def build_collector(self, sensor) -> SensorCollector:

        period = self.md.getSensorPeriod(sensor)
        if period < 1:
            self.logger.error(MSG['SensorDisabled'].format(sensor))
            raise cherrypy.HTTPError(400, MSG['SensorDisabled'].format(sensor))

        attrs = {}

        # if self.cache_strategy:
        attrs = {'sensor': sensor, 'period': period, 'nsamples': 1}
        request = QueryPolicy(**attrs)
        collector = SensorCollector(sensor, period, self.logger, request)
        # collector.cache = True
        # else:
        #     ts = calendar.timegm(time.gmtime()) - 15
        #     ts = int(round(time.time() * 1000))
        #     ts = int(timer()*1000)

        #     ts = int(time()*1000)
        #     ts1 = ts - 1000
        #     self.logger.trace("ts:{0}, ts1:{1}".format(str(ts), str(ts1)))
        #     qstart = str(int(int(str(ts1)) / 1000))
        #     attrs = {'sensor': sensor, 'period': period, 'start': qstart}
        #     request = QueryPolicy(**attrs)
        #     collector = SensorCollector(sensor, period, self.logger, request)

        self.logger.debug(f'request instance {str(request.__dict__)}')
        self.logger.debug(f'Created Collector instance {str(collector.__dict__)}')
        return collector

    def GET(self, **params):
        '''Handle partial URLs such as /api/suggest?q=cpu_&type=metrics
        where type is one of metrics, tagk or tagv
        or
        Handle /api/search/lookup/m=cpu_idle{node=*}
        where m is the metric and optional term { tagk = tagv } qualifies the lookup.
        For more details please check openTSDB API (version 2.2 and higher) documentation for
        /api/lookup
        /api/search/lookup
        '''
        resp = []

        conn = cherrypy.request.headers.get('Host').split(':')
        if int(conn[1]) != int(self.port):
            raise cherrypy.HTTPError(400, MSG[400])

        # /update
        if 'update' in cherrypy.request.script_name:
            # cherrypy.response.headers['Content-Type'] = 'application/json'
            resp = self.md.update()
            # resp = json.dumps(resp)

        # /metrics_cpu
        elif 'metrics_cpu' in cherrypy.request.script_name:
            resp = self.metrics(['CPU'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics_load
        elif 'metrics_load' in cherrypy.request.script_name:
            resp = self.metrics(['Load'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics_memory
        elif 'metrics_memory' in cherrypy.request.script_name:
            resp = self.metrics(['Memory'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics_network
        elif 'metrics_network' in cherrypy.request.script_name:
            resp = self.metrics(['Network'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics_netstat
        elif 'metrics_netstat' in cherrypy.request.script_name:
            resp = self.metrics(['Netstat'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics_diskfree
        elif 'metrics_diskfree' in cherrypy.request.script_name:
            resp = self.metrics(['DiskFree'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics_gpfs_fileset
        elif 'metrics_gpfs_fileset' in cherrypy.request.script_name:
            resp = self.metrics(['GPFSFileset'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics_gpfs_pool
        elif 'metrics_gpfs_pool' in cherrypy.request.script_name:
            resp = self.metrics(['GPFSPool'])
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        # /metrics
        elif 'metrics' in cherrypy.request.script_name:
            resp = self.metrics()
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            resString = '\n'.join(resp) + '\n'
            return resString
            # return bytes(resString, 'utf-8')

        elif 'aggregators' in cherrypy.request.script_name:
            resp = ["noop", "sum", "avg", "max", "min", "rate"]

        elif 'config/filters' in cherrypy.request.script_name:
            supportedFilters = {}
            filterDesc = {}
            filterDesc['description'] = '''Accepts an exact value or a regular expressions and matches against
            values for the given tag. The value can be omitted if the filter is used to specify groupBy on the tag only.'''
            filterDesc['examples'] = '''node=pm_filter(machine1), node=pm_filter(machine[1-6]), node=pm_filter(m1|m2),
            node=pm_filter(mac.*), node=pm_filter((?!^z).*)'''
            supportedFilters['pm_filter'] = filterDesc
            resp = supportedFilters

        del cherrypy.response.headers['Allow']
        cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
        cherrypy.response.headers['Content-Type'] = 'application/json'
        resp = json.dumps(resp)
        return resp

    def OPTIONS(self):
        # print('options_post')
        del cherrypy.response.headers['Allow']
        cherrypy.response.headers['Access-Control-Allow-Methods'] = 'GET, POST, NEW, OPTIONS'
        cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
        cherrypy.response.headers['Access-Control-Allow-Headers'] = 'Content-Type,Accept'
        cherrypy.response.headers['Access-Control-Max-Age'] = 604800


class SingleTimeSeriesResponse():

    def __init__(self, metricname, timestamp, value, tags):
        self.metric = metricname
        self.timestamp = timestamp * 1000
        self.value = value if value is not None else 0  # TODO check if we should return None or null
        self.tags = tags

    def str_expfmt(self) -> str:
        myset = []

        if self.tags:
            labels = ','.join('%s="%s"' % (k, v) for k, v in self.tags.items())
        else:
            labels = ''

        if labels:
            fmtstr = '{name}{{{labels}}} {value} {timestamp}'
        else:
            fmtstr = '{name} {value} {timestamp}'
        mstring = fmtstr.format(
            name=self.metric,
            labels=labels,
            value=repr(float(self.value)),
            timestamp=int(self.timestamp)
        )
        myset.append(mstring)
        return myset
