Skip to content

Commit d5a7301

Browse files
authored
Merge pull request #275 from Helene/prepae_8.0.3-prod
Publish v8.0.3 release
2 parents eb14d10 + bb2e3f9 commit d5a7301

File tree

9 files changed

+227
-54
lines changed

9 files changed

+227
-54
lines changed

Dockerfile

Lines changed: 40 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,23 @@
1-
ARG BASE=registry.access.redhat.com/ubi9/ubi:9.5-1732804088
2-
FROM $BASE
1+
ARG BUILD_ENV=prod
2+
ARG BASE=registry.access.redhat.com/ubi9/ubi:9.5-1736404036
3+
4+
FROM $BASE as build_prod
5+
ONBUILD COPY ./requirements/requirements_ubi9.txt /root/requirements_ubi9.txt
6+
7+
FROM $BASE as build_test
8+
ONBUILD COPY ./requirements/requirements_ubi.in /root/requirements_ubi.in
9+
10+
FROM $BASE as build_custom
11+
ONBUILD COPY ./requirements/requirements.in /root/requirements.in
12+
13+
FROM build_${BUILD_ENV}
14+
15+
ARG BUILD_ENV
16+
ARG BASE
317

418
LABEL com.ibm.name="IBM Storage Scale bridge for Grafana"
519
LABEL com.ibm.vendor="IBM"
6-
LABEL com.ibm.version="8.0.3-dev"
20+
LABEL com.ibm.version="8.0.3"
721
LABEL com.ibm.url="https://github.com/IBM/ibm-spectrum-scale-bridge-for-grafana"
822
LABEL com.ibm.description="This tool translates the IBM Storage Scale performance data collected internally \
923
to the query requests acceptable by the Grafana integrated openTSDB plugin"
@@ -26,11 +40,9 @@ ENV GID=$GROUPID
2640

2741
ARG HTTPPROTOCOL=http
2842
ENV PROTOCOL=$HTTPPROTOCOL
29-
RUN echo "the HTTP/S protocol is set to $PROTOCOL"
3043

3144
ARG HTTPBASICAUTH=True
3245
ENV BASICAUTH=$HTTPBASICAUTH
33-
RUN echo "the HTTP/S basic authentication is set to $BASICAUTH"
3446

3547
ARG AUTHUSER=None
3648
ENV BASICAUTHUSER=$AUTHUSER
@@ -40,15 +52,12 @@ ENV BASICAUTHPASSW=$AUTHPASSW
4052

4153
ARG HTTPPORT=None
4254
ENV PORT=$HTTPPORT
43-
RUN echo "the OpentTSDB API HTTP/S port is set to $PORT"
4455

4556
ARG PROMPORT=None
46-
ENV PROMETHEUS=$PROMPORT
47-
RUN echo "the Prometheus API HTTPS port is set to $PROMETHEUS"
57+
ENV PROMETHEUS=$PROMPORT
4858

4959
ARG PERFMONPORT=9980
5060
ENV SERVERPORT=$PERFMONPORT
51-
RUN echo "the PERFMONPORT port is set to $SERVERPORT"
5261

5362
ARG CERTPATH='/etc/bridge_ssl/certs'
5463
ENV TLSKEYPATH=$CERTPATH
@@ -67,27 +76,33 @@ ENV APIKEYVALUE=$KEYVALUE
6776

6877
ARG PMCOLLECTORIP=0.0.0.0
6978
ENV SERVER=$PMCOLLECTORIP
70-
RUN echo "the pmcollector server ip is set to $SERVER"
7179

7280
ARG DEFAULTLOGPATH='/var/log/ibm_bridge_for_grafana'
7381
ENV LOGPATH=$DEFAULTLOGPATH
74-
RUN echo "the log will use $LOGPATH"
7582

7683
ARG DEFAULTLOGLEVEL=15
7784
ENV LOGLEVEL=$DEFAULTLOGLEVEL
7885

79-
COPY ./requirements/requirements_ubi9.txt /root/requirements_ubi9.txt
80-
# COPY ./requirements/requirements_ubi.in /root/requirements_ubi.in
81-
82-
RUN yum install -y python39 python3-pip
83-
84-
# RUN /usr/bin/python3 -m pip install pip-tools && \
85-
# /usr/bin/python3 -m piptools compile /root/requirements_ubi.in --output-file /root/requirements_ubi9.txt && \
86-
# echo "Compiled python packages: $(cat /root/requirements_ubi9.txt)"
87-
88-
RUN /usr/bin/python3 -m pip install -r /root/requirements_ubi9.txt && \
89-
echo "Installed python version: $(/usr/bin/python3 -V)" && \
90-
echo "Installed python packages: $(/usr/bin/python3 -m pip list)"
86+
RUN echo "the HTTP/S protocol is set to $PROTOCOL" && \
87+
echo "the HTTP/S basic authentication is set to $BASICAUTH" && \
88+
echo "the OpentTSDB API HTTP/S port is set to $PORT" && \
89+
echo "the Prometheus API HTTPS port is set to $PROMETHEUS" && \
90+
echo "the PERFMONPORT port is set to $SERVERPORT" && \
91+
echo "the pmcollector server ip is set to $SERVER" && \
92+
echo "the log will use $LOGPATH"
93+
94+
RUN if [ $(expr "$BASE" : '.*python.*') -eq 0 ]; then \
95+
yum install -y python39 python3-pip; \
96+
if [ "$BUILD_ENV" = "build_test" ]; then \
97+
python3 -m pip install pip-tools && \
98+
python3 -m piptools compile /root/requirements_ubi.in --output-file /root/requirements_ubi9.txt && \
99+
echo "Compiled python packages: $(cat /root/requirements_ubi9.txt)"; fi && \
100+
python3 -m pip install -r /root/requirements_ubi9.txt && \
101+
echo "Installed python version: $(python3 -V)" && \
102+
echo "Installed python packages: $(python3 -m pip list)"; else \
103+
echo "Already using python container as base image. No need to install it." && \
104+
python3 -m pip install -r /root/requirements.in && \
105+
echo "Installed python packages: $(python3 -m pip list)"; fi
91106

92107
USER root
93108

@@ -101,8 +116,8 @@ COPY ./source/gpfsConfig/mmsdrfs* /var/mmfs/gen/
101116
COPY ./source/gpfsConfig/ZIMon* /opt/IBM/zimon/
102117

103118
RUN if [ "${APIKEYVALUE:0:1}" = "/" ]; then ln -s $APIKEYVALUE /etc/perfmon-api-keys; echo "APIKEYVALUE is a PATH"; else echo "APIKEYVALUE not a PATH"; fi && \
104-
if [ -z "$TLSKEYPATH" ] || [ -z "$TLSCERTFILE" ] || [ -z "$TLSKEYFILE" ] && [ "$PROTOCOL" = "https" ]; then echo "TLSKEYPATH FOR SSL CONNECTION NOT SET - ERROR"; exit 1; else echo "PASS"; fi
105-
RUN echo "the ssl certificates path is set to $TLSKEYPATH"
119+
if [ -z "$TLSKEYPATH" ] || [ -z "$TLSCERTFILE" ] || [ -z "$TLSKEYFILE" ] && [ "$PROTOCOL" = "https" ]; then echo "TLSKEYPATH FOR SSL CONNECTION NOT SET - ERROR"; exit 1; else echo "PASS"; fi && \
120+
echo "the ssl certificates path is set to $TLSKEYPATH"
106121

107122
# Switch to the working directory
108123
WORKDIR /opt/IBM/bridge

docs/RELEASE_NOTES.md

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,14 @@
1+
# Version 8.0.3 (01/20/2025)
2+
Added GPFSPoolCap, GPFSInodeCap and GPFSFCMDA sensors to the supported PrometheusExporter endpoints \
3+
Added an HTTP REST API endpoint for querying the last metric sample (OpenTSDB plugin)
4+
Reworked the Dockerfile to allow building the grafana-bridge image from Red Hat UBI9/Python 3.9 \
5+
Changed the Dockerfile parent image to the registry.access.redhat.com/ubi9/ubi:9.5-1736404036 \
6+
7+
Tested with Grafana version 11.0.0
8+
Tested with RedHat community-powered Grafana operator v.5
9+
10+
11+
112
# Version 8.0.2 (12/18/2024)
213
Added GPFSNSDPool, GPFSNSDFS sensors to the supported PrometheusExporter endpoints
314
Added LOGLEVEL to the Dockerfile editable command line arguments
@@ -41,6 +52,14 @@ Tested with RedHat community-powered Grafana operator v.5
4152

4253

4354

55+
# Version 7.2.1 (01/15/2025)
56+
Changed the Dockerfile parent image to the registry.access.redhat.com/ubi9/ubi:9.5-1736404036 \
57+
58+
Tested with Grafana version 11
59+
Tested with RedHat community-powered Grafana operator v.5
60+
61+
62+
4463
# Version 7.2.0 (12/06/2024)
4564
Changed the Dockerfile parent image to the registry.access.redhat.com/ubi9/ubi:9.5-1732804088 \
4665
Speed up OpenTSDB /search/lookup REST Api endpoint response time \

docs/SUPPORT_MATRIX.md

Lines changed: 23 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,15 @@
11
The following matrix gives a quick overview of the supported software for the IBM Storage Scale bridge for Grafana packages by version number:
2+
# Version 8.0.3 (01/20/2025)
3+
Classic Scale:
4+
- Python 3.9
5+
- CherryPy 18.10.0
6+
- IBM Storage Scale system must run 5.2.2 and above
7+
- Grafana 11.0.0 and above
8+
9+
Cloud native:
10+
- IBM Storage Scale Container Native Storage Access (CNSA) devices having minReleaseLevel 5.2.3
11+
- RedHat community-powered Grafana-Operator v5
12+
213
# Version 8.0.2 (12/18/2024)
314
Classic Scale:
415
- Python 3.9
@@ -7,7 +18,7 @@ Classic Scale:
718
- Grafana 11.0.0 and above
819

920
Cloud native:
10-
- IBM Storage Scale Container Native Storage Access(CNSA) devices having minReleaseLevel 5.2.0
21+
- IBM Storage Scale Container Native Storage Access (CNSA) devices having minReleaseLevel 5.2.3
1122
- RedHat community-powered Grafana-Operator v5
1223

1324
# Version 8.0.1 (12/10/2024)
@@ -18,7 +29,6 @@ Classic Scale:
1829
- Grafana 11.0.0 and above
1930

2031
Cloud native:
21-
- IBM Storage Scale Container Native Storage Access(CNSA) devices having minReleaseLevel 5.2.0
2232
- RedHat community-powered Grafana-Operator v5
2333

2434
# Version 8.0.0 (04/26/2024)
@@ -29,7 +39,17 @@ Classic Scale:
2939
- Grafana 10.2.3 and above
3040

3141
Cloud native:
32-
- IBM Storage Scale Container Native Storage Access(CNSA) devices having minReleaseLevel 5.2.0
42+
- RedHat community-powered Grafana-Operator v5
43+
44+
# Version 7.2.1 (01/15/2025)
45+
Classic Scale:
46+
- Python 3.9
47+
- CherryPy 18.10.0
48+
- IBM Storage Scale system must run 5.1.9 and above
49+
- Grafana 11.0.0 and above
50+
51+
Cloud native:
52+
- IBM Storage Scale Container Native Storage Access (CNSA) devices having minReleaseLevel 5.1.9.8,5.2.2.1
3353
- RedHat community-powered Grafana-Operator v5
3454

3555
# Version 7.2.0 (12/06/2024)

requirements/requirements.in

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
# This file is used for generating python requirements list "requirements.txt".
2+
# It lists the packages that need to be installed to run the IBM Storage Scale Performance Monitoring bridge for Grafana
3+
# on top of the Redhat ubi9/python-39 image.
4+
#
5+
# To update, run:
6+
#
7+
# $ pip-compile requirements.in --output-file requirements.txt
8+
#
9+
# setuptools
10+
cherrypy
11+
PyYAML
12+
# psutil
13+
# urllib3
14+
requests

requirements/requirements_ubi.in

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# This file is used for generating python requirements list "requirements_ubi9.txt".
22
# It includes the packages needed to be installed to run the IBM Storage Scale Performance Monitoring bridge for Grafana
3-
# in an OpenShift production environment, on top of the redhat UBI8 image.
3+
# in an OpenShift production environment, on top of the Redhat UBI9 image.
44
#
55
# To update, run:
66
#

source/__version__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,4 +20,4 @@
2020
@author: HWASSMAN
2121
'''
2222

23-
__version__ = '8.0.3-dev'
23+
__version__ = '8.0.3'

source/opentsdb.py

Lines changed: 93 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -54,15 +54,29 @@ def TOPO(self):
5454
def format_response(self, data: dict, jreq: dict) -> List[dict]:
5555
respList = []
5656
metrics = set(data.values())
57-
for metric in metrics:
58-
for st in metric.timeseries:
59-
res = SingleTimeSeriesResponse(jreq.get('inputQuery'),
60-
jreq.get('showQuery'),
61-
jreq.get('globalAnnotations'),
62-
st.tags, st.aggregatedTags)
63-
# self.logger.trace(f'OpenTSDB queryResponse for :
64-
# {data.keys()[0]} with {len(st.dps)} datapoints')
65-
respList.append(res.to_dict(st.dps))
57+
if jreq.get('start') == 'last':
58+
for metric in metrics:
59+
for st in metric.timeseries:
60+
timestmp = ''
61+
val = 'null'
62+
if len(st.dps) > 0:
63+
timestmp = list(st.dps.keys())[0]
64+
val = st.dps[timestmp]
65+
res = LastSingleTimeSeriesResponse(jreq.get('inputQuery'),
66+
timestmp,
67+
val,
68+
st.tags)
69+
respList.append(res.to_dict())
70+
else:
71+
for metric in metrics:
72+
for st in metric.timeseries:
73+
res = SingleTimeSeriesResponse(jreq.get('inputQuery'),
74+
jreq.get('showQuery'),
75+
jreq.get('globalAnnotations'),
76+
st.tags, st.aggregatedTags)
77+
# self.logger.trace(f'OpenTSDB queryResponse for :
78+
# {data.keys()[0]} with {len(st.dps)} datapoints')
79+
respList.append(res.to_dict(st.dps))
6680
return respList
6781

6882
@execution_time()
@@ -115,29 +129,34 @@ def build_collector(self, jreq: dict) -> SensorCollector:
115129

116130
q = jreq.get('inputQuery')
117131

118-
period = self.md.getSensorPeriodForMetric(q.get('metric'))
132+
sensor = self.TOPO.getSensorForMetric(q.get('metric'))
133+
period = self.md.getSensorPeriod(sensor)
119134
if period < 1:
120135
self.logger.error(MSG['SensorDisabled'].format(q.get('metric')))
121136
raise cherrypy.HTTPError(
122137
400, MSG['SensorDisabled'].format(q.get('metric')))
123138

124-
sensor = self.TOPO.getSensorForMetric(q.get('metric'))
125-
126139
args = {}
127140
args['metricsaggr'] = {q.get('metric'): q.get('aggregator')}
128-
args['start'] = str(int(int(str(jreq.get('start'))) / 1000))
129-
if jreq.get('end') is not None:
130-
args['end'] = str(int(int(str(jreq.get('end'))) / 1000))
131141

132-
if q.get('downsample'):
133-
args['dsOp'] = self._get_downsmpl_op(q.get('downsample'))
134-
args['dsBucketSize'] = self._calc_bucket_size(q.get('downsample'))
142+
if jreq.get('start') == 'last':
143+
args['nsamples'] = 1
144+
if q.get('tags'):
145+
args['filters'] = q.get('tags')
146+
else:
147+
args['start'] = str(int(int(str(jreq.get('start'))) / 1000))
148+
if jreq.get('end') is not None:
149+
args['end'] = str(int(int(str(jreq.get('end'))) / 1000))
150+
151+
if q.get('downsample'):
152+
args['dsOp'] = self._get_downsmpl_op(q.get('downsample'))
153+
args['dsBucketSize'] = self._calc_bucket_size(q.get('downsample'))
135154

136-
if q.get('filters'):
137-
filters, grouptags = self._parse_input_query_filters(
138-
q.get('filters'))
139-
args['filters'] = filters
140-
args['grouptags'] = grouptags
155+
if q.get('filters'):
156+
filters, grouptags = self._parse_input_query_filters(
157+
q.get('filters'))
158+
args['filters'] = filters
159+
args['grouptags'] = grouptags
141160

142161
args['rawData'] = q.get('explicitTags', False)
143162

@@ -296,6 +315,42 @@ def GET(self, **params):
296315
elif 'lookup' in cherrypy.request.script_name:
297316
resp = self.lookup(params)
298317

318+
# /api/query/last
319+
elif '/api/query/last' == cherrypy.request.script_name:
320+
jreq = {}
321+
322+
if params.get('timeseries') is None:
323+
self.logger.error(MSG['QueryError'].format('empty'))
324+
raise cherrypy.HTTPError(400, ERR[400])
325+
326+
queries = []
327+
timeseries = params.get('timeseries')
328+
if not isinstance(timeseries, list):
329+
timeseries = [timeseries]
330+
for timeserie in timeseries:
331+
try:
332+
metricDict = {}
333+
params_list = re.split(r'\{(.*)\}', timeserie.strip())
334+
if len(params_list[0]) == 0:
335+
break
336+
metricDict['metric'] = params_list[0]
337+
338+
if len(params_list) > 1:
339+
attr = params_list[1]
340+
filterBy = dict(x.split('=') for x in attr.split(','))
341+
metricDict['tags'] = filterBy
342+
queries.append(metricDict)
343+
344+
except Exception as e:
345+
self.logger.exception(MSG['IntError'].format(str(e)))
346+
raise cherrypy.HTTPError(500, MSG[500])
347+
if len(queries) == 0:
348+
raise cherrypy.HTTPError(400, ERR[400])
349+
jreq['start'] = 'last'
350+
jreq['queries'] = queries
351+
352+
resp = self.query(jreq)
353+
299354
elif 'aggregators' in cherrypy.request.script_name:
300355
resp = ["noop", "sum", "avg", "max", "min", "rate"]
301356

@@ -336,7 +391,7 @@ def POST(self):
336391
raise cherrypy.HTTPError(400, ERR[400])
337392

338393
# /api/query
339-
if 'query' in cherrypy.request.script_name:
394+
if '/api/query' == cherrypy.request.script_name:
340395

341396
# read query request parameters
342397
jreq = cherrypy.request.json
@@ -399,3 +454,17 @@ def to_dict(self, dps: dict = None):
399454
if dps:
400455
res['dps'] = dps
401456
return res
457+
458+
459+
class LastSingleTimeSeriesResponse(object):
460+
461+
def __init__(self, inputQuery, timestmp, value, tags: dict = None):
462+
self.metric = inputQuery.get('metric')
463+
self.timestamp = timestmp
464+
self.value = value
465+
self.tags = tags or defaultdict(list)
466+
467+
def to_dict(self):
468+
''' Converts the LastSingleTimeSeriesResponse object to dict. '''
469+
res = self.__dict__
470+
return res

source/zimonGrafanaIntf.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -303,6 +303,12 @@ def main(argv):
303303
}
304304
}
305305
)
306+
# query metric last value
307+
cherrypy.tree.mount(api, '/api/query/last',
308+
{'/':
309+
{'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
310+
}
311+
)
306312
# query for metric name (openTSDB: zimon extension returns keys as well)
307313
cherrypy.tree.mount(api, '/api/suggest',
308314
{'/':

0 commit comments

Comments
 (0)