Skip to content

Commit 56af8a5

Browse files
authored
Merge branch 'main' into feat/custom-product-versions-hadoop
2 parents c35a44e + ab869d3 commit 56af8a5

File tree

4 files changed

+146
-2
lines changed

4 files changed

+146
-2
lines changed

CHANGELOG.md

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -124,7 +124,6 @@ All notable changes to this project will be documented in this file.
124124
- nifi: Remove `2.2.0` ([#1114]).
125125
- kafka: Remove `3.7.1` and `3.8.0` ([#1117]).
126126
- spark-connect-client: Remove `3.5.5` ([#1142]).
127-
- spark-k8s: Remove the JMX exporter jar ([#1157]).
128127

129128
[nifi-iceberg-bundle]: https://github.com/stackabletech/nifi-iceberg-bundle
130129
[#1025]: https://github.com/stackabletech/docker-images/pull/1025
@@ -186,7 +185,6 @@ All notable changes to this project will be documented in this file.
186185
[#1151]: https://github.com/stackabletech/docker-images/pull/1151
187186
[#1152]: https://github.com/stackabletech/docker-images/pull/1152
188187
[#1156]: https://github.com/stackabletech/docker-images/pull/1156
189-
[#1157]: https://github.com/stackabletech/docker-images/pull/1157
190188
[#1163]: https://github.com/stackabletech/docker-images/pull/1163
191189
[#1165]: https://github.com/stackabletech/docker-images/pull/1165
192190
[#1173]: https://github.com/stackabletech/docker-images/pull/1173

spark-k8s/Dockerfile

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -142,6 +142,7 @@ ARG AZURE_KEYVAULT_CORE
142142
ARG JACKSON_DATAFORMAT_XML
143143
ARG STAX2_API
144144
ARG WOODSTOX_CORE
145+
ARG JMX_EXPORTER
145146
ARG TARGETARCH
146147
ARG TINI
147148
ARG RELEASE
@@ -227,6 +228,8 @@ RUN cp /stackable/spark-${PRODUCT}-stackable${RELEASE}/connector/connect/server/
227228
&& cp /stackable/spark-${PRODUCT}-stackable${RELEASE}/connector/connect/common/target/spark-connect-common_*-${PRODUCT}-stackable${RELEASE}.jar . \
228229
&& cp /stackable/spark-${PRODUCT}-stackable${RELEASE}/connector/connect/client/jvm/target/spark-connect-client-jvm_2.12-${PRODUCT}-stackable${RELEASE}.jar .
229230

231+
COPY spark-k8s/stackable/jmx /stackable/jmx
232+
230233
WORKDIR /stackable/spark-${PRODUCT}-stackable${RELEASE}/dist/extra-jars
231234

232235
RUN <<EOF
@@ -243,8 +246,14 @@ curl --fail "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-$
243246
-o /usr/bin/tini
244247
chmod +x /usr/bin/tini
245248

249+
# JMX Exporter
250+
curl --fail "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" \
251+
-o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
252+
ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar
253+
246254
chmod -R g=u /stackable/spark-${PRODUCT}-stackable${RELEASE}/dist
247255
chmod -R g=u /stackable/spark-${PRODUCT}-stackable${RELEASE}/assembly/target/bom.json
256+
chmod -R g=u /stackable/jmx
248257
EOF
249258

250259
# TODO: java-base installs the Adoptium dnf repo and the Temurin jre which is not needed here.
@@ -254,6 +263,7 @@ FROM stackable/image/java-base AS final
254263
ARG PRODUCT
255264
ARG PYTHON
256265
ARG RELEASE
266+
ARG JMX_EXPORTER
257267
ARG HBASE_CONNECTOR
258268
ARG STACKABLE_USER_UID
259269

@@ -279,6 +289,7 @@ COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRO
279289
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-source-builder /stackable/spark-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable
280290
COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-connectors-builder /stackable/hbase-connector-${HBASE_CONNECTOR}-stackable${RELEASE}-src.tar.gz /stackable
281291
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}-stackable${RELEASE}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}-stackable${RELEASE}.cdx.json
292+
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
282293
COPY --from=spark-builder /usr/bin/tini /usr/bin/tini
283294

284295
COPY --chown=${STACKABLE_USER_UID}:0 spark-k8s/stackable/run-spark.sh /stackable/run-spark.sh
@@ -312,6 +323,7 @@ chown -h ${STACKABLE_USER_UID}:0 /stackable/spark/examples/jars/spark-examples.j
312323

313324
# fix permissions
314325
chmod g=u /stackable/spark
326+
chmod g=u /stackable/jmx
315327
chmod g=u /stackable/run-spark.sh
316328
chmod g=u /stackable/*-src.tar.gz
317329
EOF
Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
---
2+
rules:
3+
4+
# These come from the master
5+
# Example: master.aliveWorkers
6+
- pattern: "metrics<name=master\\.(.*), type=counters><>Value"
7+
name: spark_master_$1
8+
9+
# These come from the worker
10+
# Example: worker.coresFree
11+
- pattern: "metrics<name=worker\\.(.*), type=counters><>Value"
12+
name: spark_worker_$1
13+
14+
# These come from the application driver
15+
# Example: app-20160809000059-0000.driver.DAGScheduler.stage.failedStages
16+
- pattern: "metrics<name=(.*)\\.driver\\.(DAGScheduler|BlockManager|jvm)\\.(.*), type=gauges><>Value"
17+
name: spark_driver_$2_$3
18+
type: GAUGE
19+
labels:
20+
app_id: "$1"
21+
22+
# These come from the application driver
23+
# Emulate timers for DAGScheduler like messageProcessingTime
24+
- pattern: "metrics<name=(.*)\\.driver\\.DAGScheduler\\.(.*), type=counters><>Count"
25+
name: spark_driver_DAGScheduler_$2_total
26+
type: COUNTER
27+
labels:
28+
app_id: "$1"
29+
30+
- pattern: "metrics<name=(.*)\\.driver\\.HiveExternalCatalog\\.(.*), type=counters><>Count"
31+
name: spark_driver_HiveExternalCatalog_$2_total
32+
type: COUNTER
33+
labels:
34+
app_id: "$1"
35+
36+
# These come from the application driver
37+
# Emulate histograms for CodeGenerator
38+
- pattern: "metrics<name=(.*)\\.driver\\.CodeGenerator\\.(.*), type=counters><>Count"
39+
name: spark_driver_CodeGenerator_$2_total
40+
type: COUNTER
41+
labels:
42+
app_id: "$1"
43+
44+
# These come from the application driver
45+
# Emulate timer (keep only count attribute) plus counters for LiveListenerBus
46+
- pattern: "metrics<name=(.*)\\.driver\\.LiveListenerBus\\.(.*), type=counters><>Count"
47+
name: spark_driver_LiveListenerBus_$2_total
48+
type: COUNTER
49+
labels:
50+
app_id: "$1"
51+
52+
# Get Gauge type metrics for LiveListenerBus
53+
- pattern: "metrics<name=(.*)\\.driver\\.LiveListenerBus\\.(.*), type=gauges><>Value"
54+
name: spark_driver_LiveListenerBus_$2
55+
type: GAUGE
56+
labels:
57+
app_id: "$1"
58+
59+
# These come from the application driver if it's a streaming application
60+
# Example: app-20160809000059-0000.driver.com.example.ClassName.StreamingMetrics.streaming.lastCompletedBatch_schedulingDelay
61+
- pattern: "metrics<name=(.*)\\.driver\\.(.*)\\.StreamingMetrics\\.streaming\\.(.*), type=gauges><>Value"
62+
name: spark_driver_streaming_$3
63+
labels:
64+
app_id: "$1"
65+
app_name: "$2"
66+
67+
# These come from the application driver if it's a structured streaming application
68+
# Example: app-20160809000059-0000.driver.spark.streaming.QueryName.inputRate-total
69+
- pattern: "metrics<name=(.*)\\.driver\\.spark\\.streaming\\.(.*)\\.(.*), type=gauges><>Value"
70+
name: spark_driver_structured_streaming_$3
71+
labels:
72+
app_id: "$1"
73+
query_name: "$2"
74+
75+
# These come from the application executors
76+
# Examples:
77+
# app-20160809000059-0000.0.executor.threadpool.activeTasks (value)
78+
# app-20160809000059-0000.0.executor.JvmGCtime (counter)
79+
80+
# filesystem metrics are declared as gauge metrics, but are actually counters
81+
- pattern: "metrics<name=(.*)\\.(.*)\\.executor\\.filesystem\\.(.*), type=gauges><>Value"
82+
name: spark_executor_filesystem_$3_total
83+
type: COUNTER
84+
labels:
85+
app_id: "$1"
86+
executor_id: "$2"
87+
88+
- pattern: "metrics<name=(.*)\\.(.*)\\.executor\\.(.*), type=gauges><>Value"
89+
name: spark_executor_$3
90+
type: GAUGE
91+
labels:
92+
app_id: "$1"
93+
executor_id: "$2"
94+
95+
- pattern: "metrics<name=(.*)\\.(.*)\\.executor\\.(.*), type=counters><>Count"
96+
name: spark_executor_$3_total
97+
type: COUNTER
98+
labels:
99+
app_id: "$1"
100+
executor_id: "$2"
101+
102+
- pattern: "metrics<name=(.*)\\.(.*)\\.ExecutorMetrics\\.(.*), type=gauges><>Value"
103+
name: spark_executor_$3
104+
type: GAUGE
105+
labels:
106+
app_id: "$1"
107+
executor_id: "$2"
108+
109+
# These come from the application executors
110+
# Example: app-20160809000059-0000.0.jvm.threadpool.activeTasks
111+
- pattern: "metrics<name=(.*)\\.([0-9]+)\\.(jvm|NettyBlockTransfer)\\.(.*), type=gauges><>Value"
112+
name: spark_executor_$3_$4
113+
type: GAUGE
114+
labels:
115+
app_id: "$1"
116+
executor_id: "$2"
117+
118+
- pattern: "metrics<name=(.*)\\.([0-9]+)\\.HiveExternalCatalog\\.(.*), type=counters><>Count"
119+
name: spark_executor_HiveExternalCatalog_$3_total
120+
type: COUNTER
121+
labels:
122+
app_id: "$1"
123+
executor_id: "$2"
124+
125+
# These come from the application driver
126+
# Emulate histograms for CodeGenerator
127+
- pattern: "metrics<name=(.*)\\.([0-9]+)\\.CodeGenerator\\.(.*), type=counters><>Count"
128+
name: spark_executor_CodeGenerator_$3_total
129+
type: COUNTER
130+
labels:
131+
app_id: "$1"
132+
executor_id: "$2"

spark-k8s/versions.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
"stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
1414
"woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
1515
"vector": "0.47.0",
16+
"jmx_exporter": "1.3.0",
1617
"tini": "0.19.0",
1718
"hbase_connector": "1.0.1",
1819
},
@@ -30,6 +31,7 @@
3031
"stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
3132
"woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
3233
"vector": "0.47.0",
34+
"jmx_exporter": "1.3.0",
3335
"tini": "0.19.0",
3436
"hbase_connector": "1.0.1",
3537
},

0 commit comments

Comments
 (0)