@@ -199,8 +199,11 @@ RUN export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g" \
# <<< Build spark

# Get the correct `tini` binary for our architecture.
- RUN curl -o /usr/bin/tini "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-${TARGETARCH}" \
- && chmod +x /usr/bin/tini
+ RUN <<EOF
+ curl --fail "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-${TARGETARCH}" \
+ -o /usr/bin/tini
+ chmod +x /usr/bin/tini
+ EOF

WORKDIR /stackable/spark-${PRODUCT}/dist/jars

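The heredoc form above is easier to read than the old `&&` chain, but note that unless the Dockerfile also sets a strict SHELL, the heredoc body runs under the default shell without `-e`, so a failing `curl` in the middle of a block does not necessarily abort the build. A minimal fail-fast sketch of the same step, assuming bash is the default shell and that TINI/TARGETARCH are build args declared earlier in the Dockerfile (the --location/--retry flags are extra hardening, not part of this commit):

RUN <<EOF
# Abort the RUN as soon as any command fails or an unset variable is used.
set -euo pipefail
curl --fail --location --retry 3 \
    "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-${TARGETARCH}" \
    -o /usr/bin/tini
chmod +x /usr/bin/tini
EOF
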
@@ -241,15 +244,28 @@ COPY --from=hbase-builder --chown=${STACKABLE_USER_UID}:0 \

WORKDIR /stackable/spark-${PRODUCT}/dist/extra-jars

+ COPY spark-k8s/stackable/jmx /stackable/jmx
+
+ RUN <<EOF
# Download jackson-dataformat-xml, stax2-api, and woodstox-core which are required for logging.
- RUN curl -O https://repo.stackable.tech/repository/packages/jackson-dataformat-xml/jackson-dataformat-xml-${JACKSON_DATAFORMAT_XML}.jar \
- && curl -O https://repo.stackable.tech/repository/packages/stax2-api/stax2-api-${STAX2_API}.jar \
- && curl -O https://repo.stackable.tech/repository/packages/woodstox-core/woodstox-core-${WOODSTOX_CORE}.jar
+ curl --fail -O https://repo.stackable.tech/repository/packages/jackson-dataformat-xml/jackson-dataformat-xml-${JACKSON_DATAFORMAT_XML}.jar
+ curl --fail -O https://repo.stackable.tech/repository/packages/stax2-api/stax2-api-${STAX2_API}.jar
+ curl --fail -O https://repo.stackable.tech/repository/packages/woodstox-core/woodstox-core-${WOODSTOX_CORE}.jar

- WORKDIR /stackable/jmx
+ # Get the correct `tini` binary for our architecture.
+ curl --fail "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-${TARGETARCH}" \
+ -o /usr/bin/tini
+ chmod +x /usr/bin/tini

- RUN curl -O "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
+ # JMX Exporter
+ curl --fail "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" \
+ -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
+ ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar

+ chmod -R g=u /stackable/spark-${PRODUCT}/dist
+ chmod -R g=u /stackable/spark-${PRODUCT}/assembly/target/bom.json
+ chmod -R g=u /stackable/jmx
+ EOF

FROM stackable/image/java-base AS final

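The `chmod -R g=u` lines at the end of the builder heredoc follow the usual arbitrary-UID pattern: the container later runs as a random UID that belongs to the root group (GID 0), so the group permission bits must mirror the owner bits for the files to stay usable. Doing this in the builder stage, and relying on `COPY --chown` in the final stage, also means the final image no longer needs a recursive chown/chmod, which typically duplicates every touched file into an extra layer. A small illustration of what `g=u` does, not taken from the Dockerfile:

touch /tmp/demo            # hypothetical file
chown 1000:0 /tmp/demo     # owned by UID 1000, root group
chmod 750 /tmp/demo        # owner rwx, group r-x, others ---
chmod g=u /tmp/demo        # group bits copied from the owner -> mode 770
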
@@ -274,14 +290,15 @@ ENV PATH=$SPARK_HOME:$PATH:/bin:$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$HOME/.local/b
ENV PYSPARK_PYTHON=/usr/bin/python
ENV PYTHONPATH=$SPARK_HOME/python

- COPY spark-k8s/stackable /stackable
- COPY spark-k8s/licenses /licenses

COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/dist /stackable/spark
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
COPY --from=spark-builder /usr/bin/tini /usr/bin/tini

+ COPY --chown=${STACKABLE_USER_UID}:0 spark-k8s/stackable/run-spark.sh /stackable/run-spark.sh
+ COPY --chown=${STACKABLE_USER_UID}:0 spark-k8s/licenses /licenses
+
RUN <<EOF
microdnf update
# procps: required for spark startup scripts
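Because the blanket chown/chmod of /stackable disappears from the final stage (see the last hunk), everything copied into the final image now needs its ownership set at COPY time, hence the explicit `--chown` on run-spark.sh and the licenses. As a hedged alternative (not what this commit does), BuildKit's COPY also accepts a numeric `--chmod`, which would fold the later `chmod -R g=u /stackable/run-spark.sh` into the copy itself; the 775 mode below is an assumption about the desired permissions:

COPY --chown=${STACKABLE_USER_UID}:0 --chmod=775 spark-k8s/stackable/run-spark.sh /stackable/run-spark.sh
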
@@ -299,20 +316,19 @@ microdnf clean all
rm -rf /var/cache/yum

ln -s /usr/bin/python${PYTHON} /usr/bin/python
+ chown -h ${STACKABLE_USER_UID}:0 /usr/bin/python
ln -s /usr/bin/pip-${PYTHON} /usr/bin/pip
+ chown -h ${STACKABLE_USER_UID}:0 /usr/bin/pip

- ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar
# Symlink example jar, so that we can easily use it in tests
ln -s /stackable/spark/examples/jars/spark-examples_*.jar /stackable/spark/examples/jars/spark-examples.jar
+ chown -h ${STACKABLE_USER_UID}:0 /stackable/spark/examples/jars/spark-examples.jar

- # All files and folders owned by root group to support running as arbitrary users.
- # This is best practice as all container users will belong to the root group (0).
- chown -R ${STACKABLE_USER_UID}:0 /stackable
- chmod -R g=u /stackable
+ chmod -R g=u /stackable/run-spark.sh
EOF

# ----------------------------------------
- # Attention: We are changing the group of all files in /stackable directly above
+ # Attention:
# If you do any file based actions (copying / creating etc.) below this comment you
# absolutely need to make sure that the correct permissions are applied!
# chown ${STACKABLE_USER_UID}:0
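
One detail worth calling out in the last hunk: the new chown calls use `-h` because a plain `chown` on a symlink dereferences it and re-owns the target (here the system python/pip binaries and the versioned examples jar), while `-h` re-owns only the link itself. A quick illustration with a hypothetical Python version:

ln -s /usr/bin/python3.11 /usr/bin/python      # hypothetical target version
chown 1000:0 /usr/bin/python                   # would re-own /usr/bin/python3.11, not the link
chown -h 1000:0 /usr/bin/python                # re-owns only the symlink itself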