@@ -172,6 +172,7 @@ ARG WOODSTOX_CORE
 ARG JMX_EXPORTER
 ARG TARGETARCH
 ARG TINI
+ARG STACKABLE_USER_UID
 
 WORKDIR /stackable/spark-${PRODUCT}
 
@@ -283,45 +284,52 @@ LABEL name="Apache Spark" \
     summary="The Stackable image for Apache Spark with PySpark support." \
     description="This image is deployed by the Stackable Operator for Apache Spark on Kubernetes."
 
-RUN microdnf update && \
-    microdnf install \
-    gzip \
-    hostname \
-    # required for spark startup scripts
-    procps \
-    "python${PYTHON}" \
-    "python${PYTHON}-pip" \
-    zip \
-    # This is needed by the Spark UI to display process information using jps and jmap
-    # Copying the binaries from the builder stage failed.
-    "java-${JAVA_VERSION}-openjdk-devel" \
-    && microdnf clean all \
-    && rm -rf /var/cache/yum
-
-RUN ln -s /usr/bin/python${PYTHON} /usr/bin/python \
-    && ln -s /usr/bin/pip-${PYTHON} /usr/bin/pip
-
 
 ENV HOME=/stackable
 ENV SPARK_HOME=/stackable/spark
 ENV PATH=$SPARK_HOME:$PATH:/bin:$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$HOME/.local/bin
 ENV PYSPARK_PYTHON=/usr/bin/python
 ENV PYTHONPATH=$SPARK_HOME/python
 
+COPY spark-k8s/stackable /stackable
+COPY spark-k8s/licenses /licenses
+
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/dist /stackable/spark
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
 COPY --from=spark-builder /usr/bin/tini /usr/bin/tini
 
-RUN ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar \
-    # Symlink example jar, so that we can easily use it in tests
-    && ln -s /stackable/spark/examples/jars/spark-examples_*.jar /stackable/spark/examples/jars/spark-examples.jar
+RUN <<EOF
+# Heredoc lines are not &&-chained: without set -e a failed microdnf would
+# not fail the build, silently producing a broken image.
+set -e
+microdnf update
+# procps: required for spark startup scripts
+# java-*-openjdk-devel: This is needed by the Spark UI to display process information using jps and jmap
+#                       Copying just the binaries from the builder stage failed.
+microdnf install \
+  gzip \
+  hostname \
+  procps \
+  "python${PYTHON}" \
+  "python${PYTHON}-pip" \
+  zip \
+  "java-${JAVA_VERSION}-openjdk-devel"
+microdnf clean all
+rm -rf /var/cache/yum
+
+ln -s /usr/bin/python${PYTHON} /usr/bin/python
+ln -s /usr/bin/pip-${PYTHON} /usr/bin/pip
 
-USER ${STACKABLE_USER_UID}
-WORKDIR /stackable
+ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar
+# Symlink example jar, so that we can easily use it in tests
+ln -s /stackable/spark/examples/jars/spark-examples_*.jar /stackable/spark/examples/jars/spark-examples.jar
 
-COPY spark-k8s/stackable /stackable
-COPY spark-k8s/licenses /licenses
+# All files and folders owned by root to support running as arbitrary users
+# This is best practice as all container users will belong to the root group (0)
+chown -R ${STACKABLE_USER_UID}:0 /stackable
+chmod -R g=u /stackable
+EOF
+
+USER ${STACKABLE_USER_UID}
 
 WORKDIR /stackable/spark
 ENTRYPOINT [ "/stackable/run-spark.sh" ]
0 commit comments