@@ -12,6 +12,7 @@ FROM stackable/image/hbase AS hbase-builder
 FROM stackable/image/java-devel AS spark-source-builder

 ARG PRODUCT
+ARG STACKABLE_USER_UID

 RUN <<EOF
 microdnf update
@@ -34,10 +35,10 @@

 WORKDIR /stackable/spark

-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/patches/apply_patches.sh \
     patches/apply_patches.sh
-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/patches/${PRODUCT} \
     patches/${PRODUCT}

@@ -52,6 +53,7 @@ ARG PRODUCT
 ARG HADOOP
 ARG HBASE
 ARG HBASE_CONNECTOR
+ARG STACKABLE_USER_UID

 RUN <<EOF
 microdnf update
@@ -70,7 +72,7 @@ WORKDIR /stackable
 # versions used by Spark. The pom.xml defines child modules which are
 # not required and not copied, therefore mvn must be called with the
 # parameter --non-recursive.
-COPY --chown=stackable:stackable --from=spark-source-builder \
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-source-builder \
     /stackable/spark/pom.xml \
     spark/

@@ -83,10 +85,10 @@

 # Patch the hbase-connectors source code
 WORKDIR /stackable/hbase-connectors
-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/hbase-connectors-patches/apply_patches.sh \
     patches/apply_patches.sh
-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/hbase-connectors-patches/${HBASE_CONNECTOR} \
     patches/${HBASE_CONNECTOR}
 RUN patches/apply_patches.sh ${HBASE_CONNECTOR}
@@ -173,7 +175,7 @@ ARG TINI

 WORKDIR /stackable/spark-${PRODUCT}

-COPY --chown=stackable:stackable --from=spark-source-builder \
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-source-builder \
     /stackable/spark/ \
     ./

@@ -200,35 +202,35 @@ RUN curl -o /usr/bin/tini "https://repo.stackable.tech/repository/packages/tini/
 WORKDIR /stackable/spark-${PRODUCT}/dist/jars

 # Copy modules required for s3a://
-COPY --from=hadoop-builder --chown=stackable:stackable \
+COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hadoop/share/hadoop/tools/lib/hadoop-aws-${HADOOP}.jar \
     /stackable/hadoop/share/hadoop/tools/lib/aws-java-sdk-bundle-${AWS_JAVA_SDK_BUNDLE}.jar \
     ./

 # Copy modules required for abfs://
-COPY --from=hadoop-builder --chown=stackable:stackable \
+COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hadoop/share/hadoop/tools/lib/hadoop-azure-${HADOOP}.jar \
     /stackable/hadoop/share/hadoop/tools/lib/azure-storage-${AZURE_STORAGE}.jar \
     /stackable/hadoop/share/hadoop/tools/lib/azure-keyvault-core-${AZURE_KEYVAULT_CORE}.jar \
     ./

 # Copy the HBase connector including required modules
-COPY --from=hbase-connectors-builder --chown=stackable:stackable \
+COPY --from=hbase-connectors-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/spark/jars/* \
     ./

 # Copy modules required to access HBase
-COPY --from=hbase-builder --chown=stackable:stackable \
+COPY --from=hbase-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hbase/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${HBASE}.jar \
     /stackable/hbase/lib/shaded-clients/hbase-shaded-mapreduce-${HBASE}.jar \
     ./
 # Copy modules required to access HBase if $HBASE == 2.4.x
-COPY --from=hbase-builder --chown=stackable:stackable \
+COPY --from=hbase-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hbase/lib/client-facing-thirdparty/htrace-core4-*-incubating.jar \
     /stackable/hbase/lib/client-facing-thirdparty/slf4j-reload4j-*.jar \
     ./
 # Copy modules required to access HBase if $HBASE == 2.6.x
-COPY --from=hbase-builder --chown=stackable:stackable \
+COPY --from=hbase-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hbase/lib/client-facing-thirdparty/opentelemetry-api-*.jar \
     /stackable/hbase/lib/client-facing-thirdparty/opentelemetry-context-*.jar \
     /stackable/hbase/lib/client-facing-thirdparty/opentelemetry-semconv-*-alpha.jar \
@@ -271,7 +273,7 @@ ARG PRODUCT
 ARG PYTHON
 ARG RELEASE
 ARG JMX_EXPORTER
-
+ARG STACKABLE_USER_UID

 LABEL name="Apache Spark" \
@@ -306,21 +308,20 @@ ENV PATH=$SPARK_HOME:$PATH:/bin:$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$HOME/.local/b
 ENV PYSPARK_PYTHON=/usr/bin/python
 ENV PYTHONPATH=$SPARK_HOME/python

-COPY --chown=stackable:stackable --from=spark-builder /stackable/spark-${PRODUCT}/dist /stackable/spark
-COPY --chown=stackable:stackable --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
-COPY --chown=stackable:stackable --from=spark-builder /stackable/jmx /stackable/jmx
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/dist /stackable/spark
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
 COPY --from=spark-builder /usr/bin/tini /usr/bin/tini

 RUN ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar \
     # Symlink example jar, so that we can easily use it in tests
     && ln -s /stackable/spark/examples/jars/spark-examples_*.jar /stackable/spark/examples/jars/spark-examples.jar

-USER stackable
+USER ${STACKABLE_USER_UID}
 WORKDIR /stackable

 COPY spark-k8s/stackable /stackable
 COPY spark-k8s/licenses /licenses

-
 WORKDIR /stackable/spark
 ENTRYPOINT [ "/stackable/run-spark.sh" ]
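
Every hunk above applies the same pattern: file ownership moves from the named stackable user to a build-arg UID paired with the root group (0), and the final stage switches USER to that UID, which keeps copied files accessible when the container is started under an arbitrary UID (as OpenShift does). The sketch below isolates that pattern; the base image, the default UID value of 1000, the entrypoint.sh file, and the build command are assumptions for illustration and are not taken from this diff.

# Minimal sketch of the UID-plus-group-0 ownership pattern used in the diff above.
FROM registry.access.redhat.com/ubi9/ubi-minimal AS example

# Assumed default; the real value is supplied at build time.
ARG STACKABLE_USER_UID=1000

# Owning files as ${STACKABLE_USER_UID}:0 keeps them readable/writable via the
# root group even when the runtime UID differs from the build-time UID.
COPY --chown=${STACKABLE_USER_UID}:0 entrypoint.sh /stackable/entrypoint.sh

# Run as the numeric UID rather than a named user.
USER ${STACKABLE_USER_UID}
ENTRYPOINT ["/stackable/entrypoint.sh"]

# Example build invocation (assumption, not part of the diff):
#   docker build --build-arg STACKABLE_USER_UID=1000 -t example .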