Commit c03f406

fix: Use STACKABLE_USER_GID arg instead of the hard-coded GID of 0
1 parent 3033cfc commit c03f406
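
Every Dockerfile in the commit follows the same pattern: declare the STACKABLE_USER_GID build argument in each stage that needs it, then pass it to COPY --chown and chown calls instead of the hard-coded group 0. The repeated ARG STACKABLE_USER_GID lines are needed because build arguments do not carry over into a stage unless re-declared there. A minimal sketch of the pattern, using illustrative default values and a placeholder base image rather than anything taken from this repository:

# Illustrative defaults; in the real build these args are injected by the build tooling.
ARG STACKABLE_USER_UID=1000
ARG STACKABLE_USER_GID=1000

FROM example-base AS final
# Args must be re-declared inside each stage to be in scope there.
ARG STACKABLE_USER_UID
ARG STACKABLE_USER_GID

# Before: --chown=${STACKABLE_USER_UID}:0
COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} app/ /stackable/app/

RUN <<EOF
# Before: chown ${STACKABLE_USER_UID}:0 /stackable/app
chown -R ${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /stackable/app
chmod g=u /stackable/app
EOF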

31 files changed: +260 -213 lines changed

airflow/Dockerfile

Lines changed: 7 additions & 5 deletions
@@ -44,6 +44,7 @@ ARG SHARED_STATSD_EXPORTER
 ARG PYTHON
 ARG TARGETARCH
 ARG STACKABLE_USER_UID
+ARG STACKABLE_USER_GID
 ARG S3FS
 ARG CYCLONEDX_BOM
 ARG UV
@@ -114,7 +115,7 @@ EOF
 
 COPY --from=statsd_exporter-builder /statsd_exporter/statsd_exporter /stackable/statsd_exporter
 COPY --from=statsd_exporter-builder /statsd_exporter/statsd_exporter-${SHARED_STATSD_EXPORTER}.cdx.json /stackable/statsd_exporter-${SHARED_STATSD_EXPORTER}.cdx.json
-COPY --from=gitsync-image --chown=${STACKABLE_USER_UID}:0 /git-sync /stackable/git-sync
+COPY --from=gitsync-image --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /git-sync /stackable/git-sync
 
 RUN <<EOF
 mkdir -pv /stackable/airflow
@@ -132,6 +133,7 @@ ARG RELEASE
 ARG TINI
 ARG TARGETARCH
 ARG STACKABLE_USER_UID
+ARG STACKABLE_USER_GID
 
 LABEL name="Apache Airflow" \
 maintainer="[email protected]" \
@@ -146,11 +148,11 @@ ENV AIRFLOW_USER_HOME_DIR=/stackable
 ENV PATH=$PATH:/bin:$HOME/app/bin
 ENV AIRFLOW_HOME=$HOME/airflow
 
-COPY --from=airflow-build-image --chown=${STACKABLE_USER_UID}:0 /stackable/ ${HOME}/
-COPY --from=airflow-build-image --chown=${STACKABLE_USER_UID}:0 /stackable/git-sync ${HOME}/git-sync
+COPY --from=airflow-build-image --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /stackable/ ${HOME}/
+COPY --from=airflow-build-image --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /stackable/git-sync ${HOME}/git-sync
 
-COPY --chown=${STACKABLE_USER_UID}:0 airflow/stackable/utils/entrypoint.sh /entrypoint.sh
-COPY --chown=${STACKABLE_USER_UID}:0 airflow/stackable/utils/run-airflow.sh /run-airflow.sh
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} airflow/stackable/utils/entrypoint.sh /entrypoint.sh
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} airflow/stackable/utils/run-airflow.sh /run-airflow.sh
 
 COPY airflow/licenses /licenses
 
druid/Dockerfile

Lines changed: 13 additions & 11 deletions
@@ -12,6 +12,7 @@ ARG STAX2_API
 ARG WOODSTOX_CORE
 ARG AUTHORIZER
 ARG STACKABLE_USER_UID
+ARG STACKABLE_USER_GID
 ARG HADOOP
 
 # Setting this to anything other than "true" will keep the cache folders around (e.g. for Maven, NPM etc.)
@@ -36,10 +37,10 @@ EOF
 USER ${STACKABLE_USER_UID}
 WORKDIR /stackable
 
-COPY --chown=${STACKABLE_USER_UID}:0 druid/stackable/patches/patchable.toml /stackable/src/druid/stackable/patches/patchable.toml
-COPY --chown=${STACKABLE_USER_UID}:0 druid/stackable/patches/${PRODUCT} /stackable/src/druid/stackable/patches/${PRODUCT}
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} druid/stackable/patches/patchable.toml /stackable/src/druid/stackable/patches/patchable.toml
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} druid/stackable/patches/${PRODUCT} /stackable/src/druid/stackable/patches/${PRODUCT}
 
-COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:0 /stackable/patched-libs /stackable/patched-libs
+COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /stackable/patched-libs /stackable/patched-libs
 # Cache mounts are owned by root by default
 # We need to explicitly give the uid to use.
 # The cache id has to include the product version that we are building because otherwise
@@ -80,7 +81,7 @@ mvn \
 -Dcheckstyle.skip `# Skip checkstyle checks. We dont care if the code is properly formatted, it just wastes time` \
 -Dmaven.javadoc.skip=true `# Dont generate javadoc` \
 -Dmaven.gitcommitid.skip=true `# The gitcommitid plugin cannot work with git workspaces (ie: patchable)` \
-$(if [[ ${PRODUCT} != 30.* ]]; then echo --projects '!quidem-ut'; fi) `# This is just a maven module for tests. https://github.com/apache/druid/pull/16867 added https://raw.githubusercontent.com/kgyrtkirk/datasets/repo/ as a Maven repository, which fails to pull for us (Failed to execute goal on project druid-quidem-ut: Could not resolve dependencies for project org.apache.druid:druid-quidem-ut:jar:33.0.0: com.github.kgyrtkirk.datasets:kttm-nested:jar:0.1 was not found in https://build-repo.stackable.tech/repository/maven-public/). By disabling the maven module we dont pull in this weird dependency...`
+$(if [[ ${PRODUCT} != 30.* ]]; then echo --projects '!quidem-ut'; fi) `# This is just a maven module for tests. https://github.com/apache/druid/pull/16867 added https://raw.githubusercontent.com/kgyrtkirk/datasets/repo/ as a Maven repository, which fails to pull for us (Failed to execute goal on project druid-quidem-ut: Could not resolve dependencies for project org.apache.druid:druid-quidem-ut:jar:33.0.0: com.github.kgyrtkirk.datasets:kttm-nested:jar:${STACKABLE_USER_GID}.1 was not found in https://build-repo.stackable.tech/repository/maven-public/). By disabling the maven module we dont pull in this weird dependency...`
 
 mv distribution/target/apache-druid-${NEW_VERSION}-bin/apache-druid-${NEW_VERSION} /stackable/
 sed -i "s/${NEW_VERSION}/${ORIGINAL_VERSION}/g" distribution/target/bom.json
@@ -112,6 +113,7 @@ FROM stackable/image/java-base AS final
 ARG PRODUCT
 ARG RELEASE
 ARG STACKABLE_USER_UID
+ARG STACKABLE_USER_GID
 
 ARG NAME="Apache Druid"
 ARG DESCRIPTION="This image is deployed by the Stackable Operator for Apache Druid"
@@ -136,26 +138,26 @@ LABEL io.k8s.description="${DESCRIPTION}"
 LABEL io.k8s.display-name="${NAME}"
 
 
-COPY --chown=${STACKABLE_USER_UID}:0 --from=druid-builder /stackable/apache-druid-${PRODUCT}-stackable${RELEASE} /stackable/apache-druid-${PRODUCT}-stackable${RELEASE}
-COPY --chown=${STACKABLE_USER_UID}:0 --from=druid-builder /stackable/druid-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=druid-builder /stackable/apache-druid-${PRODUCT}-stackable${RELEASE} /stackable/apache-druid-${PRODUCT}-stackable${RELEASE}
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=druid-builder /stackable/druid-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable
 
-COPY --chown=${STACKABLE_USER_UID}:0 druid/stackable/bin /stackable/bin
-COPY --chown=${STACKABLE_USER_UID}:0 druid/licenses /licenses
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} druid/stackable/bin /stackable/bin
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} druid/licenses /licenses
 
 RUN <<EOF
 microdnf update
 microdnf clean all
 rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE}\n" | sort > /stackable/package_manifest.txt
-chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt
+chown ${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /stackable/package_manifest.txt
 chmod g=u /stackable/package_manifest.txt
 rm -rf /var/cache/yum
 
 ln -sf /stackable/apache-druid-${PRODUCT}-stackable${RELEASE} /stackable/druid
-chown -h ${STACKABLE_USER_UID}:0 stackable/druid
+chown -h ${STACKABLE_USER_UID}:${STACKABLE_USER_GID} stackable/druid
 
 # Force to overwrite the existing 'run-druid'
 ln -sf /stackable/bin/run-druid /stackable/druid/bin/run-druid
-chown -h ${STACKABLE_USER_UID}:0 /stackable/druid/bin/run-druid
+chown -h ${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /stackable/druid/bin/run-druid
 
 # fix missing permissions
 chmod -R g=u /stackable/bin
hadoop/Dockerfile

Lines changed: 24 additions & 21 deletions
@@ -11,11 +11,12 @@ ARG PROTOBUF
 ARG TARGETARCH
 ARG TARGETOS
 ARG STACKABLE_USER_UID
+ARG STACKABLE_USER_GID
 
 WORKDIR /stackable
 
-COPY --chown=${STACKABLE_USER_UID}:0 shared/protobuf/stackable/patches/patchable.toml /stackable/src/shared/protobuf/stackable/patches/patchable.toml
-COPY --chown=${STACKABLE_USER_UID}:0 shared/protobuf/stackable/patches/${PROTOBUF} /stackable/src/shared/protobuf/stackable/patches/${PROTOBUF}
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} shared/protobuf/stackable/patches/patchable.toml /stackable/src/shared/protobuf/stackable/patches/patchable.toml
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} shared/protobuf/stackable/patches/${PROTOBUF} /stackable/src/shared/protobuf/stackable/patches/${PROTOBUF}
 
 RUN <<EOF
 rpm --install --replacepkgs https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm
@@ -26,7 +27,7 @@ microdnf install boost1.78-devel automake libtool
 microdnf clean all
 rm -rf /var/cache/yum
 mkdir /opt/protobuf
-chown ${STACKABLE_USER_UID}:0 /opt/protobuf
+chown ${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /opt/protobuf
 EOF
 
 USER ${STACKABLE_USER_UID}
@@ -63,10 +64,10 @@ ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/j
 EOF
 
 WORKDIR /build
-COPY --chown=${STACKABLE_USER_UID}:0 hadoop/stackable/patches/patchable.toml /build/src/hadoop/stackable/patches/patchable.toml
-COPY --chown=${STACKABLE_USER_UID}:0 hadoop/stackable/patches/${PRODUCT} /build/src/hadoop/stackable/patches/${PRODUCT}
-COPY --chown=${STACKABLE_USER_UID}:0 hadoop/stackable/fuse_dfs_wrapper /build
-COPY --chown=${STACKABLE_USER_UID}:0 hadoop/stackable/jmx /stackable/jmx
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} hadoop/stackable/patches/patchable.toml /build/src/hadoop/stackable/patches/patchable.toml
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} hadoop/stackable/patches/${PRODUCT} /build/src/hadoop/stackable/patches/${PRODUCT}
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} hadoop/stackable/fuse_dfs_wrapper /build
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} hadoop/stackable/jmx /stackable/jmx
 USER ${STACKABLE_USER_UID}
 # Hadoop Pipes requires libtirpc to build, whose headers are not packaged in RedHat UBI, so skip building this module
 # Build from source to enable FUSE module, and to apply custom patches.
@@ -140,6 +141,7 @@ FROM stackable/image/java-devel AS hdfs-utils-builder
 ARG HDFS_UTILS
 ARG PRODUCT
 ARG STACKABLE_USER_UID
+ARG STACKABLE_USER_GID
 
 # Starting with hdfs-utils 0.4.0 we need to use Java 17 for compilation.
 # We can not simply use java-devel with Java 17, as it is also used to compile Hadoop in this
@@ -158,8 +160,8 @@ ENV JAVA_HOME="/usr/lib/jvm/temurin-17-jdk"
 USER ${STACKABLE_USER_UID}
 WORKDIR /stackable
 
-COPY --chown=${STACKABLE_USER_UID}:0 hadoop/hdfs-utils/stackable/patches/patchable.toml /stackable/src/hadoop/hdfs-utils/stackable/patches/patchable.toml
-COPY --chown=${STACKABLE_USER_UID}:0 hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS} /stackable/src/hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS}
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} hadoop/hdfs-utils/stackable/patches/patchable.toml /stackable/src/hadoop/hdfs-utils/stackable/patches/patchable.toml
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS} /stackable/src/hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS}
 
 # The Stackable HDFS utils contain an OPA authorizer, group mapper & topology provider.
 # The topology provider provides rack awareness functionality for HDFS by allowing users to specify Kubernetes
@@ -196,6 +198,7 @@ ARG TARGETOS
 ARG HDFS_UTILS
 ARG ASYNC_PROFILER
 ARG STACKABLE_USER_UID
+ARG STACKABLE_USER_GID
 
 LABEL \
 name="Apache Hadoop" \
@@ -207,16 +210,16 @@ LABEL \
 description="This image is deployed by the Stackable Operator for Apache Hadoop / HDFS."
 
 
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-builder /stackable/hadoop-${PRODUCT}-stackable${RELEASE} /stackable/hadoop-${PRODUCT}-stackable${RELEASE}
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-builder /stackable/hadoop-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable/
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-builder /stackable/async-profiler-${ASYNC_PROFILER}-* /stackable/async-profiler-${ASYNC_PROFILER}
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-builder /stackable/jmx /stackable/jmx
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-builder /stackable/protobuf-*-src.tar.gz /stackable/
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=hadoop-builder /stackable/hadoop-${PRODUCT}-stackable${RELEASE} /stackable/hadoop-${PRODUCT}-stackable${RELEASE}
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=hadoop-builder /stackable/hadoop-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable/
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=hadoop-builder /stackable/async-profiler-${ASYNC_PROFILER}-* /stackable/async-profiler-${ASYNC_PROFILER}
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=hadoop-builder /stackable/jmx /stackable/jmx
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=hadoop-builder /stackable/protobuf-*-src.tar.gz /stackable/
 
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hdfs-utils-builder /stackable/hdfs-utils-${HDFS_UTILS}.jar /stackable/hadoop-${PRODUCT}-stackable${RELEASE}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hdfs-utils-builder /stackable/hdfs-utils-${HDFS_UTILS}-src.tar.gz /stackable
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=hdfs-utils-builder /stackable/hdfs-utils-${HDFS_UTILS}.jar /stackable/hadoop-${PRODUCT}-stackable${RELEASE}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} --from=hdfs-utils-builder /stackable/hdfs-utils-${HDFS_UTILS}-src.tar.gz /stackable
 
-COPY --chown=${STACKABLE_USER_UID}:0 hadoop/licenses /licenses
+COPY --chown=${STACKABLE_USER_UID}:${STACKABLE_USER_GID} hadoop/licenses /licenses
 
 # fuse is required for fusermount (called by fuse_dfs)
 # fuse-libs is required for fuse_dfs (not included in fuse)
@@ -232,7 +235,7 @@ microdnf install \
 tar
 microdnf clean all
 rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE}\n" | sort > /stackable/package_manifest.txt
-chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt
+chown ${STACKABLE_USER_UID}:${STACKABLE_USER_GID} /stackable/package_manifest.txt
 chmod g=u /stackable/package_manifest.txt
 rm -rf /var/cache/yum
 
@@ -241,15 +244,15 @@ rm -rf /var/cache/yum
 echo "user_allow_other" > /etc/fuse.conf
 
 ln -s "/stackable/hadoop-${PRODUCT}-stackable${RELEASE}" /stackable/hadoop
-chown --no-dereference "${STACKABLE_USER_UID}:0" /stackable/hadoop
+chown --no-dereference "${STACKABLE_USER_UID}:${STACKABLE_USER_GID}" /stackable/hadoop
 chmod g=u "/stackable/hadoop-${PRODUCT}-stackable${RELEASE}"
 chmod g=u /stackable/*-src.tar.gz
 
 ARCH="${TARGETARCH/amd64/x64}"
 mv /stackable/async-profiler-${ASYNC_PROFILER}* "/stackable/async-profiler-${ASYNC_PROFILER-}-${TARGETOS}-${ARCH}"
 chmod g=u "/stackable/async-profiler-${ASYNC_PROFILER-}-${TARGETOS}-${ARCH}"
 ln -s "/stackable/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}" /stackable/async-profiler
-chown --no-dereference "${STACKABLE_USER_UID}:0" /stackable/async-profiler
+chown --no-dereference "${STACKABLE_USER_UID}:${STACKABLE_USER_GID}" /stackable/async-profiler
 
 chmod g=u /stackable/jmx
 
@@ -273,7 +276,7 @@ USER ${STACKABLE_USER_UID}
 
 ENV HOME=/stackable
 ENV LD_LIBRARY_PATH=/stackable/hadoop/lib/native:/usr/lib/jvm/jre/lib/server
-ENV PATH="${PATH}":/stackable/hadoop/bin
+ENV PATH="${PATH}:/stackable/hadoop/bin"
 ENV HADOOP_HOME=/stackable/hadoop
 ENV HADOOP_CONF_DIR=/stackable/config
 ENV ASYNC_PROFILER_HOME=/stackable/async-profiler
