
Commit b0c21f2

Merge branch 'main' into chore/vector-24.11
2 parents: 0216634 + 0a376b0

35 files changed: +170 -1223 lines changed

.hadolint.yaml

Lines changed: 19 additions & 0 deletions
@@ -30,7 +30,26 @@ ignored:
   # Reason: I've yet to see this being useful, where this happens we usually have no way to actually provide the file
   - SC1091
 
+  # Expressions don't expand in single quotes, use double quotes for that.
+  # https://www.shellcheck.net/wiki/SC2016
+  # Reason: Ignoring because envsubst requires environment variable names
+  # as parameter. These environment variables must not be expanded, e.g.:
+  # envsubst '${PRODUCT}:${HBASE_OPERATOR_TOOLS}' < /stackable/bin/hbck2.env
+  - SC2016
+
   # Use cd ... || exit in case cd fails.
   # https://github.com/koalaman/shellcheck/wiki/SC2164
   # Reason: Ignoring because we inherit SHELL from the base image which contains "-e" for bash
   - SC2164
+
+  # In POSIX sh, [[ ]] is undefined.
+  # https://www.shellcheck.net/wiki/SC3010
+  # Reason: Ignoring because we inherit SHELL from the base image which
+  # sets the default shell to Bash where [[ ]] is defined.
+  - SC3010
+
+  # In POSIX sh, string replacement is undefined.
+  # https://www.shellcheck.net/wiki/SC3060
+  # Reason: Ignoring because we inherit SHELL from the base image which
+  # sets the default shell to Bash where string replacement is supported.
+  - SC3060
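
A note on the SC2016 reasoning above: envsubst takes the names of the variables to substitute as its argument, so the single quotes are intentional; the shell must not expand them before envsubst sees them. A minimal sketch of that behavior (the values and temp file are illustrative, not from the repo):

    export PRODUCT=2.6.0
    printf 'version: ${PRODUCT}, tools: ${HBASE_OPERATOR_TOOLS}\n' > /tmp/hbck2.env
    # Only the variables named in the single-quoted argument get substituted:
    envsubst '${PRODUCT}' < /tmp/hbck2.env
    # -> version: 2.6.0, tools: ${HBASE_OPERATOR_TOOLS}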

CHANGELOG.md

Lines changed: 7 additions & 0 deletions
@@ -15,7 +15,10 @@ All notable changes to this project will be documented in this file.
 - trino: Add version `455` ([#822]).
 - trino-cli: Add version `455` ([#822]).
 - spark: Add version `3.5.2` ([#848]).
+- statsd-exporter: Add version 0.27.0 ([#866]).
 - hadoop: Add patch "HADOOP-18516: Support Fixed SAS Token for ABFS Authentication" ([#852]).
+- hbase: Add hadoop-azure.jar to the lib directory to support the Azure Blob Filesystem and
+  the Azure Data Lake Storage ([#853]).
 
 ### Changed

@@ -34,6 +37,8 @@ All notable changes to this project will be documented in this file.
 - hbase: Remove 2.4.17 ([#846]).
 - omid: Remove 1.1.0 and 1.1.1 ([#846]).
 - spark: Remove 3.4.2 and 3.4.3 ([#848]).
+- statsd-exporter: Remove 0.26.1 ([#866]).
+- superset: Remove 2.1.3, 3.1.0 and 3.1.3 ([#866]).
 - zookeeper: Remove 3.8.4 ([#851]).
 
 ### Fixed

@@ -54,6 +59,8 @@ All notable changes to this project will be documented in this file.
 [#848]: https://github.com/stackabletech/docker-images/pull/848
 [#851]: https://github.com/stackabletech/docker-images/pull/851
 [#852]: https://github.com/stackabletech/docker-images/pull/852
+[#853]: https://github.com/stackabletech/docker-images/pull/853
+[#866]: https://github.com/stackabletech/docker-images/pull/866
 [#867]: https://github.com/stackabletech/docker-images/pull/867
 
 ## [24.7.0] - 2024-07-24

airflow/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -101,7 +101,7 @@ RUN mkdir -pv ${AIRFLOW_HOME} && \
     mkdir -pv ${AIRFLOW_HOME}/dags && \
     mkdir -pv ${AIRFLOW_HOME}/logs && \
     chown --recursive stackable:stackable ${AIRFLOW_HOME} && \
-    curl --fail -o /usr/bin/tini "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-${TARGETARCH}"
+    curl -o /usr/bin/tini "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-${TARGETARCH}"
 
 COPY airflow/stackable/utils/entrypoint.sh /entrypoint.sh
 COPY airflow/stackable/utils/run-airflow.sh /run-airflow.sh
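
For context on the --fail removals in this and the following Dockerfiles: without --fail, curl treats an HTTP error response as a success, exits 0, and writes the server's error body to the output file; with --fail, it exits with code 22 and discards the body. A quick illustration (the URL is hypothetical):

    curl -o /tmp/out https://example.com/missing; echo "exit=$?"         # exit=0, /tmp/out holds the error page
    curl --fail -o /tmp/out https://example.com/missing; echo "exit=$?"  # exit=22 on HTTP errors >= 400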

airflow/versions.py

Lines changed: 2 additions & 2 deletions
@@ -3,15 +3,15 @@
         "product": "2.9.2",
         "python": "3.9",
         "git_sync": "v4.2.4",
-        "statsd_exporter": "0.26.1",
+        "statsd_exporter": "0.27.0",
         "tini": "0.19.0",
         "vector": "0.41.1",
     },
     {
         "product": "2.9.3",
         "python": "3.9",
         "git_sync": "v4.2.4",
-        "statsd_exporter": "0.26.1",
+        "statsd_exporter": "0.27.0",
         "tini": "0.19.0",
         "vector": "0.41.1",
     },

druid/Dockerfile

Lines changed: 2 additions & 2 deletions
@@ -48,7 +48,7 @@ RUN --mount=type=cache,id=maven-${PRODUCT},uid=1000,target=/stackable/.m2/repository \
     --mount=type=cache,id=npm-${PRODUCT},uid=1000,target=/stackable/.npm \
     --mount=type=cache,id=cache-${PRODUCT},uid=1000,target=/stackable/.cache \
     <<EOF
-curl --fail -L "https://repo.stackable.tech/repository/packages/druid/apache-druid-${PRODUCT}-src.tar.gz" | tar -xzC .
+curl "https://repo.stackable.tech/repository/packages/druid/apache-druid-${PRODUCT}-src.tar.gz" | tar -xzC .
 cd apache-druid-${PRODUCT}-src
 ./patches/apply_patches.sh ${PRODUCT}

@@ -71,7 +71,7 @@ fi
 # testdata in kuttl tests and the getting started guide.
 
 # Install OPA authorizer extension.
-curl --fail -L "https://repo.stackable.tech/repository/packages/druid/druid-opa-authorizer-${AUTHORIZER}.tar.gz" | tar -xzC /stackable/apache-druid-${PRODUCT}/extensions
+curl "https://repo.stackable.tech/repository/packages/druid/druid-opa-authorizer-${AUTHORIZER}.tar.gz" | tar -xzC /stackable/apache-druid-${PRODUCT}/extensions
 EOF
 
 FROM stackable/image/java-base AS final

hadoop/Dockerfile

Lines changed: 5 additions & 5 deletions
@@ -20,20 +20,20 @@ COPY hadoop/stackable/fuse_dfs_wrapper /stackable/fuse_dfs_wrapper
 # At the same time a new HDFS Operator will still work with older images which do not have the symlink to the versionless jar.
 # After one of our next releases (23.11 or 24.x) we should update the operator to point at the non-versioned symlink (jmx_prometheus_javaagent.jar)
 # And then we can also remove the symlink to 0.16.1 from this Dockerfile.
-RUN curl --fail "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" && \
+RUN curl "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" && \
     chmod -x "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" && \
     ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar && \
     ln -s /stackable/jmx/jmx_prometheus_javaagent.jar /stackable/jmx/jmx_prometheus_javaagent-0.16.1.jar
 
 RUN ARCH="${TARGETARCH/amd64/x64}" && \
-    curl --fail -L "https://repo.stackable.tech/repository/packages/async-profiler/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}.tar.gz" | tar -xzC . && \
+    curl "https://repo.stackable.tech/repository/packages/async-profiler/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}.tar.gz" | tar -xzC . && \
     ln -s "/stackable/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}" /stackable/async-profiler
 
 # This Protobuf version is the exact version as used in the Hadoop Dockerfile
 # See https://github.com/apache/hadoop/blob/trunk/dev-support/docker/pkg-resolver/install-protobuf.sh
 # (this was hardcoded in the Dockerfile in earlier versions of Hadoop, make sure to look at the exact version in Github)
 WORKDIR /opt/protobuf-src
-RUN curl --fail -L -s -S https://repo.stackable.tech/repository/packages/protobuf/protobuf-java-${PROTOBUF}.tar.gz -o /opt/protobuf.tar.gz && \
+RUN curl https://repo.stackable.tech/repository/packages/protobuf/protobuf-java-${PROTOBUF}.tar.gz -o /opt/protobuf.tar.gz && \
     tar xzf /opt/protobuf.tar.gz --strip-components 1 --no-same-owner && \
     ./configure --prefix=/opt/protobuf && \
     make "-j$(nproc)" && \

@@ -60,7 +60,7 @@ COPY hadoop/stackable/patches /stackable/patches
 # Also skip building the yarn, mapreduce and minicluster modules: this will result in the modules being excluded but not all
 # jar files will be stripped if they are needed elsewhere e.g. share/hadoop/yarn will not be part of the build, but yarn jars
 # will still exist in share/hadoop/tools as they would be needed by the resource estimator tool. Such jars are removed in a later step.
-RUN curl --fail -L "https://repo.stackable.tech/repository/packages/hadoop/hadoop-${PRODUCT}-src.tar.gz" | tar -xzC . && \
+RUN curl "https://repo.stackable.tech/repository/packages/hadoop/hadoop-${PRODUCT}-src.tar.gz" | tar -xzC . && \
     patches/apply_patches.sh ${PRODUCT} && \
     cd hadoop-${PRODUCT}-src && \
     mvn --no-transfer-progress clean package -Pdist,native -pl '!hadoop-tools/hadoop-pipes,!hadoop-yarn-project,!hadoop-mapreduce-project,!hadoop-minicluster' -Drequire.fuse=true -DskipTests -Dmaven.javadoc.skip=true && \

@@ -99,7 +99,7 @@ WORKDIR /stackable
 # labels to build a rackID from.
 # Starting with hdfs-utils version 0.3.0 the topology provider is not a standalone jar anymore and included in hdfs-utils.
 
-RUN curl --fail -L "https://github.com/stackabletech/hdfs-utils/archive/refs/tags/v${HDFS_UTILS}.tar.gz" | tar -xzC . && \
+RUN curl "https://github.com/stackabletech/hdfs-utils/archive/refs/tags/v${HDFS_UTILS}.tar.gz" | tar -xzC . && \
     cd hdfs-utils-${HDFS_UTILS} && \
     mvn --no-transfer-progress clean package -P hadoop-${PRODUCT} -DskipTests -Dmaven.javadoc.skip=true && \
     mkdir -p /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib && \
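
The jmx-exporter compatibility chain described in the comments above can be checked in a built image; a hedged sketch (the concrete agent version in the final target varies with ${JMX_EXPORTER}):

    # 0.16.1 name -> versionless symlink -> actual versioned jar
    readlink /stackable/jmx/jmx_prometheus_javaagent-0.16.1.jar  # /stackable/jmx/jmx_prometheus_javaagent.jar
    readlink /stackable/jmx/jmx_prometheus_javaagent.jar         # /stackable/jmx/jmx_prometheus_javaagent-<version>.jar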

hbase/Dockerfile

Lines changed: 13 additions & 5 deletions
@@ -42,7 +42,7 @@ RUN --mount=type=cache,id=maven-hbase-${PRODUCT},uid=1000,target=/stackable/.m2/repository
 ###
 ### HBase
 ###
-curl --fail -L "https://repo.stackable.tech/repository/packages/hbase/hbase-${PRODUCT}-src.tar.gz" | tar -xzC .
+curl "https://repo.stackable.tech/repository/packages/hbase/hbase-${PRODUCT}-src.tar.gz" | tar -xzC .
 mv hbase-${PRODUCT} hbase-${PRODUCT}-src
 
 chmod +x patches/apply_patches.sh

@@ -63,7 +63,7 @@ ln -s "/stackable/hbase-${PRODUCT}" /stackable/hbase
 ### JMX Prometheus Exporter/Agent
 ###
 if [[ -n "${JMX_EXPORTER}" ]] ; then
-  curl --fail -L "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
+  curl "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
   chmod +x "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
   ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar
 fi

@@ -73,7 +73,7 @@ fi
 ###
 cd /stackable
 export ARCH="${TARGETARCH/amd64/x64}"
-curl --fail -L "https://repo.stackable.tech/repository/packages/async-profiler/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}.tar.gz" | tar -xzC .
+curl "https://repo.stackable.tech/repository/packages/async-profiler/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}.tar.gz" | tar -xzC .
 ln -s "/stackable/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}" /stackable/async-profiler
 
 # We're removing these to make the intermediate layer smaller

@@ -144,7 +144,7 @@ WORKDIR /stackable
 # We need to explicitly give the uid to use which is hardcoded to "1000" in stackable-base
 RUN --mount=type=cache,id=maven-hbase-operator-tools,uid=1000,target=/stackable/.m2/repository <<EOF
 
-curl --fail -L "https://repo.stackable.tech/repository/packages/hbase-operator-tools/hbase-operator-tools-${HBASE_OPERATOR_TOOLS}-src.tar.gz" | tar -xzC .
+curl "https://repo.stackable.tech/repository/packages/hbase-operator-tools/hbase-operator-tools-${HBASE_OPERATOR_TOOLS}-src.tar.gz" | tar -xzC .
 mv hbase-operator-tools-${HBASE_OPERATOR_TOOLS} hbase-operator-tools-${HBASE_OPERATOR_TOOLS}-src
 chmod +x patches/apply_patches.sh
 patches/apply_patches.sh hbase-operator-tools/${HBASE_OPERATOR_TOOLS} hbase-operator-tools-${HBASE_OPERATOR_TOOLS}-src

@@ -231,7 +231,7 @@ WORKDIR /stackable
 
 RUN --mount=type=cache,id=maven-phoenix,uid=1000,target=/stackable/.m2/repository <<EOF
 cd /stackable
-curl --fail -L "https://repo.stackable.tech/repository/packages/phoenix/phoenix-${PHOENIX}-src.tar.gz" | tar -xzC .
+curl "https://repo.stackable.tech/repository/packages/phoenix/phoenix-${PHOENIX}-src.tar.gz" | tar -xzC .
 mv phoenix-${PHOENIX} phoenix-${PHOENIX}-src
 
 chmod +x patches/apply_patches.sh

@@ -317,6 +317,14 @@ COPY --chown=stackable:stackable --from=phoenix-builder /stackable/phoenix /stackable/phoenix
 COPY --chown=stackable:stackable --from=hadoop-s3-builder /stackable/bin/export-snapshot-to-s3 /stackable/bin/export-snapshot-to-s3
 COPY --chown=stackable:stackable --from=hadoop-s3-builder /stackable/hadoop/share/hadoop/tools/lib/ /stackable/hadoop/share/hadoop/tools/lib/
 
+# Copy the dependencies from Hadoop which are required for the Azure Data Lake
+# Storage (ADLS) to /stackable/hbase-${PRODUCT}/lib which is on the classpath.
+# hadoop-azure-${HADOOP}.jar contains the AzureBlobFileSystem which is required
+# by hadoop-common-${HADOOP}.jar if the scheme of a file system is "abfs://".
+COPY --chown=stackable:stackable --from=hadoop-builder \
+    /stackable/hadoop/share/hadoop/tools/lib/hadoop-azure-${HADOOP}.jar \
+    /stackable/hbase-${PRODUCT}/lib/
+
 COPY --chown=stackable:stackable --from=opa-authorizer-builder /stackable/hbase-opa-authorizer/target/hbase-opa-authorizer*.jar /stackable/hbase-${PRODUCT}/lib
 
 RUN <<EOF
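
The new COPY makes the AzureBlobFileSystem implementation visible to HBase at runtime; a hedged way to confirm it inside a built image (paths follow the Dockerfile, the version suffix depends on ${HADOOP}):

    # The jar copied from the hadoop-builder stage should now sit on HBase's classpath:
    ls /stackable/hbase/lib/hadoop-azure-*.jar
    hbase classpath | tr ':' '\n' | grep -m 1 hadoop-azure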

hello-world/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -20,6 +20,6 @@ RUN microdnf update && \
 USER stackable
 WORKDIR /stackable
 
-RUN curl --fail -L https://repo.stackable.tech/repository/packages/hello-world/hello-world-${PRODUCT}.jar -o hello-world.jar
+RUN curl "https://repo.stackable.tech/repository/packages/hello-world/hello-world-${PRODUCT}.jar" -o hello-world.jar
 
 CMD ["java", "-jar", "hello-world.jar"]

hive/Dockerfile

Lines changed: 2 additions & 2 deletions
@@ -25,7 +25,7 @@ WORKDIR /stackable
 # Cache mounts are owned by root by default
 # We need to explicitly give the uid to use which is hardcoded to "1000" in stackable-base
 RUN --mount=type=cache,id=maven-hive,uid=1000,target=/stackable/.m2/repository <<EOF
-curl --fail -L "https://repo.stackable.tech/repository/packages/hive/apache-hive-${PRODUCT}-src.tar.gz" | tar -xzC .
+curl "https://repo.stackable.tech/repository/packages/hive/apache-hive-${PRODUCT}-src.tar.gz" | tar -xzC .
 
 patches/apply_patches.sh ${PRODUCT}

@@ -53,7 +53,7 @@ fi
 cp /stackable/bin/start-metastore /stackable/apache-hive-metastore-${PRODUCT}-bin/bin
 rm -rf /stackable/apache-hive-${PRODUCT}-src
 
-curl --fail -L "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
+curl "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
 ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar
 
 # We're removing these to make the intermediate layer smaller

java-devel/Dockerfile

Lines changed: 2 additions & 0 deletions
@@ -47,6 +47,8 @@ RUN microdnf update && \
 
 ENV JAVA_HOME=/usr/lib/jvm/jre-${PRODUCT}
 
+COPY --chown=stackable:0 java-devel/stackable/settings.xml /stackable/.m2/settings.xml
+
 # Mitigation for CVE-2021-44228 (Log4Shell)
 # This variable is supported as of Log4j version 2.10 and
 # disables the vulnerable feature
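
Because the settings.xml is copied to the default per-user location, Maven builds in images derived from java-devel pick it up without extra flags; a hedged way to inspect the merged result (assumes mvn is on the PATH, as in this builder image):

    # help:effective-settings prints the user settings merged with Maven's built-in defaults
    mvn help:effective-settings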
