@@ -60,10 +60,13 @@ COPY --chown=${STACKABLE_USER_UID}:0 spark-k8s/hbase-connectors/stackable/patche
 
 RUN <<EOF
 
-# HBase connectors don't support Spark 4 yet, so we skip the build.
+# IMPORTANT: HBase connectors don't support Spark 4 yet, so we skip the build.
+# Watch this PR for updates: https://github.com/apache/hbase-connectors/pull/130
 if [[ "${PRODUCT}" == 4* ]]; then
     # Create this empty directory so that following COPY layers succeed.
     mkdir -p /stackable/spark/jars
+    # Create a dummy tarball so that the later COPY of the connector sources (normally produced by the Spark 3 build) succeeds.
+    touch hbase-connector-${HBASE_CONNECTOR}-stackable${RELEASE}-src.tar.gz
     exit 0
 fi
 
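Note on the new touch: it pairs with the last hunk of this diff, where the trailing * glob is dropped from the COPY of the connector source tarball. Because the Spark 4 skip path now always leaves an empty file with the exact expected name behind, the final stage can copy it by that exact name. A minimal sketch of the idea in plain bash, with placeholder values standing in for the real build args:

    # Illustration only, not part of the image build; the version values are made up.
    HBASE_CONNECTOR=1.0.1
    RELEASE=0.0.0-dev
    touch "hbase-connector-${HBASE_CONNECTOR}-stackable${RELEASE}-src.tar.gz"
    # The exact file name now exists (zero bytes), so a later exact-name COPY succeeds.
    ls -l "hbase-connector-${HBASE_CONNECTOR}-stackable${RELEASE}-src.tar.gz"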
@@ -187,7 +190,7 @@ RUN <<EOF
     export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g"
 
     case "${PRODUCT}" in
-        "4*")
+        4*)
             # The Spark 4 script has a --connect option which is not available in Spark 3.
             # This option is required to build Spark Connect.
             # Also this option breaks the Spark 3 build so we ensure it's only provided here.
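For context on why the pattern loses its quotes: in a Bash case statement a quoted pattern is matched literally, so "4*" only ever matches the two-character string 4*, never a real version such as 4.0.0, while the unquoted 4* is treated as a glob. A minimal standalone sketch (the PRODUCT value is a placeholder):

    PRODUCT="4.0.0"
    case "${PRODUCT}" in
        "4*") echo "matched the literal string 4*" ;;      # never taken for 4.0.0
        4*)   echo "matched a version starting with 4" ;;
        *)    echo "no match" ;;
    esac
    # prints: matched a version starting with 4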
@@ -226,7 +229,7 @@ RUN <<EOF
     cd dist/connect
 
     case "${PRODUCT}" in
-        "4*")
+        4*)
             cp "/stackable/spark-${PRODUCT}-stackable${RELEASE}/sql/connect/server/target/spark-connect_${SCALA_BINARY_VERSION}-${PRODUCT}-stackable${RELEASE}.jar" .
             cp "/stackable/spark-${PRODUCT}-stackable${RELEASE}/sql/connect/common/target/spark-connect-common_${SCALA_BINARY_VERSION}-${PRODUCT}-stackable${RELEASE}.jar" .
             cp "/stackable/spark-${PRODUCT}-stackable${RELEASE}/sql/connect/client/jvm/target/spark-connect-client-jvm_${SCALA_BINARY_VERSION}-${PRODUCT}-stackable${RELEASE}.jar" .
@@ -238,7 +241,11 @@ RUN <<EOF
             ;;
     esac
 
-    # The Spark operator expects a file named spark-connect-{PRODUCT}.jar.
+    # This link is needed by the operator and is kept for backwards compatibility.
+    # TODO: remove it at some time in the future.
+    ln -s "spark-connect_${SCALA_BINARY_VERSION}-${PRODUCT}-stackable${RELEASE}.jar" "spark-connect_${SCALA_BINARY_VERSION}-${PRODUCT}.jar"
+    # Link to the spark-connect jar without the stackable suffix and scala version.
+    # This link supersedes the previous link.
     ln -s "spark-connect_${SCALA_BINARY_VERSION}-${PRODUCT}-stackable${RELEASE}.jar" "spark-connect-${PRODUCT}.jar"
 EOF
 
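To make the resulting layout concrete: after this step dist/connect contains the copied Spark Connect jars plus two symlinks that both resolve to the stackable jar. Assuming placeholder values SCALA_BINARY_VERSION=2.13, PRODUCT=4.0.0 and RELEASE=0.0.0-dev, the relevant entries would look like this:

    spark-connect_2.13-4.0.0-stackable0.0.0-dev.jar
    spark-connect_2.13-4.0.0.jar -> spark-connect_2.13-4.0.0-stackable0.0.0-dev.jar
    spark-connect-4.0.0.jar -> spark-connect_2.13-4.0.0-stackable0.0.0-dev.jar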
@@ -340,7 +347,7 @@ ENV PYTHONPATH=$SPARK_HOME/python
 
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}-stackable${RELEASE}/dist /stackable/spark
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-source-builder /stackable/spark-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable
-COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-connectors-builder /stackable/hbase-connector-${HBASE_CONNECTOR}-stackable${RELEASE}-src.tar.gz* /stackable
+COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-connectors-builder /stackable/hbase-connector-${HBASE_CONNECTOR}-stackable${RELEASE}-src.tar.gz /stackable
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}-stackable${RELEASE}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}-stackable${RELEASE}.cdx.json
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
 COPY --from=spark-builder /usr/bin/tini /usr/bin/tini