Skip to content

Commit 171384e

Browse files
authored
chore(spark): bump version to 4.0.1 (#304)
* chore(spark): bump version to 3.5.7 * format command args consistently * update to spark 4.0.1
1 parent c484484 commit 171384e

File tree

7 files changed

+30
-24
lines changed

7 files changed

+30
-24
lines changed

demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -144,13 +144,13 @@ data:
144144
stackable.tech/vendor: Stackable
145145
spec:
146146
sparkImage:
147-
productVersion: 3.5.6
147+
productVersion: 4.0.1
148148
mode: cluster
149149
mainApplicationFile: local:///stackable/spark/jobs/spark-ingest-into-lakehouse.py
150150
deps:
151151
packages:
152-
- org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1
153-
- org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.6
152+
- org.apache.iceberg:iceberg-spark-runtime-4.0_2.13:1.10.0
153+
- org.apache.spark:spark-sql-kafka-0-10_2.13:4.0.1
154154
s3connection:
155155
reference: minio
156156
sparkConf:

demos/end-to-end-security/create-spark-report.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,12 +55,12 @@ data:
5555
name: spark-report
5656
spec:
5757
sparkImage:
58-
productVersion: 3.5.6
58+
productVersion: 4.0.1
5959
mode: cluster
6060
mainApplicationFile: local:///stackable/spark/jobs/spark-report.py
6161
deps:
6262
packages:
63-
- org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1
63+
- org.apache.iceberg:iceberg-spark-runtime-4.0_2.13:1.10.0
6464
sparkConf:
6565
spark.driver.extraClassPath: /stackable/config/hdfs
6666
spark.executor.extraClassPath: /stackable/config/hdfs

demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -10,22 +10,22 @@ spec:
1010
- name: wait-for-testdata
1111
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command:
13-
- bash
14-
- -euo
15-
- pipefail
16-
- -c
17-
- |
13+
- bash
14+
- -euo
15+
- pipefail
16+
- -c
17+
- |
1818
echo 'Waiting for job load-ny-taxi-data to finish'
1919
kubectl wait --for=condition=complete --timeout=30m job/load-ny-taxi-data
2020
containers:
2121
- name: create-spark-anomaly-detection-job
2222
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
2323
command:
24-
- bash
25-
- -euo
26-
- pipefail
27-
- -c
28-
- |
24+
- bash
25+
- -euo
26+
- pipefail
27+
- -c
28+
- |
2929
echo 'Submitting Spark job'
3030
kubectl apply -f /tmp/manifest/spark-ad-job.yaml
3131
volumeMounts:
@@ -51,12 +51,12 @@ data:
5151
name: spark-ad
5252
spec:
5353
sparkImage:
54-
productVersion: 3.5.6
54+
productVersion: 4.0.1
5555
mode: cluster
5656
mainApplicationFile: local:///spark-scripts/spark-ad.py
5757
deps:
5858
packages:
59-
- org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1
59+
- org.apache.iceberg:iceberg-spark-runtime-4.0_2.13:1.10.0
6060
requirements:
6161
- scikit-learn==1.4.0
6262
s3connection:

stacks/airflow/airflow.yaml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -303,7 +303,7 @@ data:
303303
spec:
304304
version: "1.0"
305305
sparkImage:
306-
productVersion: 3.5.6
306+
productVersion: 4.0.1
307307
mode: cluster
308308
mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
309309
job:
@@ -331,6 +331,8 @@ data:
331331
memory:
332332
limit: 1024Mi
333333
replicas: 3
334+
335+
334336
# {% endraw %}
335337
---
336338
apiVersion: v1

stacks/jupyterhub-pyspark-hdfs/jupyterlab.yaml

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ spec:
2121
serviceAccountName: default
2222
containers:
2323
- name: jupyterlab
24-
image: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
24+
image: oci.stackable.tech/stackable/spark-connect-client:4.0.1-stackable0.0.0-dev
2525
imagePullPolicy: IfNotPresent
2626
command:
2727
- bash
@@ -39,8 +39,12 @@ spec:
3939
name: notebook
4040
initContainers:
4141
- name: download-notebook
42-
image: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
43-
command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
42+
image: oci.stackable.tech/stackable/spark-connect-client:4.0.1-stackable0.0.0-dev
43+
command:
44+
- bash
45+
args:
46+
- -c
47+
- curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb
4448
volumeMounts:
4549
- mountPath: /notebook
4650
name: notebook

stacks/jupyterhub-pyspark-hdfs/notebook.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@
5353
"#\n",
5454
"# See: https://issues.apache.org/jira/browse/SPARK-46032\n",
5555
"#\n",
56-
"spark.addArtifacts(\"/stackable/spark/connect/spark-connect_2.12-3.5.6.jar\")"
56+
"spark.addArtifacts(\"/stackable/spark/connect/spark-connect-4.0.1.jar\")"
5757
]
5858
},
5959
{

stacks/jupyterhub-pyspark-hdfs/spark_connect.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,8 +30,8 @@ spec:
3030
image:
3131
# Using an image that includes scikit-learn (among other things)
3232
# because this package needs to be available on the executors.
33-
custom: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
34-
productVersion: 3.5.6
33+
custom: oci.stackable.tech/stackable/spark-connect-client:4.0.1-stackable0.0.0-dev
34+
productVersion: 4.0.1
3535
pullPolicy: IfNotPresent
3636
args:
3737
server:

0 commit comments

Comments
 (0)