
Commit a17786a

chore(spark): bump version to 3.5.7

1 parent 813c3c3

File tree

7 files changed: +27, -21 lines


demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
Lines changed: 2 additions & 2 deletions

@@ -144,13 +144,13 @@ data:
          stackable.tech/vendor: Stackable
      spec:
        sparkImage:
-         productVersion: 3.5.6
+         productVersion: 3.5.7
        mode: cluster
        mainApplicationFile: local:///stackable/spark/jobs/spark-ingest-into-lakehouse.py
        deps:
          packages:
            - org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1
-           - org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.6
+           - org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.7
        s3connection:
          reference: minio
        sparkConf:
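
For orientation, a trimmed sketch of the ingestion job's SparkApplication spec after this bump (nesting approximated from the hunk above, comments added); the point is that the spark-sql-kafka-0-10 connector is released together with Spark and therefore moves in lockstep with sparkImage.productVersion, while the Iceberg runtime follows its own release cycle:

  spec:
    sparkImage:
      productVersion: 3.5.7  # Spark release provided by the Stackable image
    mode: cluster
    deps:
      packages:
        # The Kafka SQL connector is versioned with Spark itself, so it is bumped to 3.5.7 as well
        - org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.7
        # The Iceberg runtime is versioned independently and stays at 1.8.1
        - org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1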

demos/end-to-end-security/create-spark-report.yaml
Lines changed: 1 addition & 1 deletion

@@ -55,7 +55,7 @@ data:
        name: spark-report
      spec:
        sparkImage:
-         productVersion: 3.5.6
+         productVersion: 3.5.7
        mode: cluster
        mainApplicationFile: local:///stackable/spark/jobs/spark-report.py
        deps:

demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
Lines changed: 11 additions & 11 deletions

@@ -10,22 +10,22 @@ spec:
        - name: wait-for-testdata
          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
          command:
-         - bash
-         - -euo
-         - pipefail
-         - -c
-         - |
+           - bash
+           - -euo
+           - pipefail
+           - -c
+           - |
              echo 'Waiting for job load-ny-taxi-data to finish'
              kubectl wait --for=condition=complete --timeout=30m job/load-ny-taxi-data
      containers:
        - name: create-spark-anomaly-detection-job
          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
          command:
-         - bash
-         - -euo
-         - pipefail
-         - -c
-         - |
+           - bash
+           - -euo
+           - pipefail
+           - -c
+           - |
              echo 'Submitting Spark job'
              kubectl apply -f /tmp/manifest/spark-ad-job.yaml
          volumeMounts:

@@ -51,7 +51,7 @@ data:
        name: spark-ad
      spec:
        sparkImage:
-         productVersion: 3.5.6
+         productVersion: 3.5.7
        mode: cluster
        mainApplicationFile: local:///spark-scripts/spark-ad.py
        deps:

stacks/airflow/airflow.yaml
Lines changed: 2 additions & 1 deletion

@@ -302,7 +302,7 @@ data:
      spec:
        version: "1.0"
        sparkImage:
-         productVersion: 3.5.6
+         productVersion: 3.5.7
        mode: cluster
        mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
        job:

@@ -330,6 +330,7 @@ data:
            memory:
              limit: 1024Mi
        replicas: 3
+
    # {% endraw %}
    ---
    apiVersion: v1

stacks/jupyterhub-pyspark-hdfs/jupyterlab.yaml
Lines changed: 8 additions & 3 deletions

@@ -21,7 +21,7 @@ spec:
      serviceAccountName: default
      containers:
        - name: jupyterlab
-         image: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
+         image: oci.stackable.tech/stackable/spark-connect-client:3.5.7-stackable0.0.0-dev
          imagePullPolicy: IfNotPresent
          command:
            - bash

@@ -39,8 +39,13 @@ spec:
              name: notebook
      initContainers:
        - name: download-notebook
-         image: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
-         command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
+         image: oci.stackable.tech/stackable/spark-connect-client:3.5.7-stackable0.0.0-dev
+         command:
+           [
+             "sh",
+             "-c",
+             "curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb",
+           ]
          volumeMounts:
            - mountPath: /notebook
              name: notebook
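
The bracketed command above is YAML flow style; it is equivalent to the more common block style, sketched below with approximated indentation, in case that is easier to read:

  initContainers:
    - name: download-notebook
      image: oci.stackable.tech/stackable/spark-connect-client:3.5.7-stackable0.0.0-dev
      # Fetch the demo notebook into the shared volume before JupyterLab starts
      command:
        - sh
        - -c
        - curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb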

stacks/jupyterhub-pyspark-hdfs/notebook.ipynb
Lines changed: 1 addition & 1 deletion

@@ -53,7 +53,7 @@
        "#\n",
        "# See: https://issues.apache.org/jira/browse/SPARK-46032\n",
        "#\n",
-       "spark.addArtifacts(\"/stackable/spark/connect/spark-connect_2.12-3.5.6.jar\")"
+       "spark.addArtifacts(\"/stackable/spark/connect/spark-connect_2.12-3.5.7.jar\")"
      ]
     },
     {

stacks/jupyterhub-pyspark-hdfs/spark_connect.yaml
Lines changed: 2 additions & 2 deletions

@@ -30,8 +30,8 @@ spec:
    image:
      # Using an image that includes scikit-learn (among other things)
      # because this package needs to be available on the executors.
-     custom: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
-     productVersion: 3.5.6
+     custom: oci.stackable.tech/stackable/spark-connect-client:3.5.7-stackable0.0.0-dev
+     productVersion: 3.5.7
      pullPolicy: IfNotPresent
    args:
    server:
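
As in jupyterlab.yaml above, the custom Spark Connect client image tag and productVersion are kept on the same Spark release; a trimmed sketch of the image section (nesting approximated from the hunk, comments added):

  image:
    # Custom client image that bundles scikit-learn for the executors
    custom: oci.stackable.tech/stackable/spark-connect-client:3.5.7-stackable0.0.0-dev
    # productVersion should match the Spark version baked into the custom image
    productVersion: 3.5.7
    pullPolicy: IfNotPresent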
