Skip to content

Commit 96d67c6

Browse files
authored
chore: bump spark version to 3.5.6 (#224)
1 parent aa66834 commit 96d67c6

File tree

7 files changed

+10
-10
lines changed

7 files changed

+10
-10
lines changed

demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,13 +43,13 @@ data:
4343
stackable.tech/vendor: Stackable
4444
spec:
4545
sparkImage:
46-
productVersion: 3.5.5
46+
productVersion: 3.5.6
4747
mode: cluster
4848
mainApplicationFile: local:///stackable/spark/jobs/spark-ingest-into-lakehouse.py
4949
deps:
5050
packages:
5151
- org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1
52-
- org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.5
52+
- org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.6
5353
s3connection:
5454
reference: minio
5555
sparkConf:

demos/end-to-end-security/create-spark-report.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ data:
5555
name: spark-report
5656
spec:
5757
sparkImage:
58-
productVersion: 3.5.5
58+
productVersion: 3.5.6
5959
mode: cluster
6060
mainApplicationFile: local:///stackable/spark/jobs/spark-report.py
6161
deps:

demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ data:
3737
name: spark-ad
3838
spec:
3939
sparkImage:
40-
productVersion: 3.5.5
40+
productVersion: 3.5.6
4141
mode: cluster
4242
mainApplicationFile: local:///spark-scripts/spark-ad.py
4343
deps:

stacks/airflow/airflow.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,7 @@ data:
274274
spec:
275275
version: "1.0"
276276
sparkImage:
277-
productVersion: 3.5.5
277+
productVersion: 3.5.6
278278
mode: cluster
279279
mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
280280
job:

stacks/jupyterhub-pyspark-hdfs/jupyterlab.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ spec:
2121
serviceAccountName: default
2222
containers:
2323
- name: jupyterlab
24-
image: oci.stackable.tech/stackable/spark-connect-client:3.5.5-stackable0.0.0-dev
24+
image: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
2525
imagePullPolicy: IfNotPresent
2626
command:
2727
- bash
@@ -39,7 +39,7 @@ spec:
3939
name: notebook
4040
initContainers:
4141
- name: download-notebook
42-
image: oci.stackable.tech/stackable/spark-connect-client:3.5.5-stackable0.0.0-dev
42+
image: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
4343
command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
4444
volumeMounts:
4545
- mountPath: /notebook

stacks/jupyterhub-pyspark-hdfs/notebook.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@
5353
"#\n",
5454
"# See: https://issues.apache.org/jira/browse/SPARK-46032\n",
5555
"#\n",
56-
"spark.addArtifacts(\"/stackable/spark/connect/spark-connect_2.12-3.5.5.jar\")"
56+
"spark.addArtifacts(\"/stackable/spark/connect/spark-connect_2.12-3.5.6.jar\")"
5757
]
5858
},
5959
{

stacks/jupyterhub-pyspark-hdfs/spark_connect.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,8 +30,8 @@ spec:
3030
image:
3131
# Using an image that includes scikit-learn (among other things)
3232
# because this package needs to be available on the executors.
33-
custom: oci.stackable.tech/stackable/spark-connect-client:3.5.5-stackable0.0.0-dev
34-
productVersion: 3.5.5
33+
custom: oci.stackable.tech/stackable/spark-connect-client:3.5.6-stackable0.0.0-dev
34+
productVersion: 3.5.6
3535
pullPolicy: IfNotPresent
3636
args:
3737
server:

0 commit comments

Comments (0)