diff --git a/.scripts/update_refs.sh b/.scripts/update_refs.sh
index abe1f622..32cd97ed 100755
--- a/.scripts/update_refs.sh
+++ b/.scripts/update_refs.sh
@@ -35,10 +35,10 @@ function prepend {
 function maybe_commit {
   [ "$COMMIT" == "true" ] || return 0
   local MESSAGE="$1"
-  PATCH=$(mktemp)
+  PATCH=$(mktemp --suffix=.diff)
   git add -u
   git diff --staged > "$PATCH"
-  git commit -S -m "$MESSAGE" --no-verify
+  git diff-index --quiet HEAD -- || git commit -S -m "$MESSAGE" --no-verify
   echo "patch written to: $PATCH" | prepend "\t"
 }

@@ -55,8 +55,8 @@ if [[ "$CURRENT_BRANCH" == release-* ]]; then

   # Replace 0.0.0-dev refs with ${STACKABLE_RELEASE}.0
   # TODO (@NickLarsenNZ): handle patches later, and what about release-candidates?
-  SEARCH='stackable(0\.0\.0-dev|24\.7\.[0-9]+)' # TODO (@NickLarsenNZ): After https://github.com/stackabletech/stackable-cockpit/issues/310, only search for 0.0.0-dev
-  REPLACEMENT="stackable${STACKABLE_RELEASE}.0" # TODO (@NickLarsenNZ): Be a bit smarter about patch releases.
+  SEARCH='stackable(0\.0\.0-dev|24\.7\.[0-9]+|24\.11\.0)' # TODO (@NickLarsenNZ): After https://github.com/stackabletech/stackable-cockpit/issues/310, only search for 0.0.0-dev
+  REPLACEMENT="stackable${STACKABLE_RELEASE}.1" # TODO (@NickLarsenNZ): Be a bit smarter about patch releases.
   MESSAGE="Update image references with $REPLACEMENT"
   echo "$MESSAGE"
   find demos stacks -type f \
diff --git a/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml b/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
index a64c4e7b..46e2760f 100644
--- a/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
+++ b/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
       containers:
         - name: start-pyspark-job
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           # N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
           # restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps
           # below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that
diff --git a/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml b/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
index fc0dff9e..84c42550 100644
--- a/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
+++ b/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
       containers:
         - name: start-date-job
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           # N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
           # restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps
           # below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that
diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
index 8ae380ec..98472784 100644
--- a/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
+++ b/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
@@ -9,11 +9,11 @@ spec:
       serviceAccountName: demo-serviceaccount
       initContainers:
         - name: wait-for-kafka
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/instance=kafka -l app.kubernetes.io/name=kafka"]
       containers:
         - name: create-nifi-ingestion-job
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
index 5e567e3f..526a8867 100644
--- a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
+++ b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
@@ -12,11 +12,11 @@ spec:
       serviceAccountName: demo-serviceaccount
       initContainers:
         - name: wait-for-kafka
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=kafka -l app.kubernetes.io/instance=kafka"]
       containers:
         - name: create-spark-ingestion-job
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ingestion-job.yaml"]
           volumeMounts:
             - name: manifest
diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
index b3d744e0..4a8a2e5d 100644
--- a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
+++ b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
@@ -9,11 +9,11 @@ spec:
       serviceAccountName: demo-serviceaccount
       initContainers:
        - name: wait-for-testdata
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Waiting for job load-test-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-test-data"]
       containers:
         - name: create-tables-in-trino
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml b/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
index c225fe88..53b0bb54 100644
--- a/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
+++ b/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: setup-superset
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/end-to-end-security/create-spark-report.yaml b/demos/end-to-end-security/create-spark-report.yaml
index a5f25af4..ded96dea 100644
--- a/demos/end-to-end-security/create-spark-report.yaml
+++ b/demos/end-to-end-security/create-spark-report.yaml
@@ -12,7 +12,7 @@ spec:
       serviceAccountName: demo-serviceaccount
       initContainers:
         - name: wait-for-trino-tables
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command:
             - bash
             - -euo
@@ -23,7 +23,7 @@ spec:
               kubectl wait --timeout=30m --for=condition=complete job/create-tables-in-trino
       containers:
         - name: create-spark-report
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command:
             - bash
             - -euo
diff --git a/demos/end-to-end-security/create-trino-tables.yaml b/demos/end-to-end-security/create-trino-tables.yaml
index 6c117160..469d8925 100644
--- a/demos/end-to-end-security/create-trino-tables.yaml
+++ b/demos/end-to-end-security/create-trino-tables.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: create-tables-in-trino
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml b/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
index 92f031f6..b822d886 100644
--- a/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
+++ b/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
@@ -9,7 +9,7 @@ spec:
     spec:
      containers:
        - name: create-hfile-and-import-to-hbase
-          image: docker.stackable.tech/stackable/hbase:2.4.18-stackable24.11.0
+          image: docker.stackable.tech/stackable/hbase:2.4.18-stackable24.11.1
           env:
             - name: HADOOP_USER_NAME
               value: stackable
diff --git a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
index 7dd44f89..22abb7b1 100644
--- a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
+++ b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: load-ny-taxi-data
-          image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable24.11.1
           # yamllint disable rule:line-length
           command: ["bash", "-c", "/stackable/hadoop/bin/hdfs dfs -mkdir -p /ny-taxi-data/raw \
             && cd /tmp \
diff --git a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
index c7afa9a0..82dd92b8 100644
--- a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: create-druid-ingestion-job
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor"]
           volumeMounts:
             - name: ingestion-job-spec
diff --git a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
index ddec687b..0d2a1e95 100644
--- a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: create-nifi-ingestion-job
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
index 76a2de68..b3a72a3e 100644
--- a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
+++ b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: setup-superset
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
index b1099c8e..ce8a0a28 100644
--- a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: create-druid-ingestion-job
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/stations-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-compaction-job-spec.json https://druid-coordinator:8281/druid/coordinator/v1/config/compaction"]
           volumeMounts:
             - name: ingestion-job-spec
diff --git a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
index 26520651..f46f64dc 100644
--- a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: create-nifi-ingestion-job
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
index fc73baa8..a6195157 100644
--- a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
+++ b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: setup-superset
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/signal-processing/Dockerfile-nifi b/demos/signal-processing/Dockerfile-nifi
index 1bd50f50..0e89c9c7 100644
--- a/demos/signal-processing/Dockerfile-nifi
+++ b/demos/signal-processing/Dockerfile-nifi
@@ -1,3 +1,3 @@
-FROM docker.stackable.tech/stackable/nifi:1.27.0-stackable24.11.0
+FROM docker.stackable.tech/stackable/nifi:1.27.0-stackable24.11.1

 RUN curl --fail -o /stackable/nifi/postgresql-42.6.0.jar "https://repo.stackable.tech/repository/misc/postgresql-timescaledb/postgresql-42.6.0.jar"
diff --git a/demos/signal-processing/create-nifi-ingestion-job.yaml b/demos/signal-processing/create-nifi-ingestion-job.yaml
index 0d59c7ba..866023b0 100644
--- a/demos/signal-processing/create-nifi-ingestion-job.yaml
+++ b/demos/signal-processing/create-nifi-ingestion-job.yaml
@@ -9,13 +9,13 @@ spec:
       serviceAccountName: demo-serviceaccount
       initContainers:
         - name: wait-for-timescale-job
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Waiting for timescaleDB tables to be ready' &&
                     kubectl wait --for=condition=complete job/create-timescale-tables-job" ]
       containers:
         - name: create-nifi-ingestion-job
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \
             curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/signal-processing/DownloadAndWriteToDB.xml && \
             sed -i \"s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g\" DownloadAndWriteToDB.xml && \
diff --git a/demos/signal-processing/create-timescale-tables.yaml b/demos/signal-processing/create-timescale-tables.yaml
index 0804fa71..d3e2afd9 100644
--- a/demos/signal-processing/create-timescale-tables.yaml
+++ b/demos/signal-processing/create-timescale-tables.yaml
@@ -9,7 +9,7 @@ spec:
       serviceAccountName: demo-serviceaccount
       initContainers:
         - name: wait-for-timescale
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Waiting for timescaleDB to be ready' &&
                     kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=postgresql-timescaledb" ]
diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
index 3cafbec2..a8854e4c 100644
--- a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
+++ b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
@@ -8,11 +8,11 @@ spec:
     spec:
       initContainers:
        - name: wait-for-testdata
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Waiting for job load-ny-taxi-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-ny-taxi-data"]
       containers:
         - name: create-spark-anomaly-detection-job
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ad-job.yaml"]
           volumeMounts:
             - name: manifest
diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
index a2a704f6..790a1f82 100644
--- a/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
+++ b/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: setup-superset
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/trino-taxi-data/create-table-in-trino.yaml b/demos/trino-taxi-data/create-table-in-trino.yaml
index 7e02c16a..64dafcdb 100644
--- a/demos/trino-taxi-data/create-table-in-trino.yaml
+++ b/demos/trino-taxi-data/create-table-in-trino.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: create-ny-taxi-data-table-in-trino
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/demos/trino-taxi-data/setup-superset.yaml b/demos/trino-taxi-data/setup-superset.yaml
index 9902ba7f..9a2933af 100644
--- a/demos/trino-taxi-data/setup-superset.yaml
+++ b/demos/trino-taxi-data/setup-superset.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: setup-superset
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
diff --git a/stacks/_templates/jupyterhub.yaml b/stacks/_templates/jupyterhub.yaml
index 1939afd9..05d56a3f 100644
--- a/stacks/_templates/jupyterhub.yaml
+++ b/stacks/_templates/jupyterhub.yaml
@@ -50,7 +50,7 @@ options:
       HADOOP_CONF_DIR: "/home/jovyan/hdfs"
     initContainers:
       - name: download-notebook
-        image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+        image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
         command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
         volumeMounts:
           - mountPath: /notebook
diff --git a/stacks/_templates/keycloak.yaml b/stacks/_templates/keycloak.yaml
index b620e50f..3f7a9cb8 100644
--- a/stacks/_templates/keycloak.yaml
+++ b/stacks/_templates/keycloak.yaml
@@ -48,7 +48,7 @@ spec:
             - name: tls
               mountPath: /tls/
         - name: create-auth-class
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command: ["/bin/bash", "-c"]
           args:
             - |
diff --git a/stacks/end-to-end-security/krb5.yaml b/stacks/end-to-end-security/krb5.yaml
index c9dd6018..76c4635a 100644
--- a/stacks/end-to-end-security/krb5.yaml
+++ b/stacks/end-to-end-security/krb5.yaml
@@ -14,7 +14,7 @@ spec:
     spec:
       initContainers:
         - name: init
-          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.0
+          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1
           args:
             - sh
             - -euo
@@ -35,7 +35,7 @@ spec:
               name: data
       containers:
         - name: kdc
-          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.0
+          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1
           args:
             - krb5kdc
             - -n
@@ -48,7 +48,7 @@ spec:
             - mountPath: /var/kerberos/krb5kdc
               name: data
         - name: kadmind
-          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.0
+          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1
           args:
             - kadmind
             - -nofork
@@ -61,7 +61,7 @@ spec:
            - mountPath: /var/kerberos/krb5kdc
              name: data
        - name: client
-          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.0
+          image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1
           tty: true
           stdin: true
           env:
diff --git a/stacks/end-to-end-security/superset.yaml b/stacks/end-to-end-security/superset.yaml
index c80fd82f..f5ca1ff3 100644
--- a/stacks/end-to-end-security/superset.yaml
+++ b/stacks/end-to-end-security/superset.yaml
@@ -25,7 +25,7 @@ spec:
       initContainers:
         # The postgres image does not contain curl or wget...
         - name: download-dump
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command:
             - bash
             - -c
diff --git a/stacks/keycloak-opa-poc/keycloak.yaml b/stacks/keycloak-opa-poc/keycloak.yaml
index 0e484a96..3da32e87 100644
--- a/stacks/keycloak-opa-poc/keycloak.yaml
+++ b/stacks/keycloak-opa-poc/keycloak.yaml
@@ -70,7 +70,7 @@ spec:
     spec:
      containers:
        - name: propagate-keycloak-address
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command:
             - bash
             - -x
diff --git a/stacks/keycloak-opa-poc/setup-keycloak.yaml b/stacks/keycloak-opa-poc/setup-keycloak.yaml
index 499f8632..ab30c381 100644
--- a/stacks/keycloak-opa-poc/setup-keycloak.yaml
+++ b/stacks/keycloak-opa-poc/setup-keycloak.yaml
@@ -29,7 +29,7 @@ spec:
     spec:
      containers:
        - name: setup-keycloak
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           env:
             - name: KEYCLOAK_ADMIN_PASSWORD
               valueFrom:
diff --git a/stacks/logging/setup-opensearch-dashboards.yaml b/stacks/logging/setup-opensearch-dashboards.yaml
index ed565227..c3632f94 100644
--- a/stacks/logging/setup-opensearch-dashboards.yaml
+++ b/stacks/logging/setup-opensearch-dashboards.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
      containers:
        - name: setup-opensearch-dashboards
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           env:
             - name: OPEN_SEARCH_ADMIN_PASSWORD
               valueFrom:
diff --git a/stacks/signal-processing/jupyterhub.yaml b/stacks/signal-processing/jupyterhub.yaml
index 8bf3a23d..5dc54a7f 100644
--- a/stacks/signal-processing/jupyterhub.yaml
+++ b/stacks/signal-processing/jupyterhub.yaml
@@ -30,7 +30,7 @@ options:
   singleuser:
     cmd: null
     image:
-      # TODO (@NickLarsenNZ): Use a versioned image with stackable24.11.0 or stackableXX.X.X so that
+      # TODO (@NickLarsenNZ): Use a versioned image with stackable24.11.1 or stackableXX.X.X so that
       # the demo is reproducable for the release and it will be automatically replaced for the release branch.
       name: docker.stackable.tech/demos/jupyter-pyspark-with-alibi-detect
       tag: python-3.9
@@ -41,7 +41,7 @@ options:
         stackable.tech/vendor: Stackable
     initContainers:
       - name: download-notebook
-        image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.0
+        image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
         command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb']
         volumeMounts:
           - mountPath: /notebook
diff --git a/stacks/signal-processing/nifi.yaml b/stacks/signal-processing/nifi.yaml
index 5e758282..3bb28b09 100644
--- a/stacks/signal-processing/nifi.yaml
+++ b/stacks/signal-processing/nifi.yaml
@@ -6,7 +6,7 @@ metadata:
 spec:
   image:
     productVersion: 1.27.0
-    # TODO (@NickLarsenNZ): Use a versioned image with stackable24.11.0 or stackableXX.X.X so that
+    # TODO (@NickLarsenNZ): Use a versioned image with stackable24.11.1 or stackableXX.X.X so that
     # the demo is reproducable for the release and it will be automatically replaced for the release branch.
     custom: docker.stackable.tech/demos/nifi:1.27.0-postgresql
   clusterConfig: