diff --git a/.scripts/update_refs.sh b/.scripts/update_refs.sh
index abe1f622..32cd97ed 100755
--- a/.scripts/update_refs.sh
+++ b/.scripts/update_refs.sh
@@ -35,10 +35,10 @@ function prepend {
 function maybe_commit {
   [ "$COMMIT" == "true" ] || return 0
   local MESSAGE="$1"
-  PATCH=$(mktemp)
+  PATCH=$(mktemp --suffix=.diff)
   git add -u
   git diff --staged > "$PATCH"
-  git commit -S -m "$MESSAGE" --no-verify
+  git diff-index --quiet HEAD -- || git commit -S -m "$MESSAGE" --no-verify
   echo "patch written to: $PATCH" | prepend "\t"
 }
@@ -55,8 +55,8 @@ if [[ "$CURRENT_BRANCH" == release-* ]]; then
   # Replace 0.0.0-dev refs with ${STACKABLE_RELEASE}.0
   # TODO (@NickLarsenNZ): handle patches later, and what about release-candidates?
-  SEARCH='stackable(0\.0\.0-dev|24\.7\.[0-9]+)' # TODO (@NickLarsenNZ): After https://github.com/stackabletech/stackable-cockpit/issues/310, only search for 0.0.0-dev
-  REPLACEMENT="stackable${STACKABLE_RELEASE}.0" # TODO (@NickLarsenNZ): Be a bit smarter about patch releases.
+  SEARCH='stackable(0\.0\.0-dev|24\.7\.[0-9]+|24\.11\.0)' # TODO (@NickLarsenNZ): After https://github.com/stackabletech/stackable-cockpit/issues/310, only search for 0.0.0-dev
+  REPLACEMENT="stackable${STACKABLE_RELEASE}.1" # TODO (@NickLarsenNZ): Be a bit smarter about patch releases.
   MESSAGE="Update image references with $REPLACEMENT"
   echo "$MESSAGE"
   find demos stacks -type f \
diff --git a/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml b/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
index dd650853..46e2760f 100644
--- a/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
+++ b/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
       containers:
         - name: start-pyspark-job
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           # N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
           # restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps
           # below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that
diff --git a/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml b/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
index b5e9ba8d..84c42550 100644
--- a/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
+++ b/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
       containers:
         - name: start-date-job
-          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0
+          image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1
           # N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
           # restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed.
The wait/watch steps # below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml index 277c6005..98472784 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml @@ -9,12 +9,12 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-kafka - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/instance=kafka -l app.kubernetes.io/name=kafka"] containers: - name: create-nifi-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml index 423f0fad..526a8867 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml @@ -12,11 +12,11 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-kafka - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=kafka -l app.kubernetes.io/instance=kafka"] containers: - name: create-spark-ingestion-job - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ingestion-job.yaml"] volumeMounts: - name: manifest diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml index 8cb6e3da..4a8a2e5d 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml @@ -9,11 +9,11 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-testdata - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 command: ["bash", "-c", "echo 'Waiting for job load-test-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-test-data"] containers: - name: create-tables-in-trino - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 
+ image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml b/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml index d5fdff67..53b0bb54 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml @@ -8,8 +8,8 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/demos/demos-v2.yaml b/demos/demos-v2.yaml index a31ff1ea..9b52292b 100644 --- a/demos/demos-v2.yaml +++ b/demos/demos-v2.yaml @@ -7,10 +7,10 @@ demos: - airflow - job-scheduling manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml supportedNamespaces: [] resourceRequests: cpu: 2401m @@ -24,8 +24,8 @@ demos: - hdfs - cycling-tripdata manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml supportedNamespaces: [] resourceRequests: cpu: "3" @@ -43,9 +43,9 @@ demos: - opa - keycloak manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/create-trino-tables.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/serviceaccount.yaml - - 
plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/create-spark-report.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/end-to-end-security/create-trino-tables.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/end-to-end-security/serviceaccount.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/end-to-end-security/create-spark-report.yaml supportedNamespaces: [] resourceRequests: cpu: 9000m @@ -64,9 +64,9 @@ demos: - s3 - earthquakes manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml supportedNamespaces: ["default"] resourceRequests: cpu: 8700m @@ -85,9 +85,9 @@ demos: - s3 - water-levels manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml supportedNamespaces: ["default"] resourceRequests: cpu: 8900m @@ -104,10 +104,10 @@ demos: - s3 - ny-taxi-data manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml + - plainYaml: 
https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml supportedNamespaces: [] resourceRequests: cpu: 6400m @@ -139,9 +139,9 @@ demos: - s3 - ny-taxi-data manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/load-test-data.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/create-table-in-trino.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/setup-superset.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/trino-taxi-data/load-test-data.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/trino-taxi-data/create-table-in-trino.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/trino-taxi-data/setup-superset.yaml supportedNamespaces: [] resourceRequests: cpu: 6800m @@ -164,12 +164,12 @@ demos: - water-levels - earthquakes manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml supportedNamespaces: ["default"] resourceRequests: cpu: "80" @@ -185,7 +185,7 @@ demos: - pyspark - ny-taxi-data manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml supportedNamespaces: [] resourceRequests: cpu: 3350m @@ -202,7 +202,7 @@ demos: - vector - zookeeper manifests: - - 
plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/logging/zookeeper.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/logging/zookeeper.yaml supportedNamespaces: [] resourceRequests: cpu: 6500m @@ -218,9 +218,9 @@ demos: - grafana-dashboards - zookeeper manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/serviceaccount.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-timescale-tables.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-nifi-ingestion-job.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/signal-processing/serviceaccount.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/signal-processing/create-timescale-tables.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/signal-processing/create-nifi-ingestion-job.yaml supportedNamespaces: [] resourceRequests: cpu: "3" diff --git a/demos/end-to-end-security/create-spark-report.yaml b/demos/end-to-end-security/create-spark-report.yaml index 2c8c4df5..ded96dea 100644 --- a/demos/end-to-end-security/create-spark-report.yaml +++ b/demos/end-to-end-security/create-spark-report.yaml @@ -12,7 +12,7 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-trino-tables - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: - bash - -euo @@ -23,7 +23,7 @@ spec: kubectl wait --timeout=30m --for=condition=complete job/create-tables-in-trino containers: - name: create-spark-report - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: - bash - -euo diff --git a/demos/end-to-end-security/create-trino-tables.yaml b/demos/end-to-end-security/create-trino-tables.yaml index 7c488d5f..469d8925 100644 --- a/demos/end-to-end-security/create-trino-tables.yaml +++ b/demos/end-to-end-security/create-trino-tables.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-tables-in-trino - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml b/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml index 7c561ed3..b822d886 100644 --- a/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml +++ b/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml @@ -9,7 +9,7 @@ spec: spec: containers: - name: create-hfile-and-import-to-hbase - image: docker.stackable.tech/stackable/hbase:2.4.18-stackable24.7.0 + image: docker.stackable.tech/stackable/hbase:2.4.18-stackable24.11.1 env: - name: HADOOP_USER_NAME value: stackable diff --git a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml index d02c5088..22abb7b1 100644 --- a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml +++ 
b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: load-ny-taxi-data - image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable0.0.0-dev + image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable24.11.1 # yamllint disable rule:line-length command: ["bash", "-c", "/stackable/hadoop/bin/hdfs dfs -mkdir -p /ny-taxi-data/raw \ && cd /tmp \ diff --git a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml index 3416ed91..82dd92b8 100644 --- a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml +++ b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-druid-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor"] volumeMounts: - name: ingestion-job-spec diff --git a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml index 231d8818..0d2a1e95 100644 --- a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml +++ b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml @@ -8,8 +8,8 @@ spec: spec: containers: - name: create-nifi-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml index b52a2adf..b3a72a3e 100644 --- a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml +++ b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml @@ -8,8 +8,8 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml index 3c2d6208..ce8a0a28 100644 --- a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml +++ 
b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-druid-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/stations-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-compaction-job-spec.json https://druid-coordinator:8281/druid/coordinator/v1/config/compaction"] volumeMounts: - name: ingestion-job-spec diff --git a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml index 6795a681..f46f64dc 100644 --- a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml +++ b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml @@ -8,8 +8,8 @@ spec: spec: containers: - name: create-nifi-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml index 6cf44c53..a6195157 100644 --- a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml +++ b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml @@ -8,8 +8,8 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/demos/signal-processing/Dockerfile-nifi b/demos/signal-processing/Dockerfile-nifi index db643c3f..0e89c9c7 100644 --- a/demos/signal-processing/Dockerfile-nifi +++ b/demos/signal-processing/Dockerfile-nifi @@ -1,3 +1,3 @@ -FROM docker.stackable.tech/stackable/nifi:1.27.0-stackable24.7.0 +FROM docker.stackable.tech/stackable/nifi:1.27.0-stackable24.11.1 RUN curl --fail -o /stackable/nifi/postgresql-42.6.0.jar "https://repo.stackable.tech/repository/misc/postgresql-timescaledb/postgresql-42.6.0.jar" diff --git a/demos/signal-processing/create-nifi-ingestion-job.yaml 
b/demos/signal-processing/create-nifi-ingestion-job.yaml index 51179a50..866023b0 100644 --- a/demos/signal-processing/create-nifi-ingestion-job.yaml +++ b/demos/signal-processing/create-nifi-ingestion-job.yaml @@ -9,15 +9,15 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-timescale-job - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 command: ["bash", "-c", "echo 'Waiting for timescaleDB tables to be ready' && kubectl wait --for=condition=complete job/create-timescale-tables-job" ] containers: - name: create-nifi-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \ - curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/DownloadAndWriteToDB.xml && \ + curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/signal-processing/DownloadAndWriteToDB.xml && \ sed -i \"s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g\" DownloadAndWriteToDB.xml && \ python -u /tmp/script/script.py"] volumeMounts: diff --git a/demos/signal-processing/create-timescale-tables.yaml b/demos/signal-processing/create-timescale-tables.yaml index 61089f34..d3e2afd9 100644 --- a/demos/signal-processing/create-timescale-tables.yaml +++ b/demos/signal-processing/create-timescale-tables.yaml @@ -9,7 +9,7 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-timescale - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 command: ["bash", "-c", "echo 'Waiting for timescaleDB to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=postgresql-timescaledb" ] diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml index 5dce76c3..a8854e4c 100644 --- a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml +++ b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml @@ -8,11 +8,11 @@ spec: spec: initContainers: - name: wait-for-testdata - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "echo 'Waiting for job load-ny-taxi-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-ny-taxi-data"] containers: - name: create-spark-anomaly-detection-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ad-job.yaml"] volumeMounts: - name: manifest diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml index 36aba951..790a1f82 100644 --- a/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml +++ b/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml @@ -8,8 +8,8 @@ spec: spec: containers: - name: setup-superset - image: 
docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/demos/trino-taxi-data/create-table-in-trino.yaml b/demos/trino-taxi-data/create-table-in-trino.yaml index d45ce7d9..64dafcdb 100644 --- a/demos/trino-taxi-data/create-table-in-trino.yaml +++ b/demos/trino-taxi-data/create-table-in-trino.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-ny-taxi-data-table-in-trino - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["bash", "-c", "python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/trino-taxi-data/setup-superset.yaml b/demos/trino-taxi-data/setup-superset.yaml index 2c94efda..9a2933af 100644 --- a/demos/trino-taxi-data/setup-superset.yaml +++ b/demos/trino-taxi-data/setup-superset.yaml @@ -8,8 +8,8 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 - command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"] + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 + command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.11/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script mountPath: /tmp/script diff --git a/docs/antora.yml b/docs/antora.yml index 189f7d75..2d6b5aae 100644 --- a/docs/antora.yml +++ b/docs/antora.yml @@ -2,4 +2,4 @@ # Use 'home' here so that the versioning is picked up automatically based # on SDP releases name: home -version: "nightly" +version: "24.11" diff --git a/docs/modules/demos/pages/airflow-scheduled-job.adoc b/docs/modules/demos/pages/airflow-scheduled-job.adoc index 1ecf3d80..933de6f1 100644 --- a/docs/modules/demos/pages/airflow-scheduled-job.adoc +++ b/docs/modules/demos/pages/airflow-scheduled-job.adoc @@ -1,5 +1,4 @@ = airflow-scheduled-job -:page-aliases: stable@stackablectl::demos/airflow-scheduled-job.adoc :description: This demo installs Airflow with Postgres and Redis on Kubernetes, showcasing DAG scheduling, job runs, and status verification via the Airflow UI. 
Install this demo on an existing Kubernetes cluster: diff --git a/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc b/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc index f79dd363..96429e96 100644 --- a/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc +++ b/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc @@ -1,5 +1,4 @@ = data-lakehouse-iceberg-trino-spark -:page-aliases: stable@stackablectl::demos/data-lakehouse-iceberg-trino-spark.adoc :description: This demo shows a data workload with real-world data volumes using Trino, Kafka, Spark, NiFi, Superset and OPA. :demo-code: https://github.com/stackabletech/demos/blob/main/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml diff --git a/docs/modules/demos/pages/hbase-hdfs-load-cycling-data.adoc b/docs/modules/demos/pages/hbase-hdfs-load-cycling-data.adoc index 1d00ddd3..8421491c 100644 --- a/docs/modules/demos/pages/hbase-hdfs-load-cycling-data.adoc +++ b/docs/modules/demos/pages/hbase-hdfs-load-cycling-data.adoc @@ -1,5 +1,4 @@ = hbase-hdfs-cycling-data -:page-aliases: stable@stackablectl::demos/hbase-hdfs-load-cycling-data.adoc :description: Load cyclist data from HDFS to HBase on Kubernetes using Stackable's demo. Install, copy data, create HFiles, and query efficiently. :kaggle: https://www.kaggle.com/datasets/timgid/cyclistic-dataset-google-certificate-capstone?select=Divvy_Trips_2020_Q1.csv diff --git a/docs/modules/demos/pages/index.adoc b/docs/modules/demos/pages/index.adoc index 65ada08a..0e8b83cc 100644 --- a/docs/modules/demos/pages/index.adoc +++ b/docs/modules/demos/pages/index.adoc @@ -1,5 +1,4 @@ = Demos -:page-aliases: stable@stackablectl::demos/index.adoc :description: Explore Stackable demos showcasing data platform architectures. Includes external components for evaluation. The pages in this section guide you on how to use the demos provided by Stackable. diff --git a/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc b/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc index a732b82e..10f04d62 100644 --- a/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc +++ b/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc @@ -1,5 +1,4 @@ = jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data -:page-aliases: stable@stackablectl::demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc :scikit-lib: https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.IsolationForest.html :k8s-cpu: https://kubernetes.io/docs/tasks/debug/debug-cluster/resource-metrics-pipeline/#cpu diff --git a/docs/modules/demos/pages/logging.adoc b/docs/modules/demos/pages/logging.adoc index 0466d0eb..8d28082b 100644 --- a/docs/modules/demos/pages/logging.adoc +++ b/docs/modules/demos/pages/logging.adoc @@ -1,5 +1,4 @@ = logging -:page-aliases: stable@stackablectl::demos/logging.adoc :description: Deploy a logging stack with OpenSearch, Vector, and Zookeeper for log data analysis using OpenSearch Dashboards in Kubernetes. 
:k8s-cpu: https://kubernetes.io/docs/tasks/debug/debug-cluster/resource-metrics-pipeline/#cpu diff --git a/docs/modules/demos/pages/nifi-kafka-druid-earthquake-data.adoc b/docs/modules/demos/pages/nifi-kafka-druid-earthquake-data.adoc index dff15bf5..f49a40c8 100644 --- a/docs/modules/demos/pages/nifi-kafka-druid-earthquake-data.adoc +++ b/docs/modules/demos/pages/nifi-kafka-druid-earthquake-data.adoc @@ -1,5 +1,4 @@ = nifi-kafka-druid-earthquake-data -:page-aliases: stable@stackablectl::demos/nifi-kafka-druid-earthquake-data.adoc :description: Install this demo for a showcase of using Kafka, Druid and Superset to view the global earthquake distribution. :superset-docs: https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#creating-charts-in-explore-view diff --git a/docs/modules/demos/pages/nifi-kafka-druid-water-level-data.adoc b/docs/modules/demos/pages/nifi-kafka-druid-water-level-data.adoc index 0d25daa6..090ea66c 100644 --- a/docs/modules/demos/pages/nifi-kafka-druid-water-level-data.adoc +++ b/docs/modules/demos/pages/nifi-kafka-druid-water-level-data.adoc @@ -1,5 +1,4 @@ = nifi-kafka-druid-water-level-data -:page-aliases: stable@stackablectl::demos/nifi-kafka-druid-water-level-data.adoc :description: Install this demo for a showcase of using Kafka, Druid and Superset to visualize water levels in across Germany. :superset: https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#creating-charts-in-explore-view diff --git a/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc b/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc index 319c523e..fcd12656 100644 --- a/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc +++ b/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc @@ -1,5 +1,4 @@ = spark-k8s-anomaly-detection-taxi-data -:page-aliases: stable@stackablectl::demos/spark-k8s-anomaly-detection-taxi-data.adoc :description: Deploy a Kubernetes-based Spark demo for anomaly detection using the popular New York taxi dataset, featuring Trino, Spark, MinIO, and Superset. :scikit-lib: https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.IsolationForest.html diff --git a/docs/modules/demos/pages/trino-iceberg.adoc b/docs/modules/demos/pages/trino-iceberg.adoc index 34af036d..433e3a70 100644 --- a/docs/modules/demos/pages/trino-iceberg.adoc +++ b/docs/modules/demos/pages/trino-iceberg.adoc @@ -1,5 +1,4 @@ = trino-iceberg -:page-aliases: stable@stackablectl::demos/trino-iceberg.adoc :description: Install and explore Trino with Apache Iceberg for efficient SQL queries and scalable data management in a demo environment. :k8s-cpu: https://kubernetes.io/docs/tasks/debug/debug-cluster/resource-metrics-pipeline/#cpu diff --git a/docs/modules/demos/pages/trino-taxi-data.adoc b/docs/modules/demos/pages/trino-taxi-data.adoc index e8e9328f..541bb3ae 100644 --- a/docs/modules/demos/pages/trino-taxi-data.adoc +++ b/docs/modules/demos/pages/trino-taxi-data.adoc @@ -1,5 +1,4 @@ = trino-taxi-data -:page-aliases: stable@stackablectl::demos/trino-taxi-data.adoc :description: Install and demo Trino with NYC taxi data: Query with SQL, visualize with Superset, and explore data in MinIO and Trino on Kubernetes. 
:superset-docs: https://superset.apache.org/docs/creating-charts-dashboards/creating-your-first-dashboard#creating-charts-in-explore-view diff --git a/stacks/_templates/jupyterhub.yaml b/stacks/_templates/jupyterhub.yaml index fd4bbd81..05d56a3f 100644 --- a/stacks/_templates/jupyterhub.yaml +++ b/stacks/_templates/jupyterhub.yaml @@ -50,8 +50,8 @@ options: HADOOP_CONF_DIR: "/home/jovyan/hdfs" initContainers: - name: download-notebook - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 - command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb'] + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 + command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb'] volumeMounts: - mountPath: /notebook name: notebook diff --git a/stacks/_templates/keycloak.yaml b/stacks/_templates/keycloak.yaml index ecc9a9fb..3f7a9cb8 100644 --- a/stacks/_templates/keycloak.yaml +++ b/stacks/_templates/keycloak.yaml @@ -48,7 +48,7 @@ spec: - name: tls mountPath: /tls/ - name: create-auth-class - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1 command: ["/bin/bash", "-c"] args: - | diff --git a/stacks/end-to-end-security/krb5.yaml b/stacks/end-to-end-security/krb5.yaml index 19657231..76c4635a 100644 --- a/stacks/end-to-end-security/krb5.yaml +++ b/stacks/end-to-end-security/krb5.yaml @@ -14,7 +14,7 @@ spec: spec: initContainers: - name: init - image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.7.0 + image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1 args: - sh - -euo @@ -35,7 +35,7 @@ spec: name: data containers: - name: kdc - image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.7.0 + image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1 args: - krb5kdc - -n @@ -48,7 +48,7 @@ spec: - mountPath: /var/kerberos/krb5kdc name: data - name: kadmind - image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.7.0 + image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1 args: - kadmind - -nofork @@ -61,7 +61,7 @@ spec: - mountPath: /var/kerberos/krb5kdc name: data - name: client - image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.7.0 + image: docker.stackable.tech/stackable/krb5:1.21.1-stackable24.11.1 tty: true stdin: true env: diff --git a/stacks/end-to-end-security/superset.yaml b/stacks/end-to-end-security/superset.yaml index 0577245e..e1bb378f 100644 --- a/stacks/end-to-end-security/superset.yaml +++ b/stacks/end-to-end-security/superset.yaml @@ -25,13 +25,13 @@ spec: initContainers: # The postgres image does not contain curl or wget... 
        - name: download-dump
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command:
             - bash
             - -c
             - |
               cd /tmp
-              curl --fail -O https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/postgres_superset_dump.sql.gz
+              curl --fail -O https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/postgres_superset_dump.sql.gz
               gunzip postgres_superset_dump.sql.gz
               # We need to omit changing the users password, as otherwise the content in the Secrets does not match
@@ -46,6 +46,11 @@
             - bash
             - -c
             - |
+              if psql --host postgresql-superset --user postgres --csv -c "SELECT datname FROM pg_database where datname = 'superset' limit 1" | grep -q superset; then
+                # The flask app will do any necessary migrations.
+                echo "Skip restoring the DB as it already exists"
+                exit 0
+              fi
               psql --host postgresql-superset --user postgres < /dump/postgres_superset_dump.sql
           env:
             - name: PGPASSWORD
diff --git a/stacks/keycloak-opa-poc/keycloak.yaml b/stacks/keycloak-opa-poc/keycloak.yaml
index a6c2e225..3da32e87 100644
--- a/stacks/keycloak-opa-poc/keycloak.yaml
+++ b/stacks/keycloak-opa-poc/keycloak.yaml
@@ -70,7 +70,7 @@ spec:
     spec:
       containers:
         - name: propagate-keycloak-address
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           command:
             - bash
             - -x
diff --git a/stacks/keycloak-opa-poc/setup-keycloak.yaml b/stacks/keycloak-opa-poc/setup-keycloak.yaml
index f21d64a2..ab30c381 100644
--- a/stacks/keycloak-opa-poc/setup-keycloak.yaml
+++ b/stacks/keycloak-opa-poc/setup-keycloak.yaml
@@ -29,7 +29,7 @@ spec:
     spec:
       containers:
         - name: setup-keycloak
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           env:
             - name: KEYCLOAK_ADMIN_PASSWORD
               valueFrom:
diff --git a/stacks/logging/setup-opensearch-dashboards.yaml b/stacks/logging/setup-opensearch-dashboards.yaml
index c3b4330d..c3632f94 100644
--- a/stacks/logging/setup-opensearch-dashboards.yaml
+++ b/stacks/logging/setup-opensearch-dashboards.yaml
@@ -8,7 +8,7 @@ spec:
     spec:
       containers:
         - name: setup-opensearch-dashboards
-          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
+          image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.11.1
           env:
             - name: OPEN_SEARCH_ADMIN_PASSWORD
               valueFrom:
diff --git a/stacks/signal-processing/jupyterhub.yaml b/stacks/signal-processing/jupyterhub.yaml
index f26e5988..5dc54a7f 100644
--- a/stacks/signal-processing/jupyterhub.yaml
+++ b/stacks/signal-processing/jupyterhub.yaml
@@ -30,7 +30,7 @@ options:
   singleuser:
     cmd: null
     image:
-      # TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
+      # TODO (@NickLarsenNZ): Use a versioned image with stackable24.11.1 or stackableXX.X.X so that
       # the demo is reproducable for the release and it will be automatically replaced for the release branch.
name: docker.stackable.tech/demos/jupyter-pyspark-with-alibi-detect tag: python-3.9 @@ -41,8 +41,8 @@ options: stackable.tech/vendor: Stackable initContainers: - name: download-notebook - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 - command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb'] + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.11.1 + command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb'] volumeMounts: - mountPath: /notebook name: notebook diff --git a/stacks/signal-processing/nifi.yaml b/stacks/signal-processing/nifi.yaml index 48d7c39d..3bb28b09 100644 --- a/stacks/signal-processing/nifi.yaml +++ b/stacks/signal-processing/nifi.yaml @@ -6,7 +6,7 @@ metadata: spec: image: productVersion: 1.27.0 - # TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that + # TODO (@NickLarsenNZ): Use a versioned image with stackable24.11.1 or stackableXX.X.X so that # the demo is reproducable for the release and it will be automatically replaced for the release branch. custom: docker.stackable.tech/demos/nifi:1.27.0-postgresql clusterConfig: diff --git a/stacks/stacks-v2.yaml b/stacks/stacks-v2.yaml index 95f5ca29..21224a60 100644 --- a/stacks/stacks-v2.yaml +++ b/stacks/stacks-v2.yaml @@ -2,7 +2,7 @@ stacks: monitoring: description: Stack containing Prometheus and Grafana - stackableRelease: dev + stackableRelease: 24.11 stackableOperators: - commons - listener @@ -11,9 +11,9 @@ stacks: - prometheus - grafana manifests: - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/monitoring/grafana-dashboards.yaml - - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/prometheus.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/prometheus-service-monitor.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/monitoring/grafana-dashboards.yaml + - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/prometheus.yaml + - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/prometheus-service-monitor.yaml supportedNamespaces: [] resourceRequests: cpu: 1750m @@ -25,7 +25,7 @@ stacks: default: adminadmin logging: description: Stack containing OpenSearch, OpenSearch Dashboards (Kibana) and Vector aggregator - stackableRelease: dev + stackableRelease: 24.11 stackableOperators: - commons - listener @@ -37,11 +37,11 @@ stacks: - opensearch-dashboards - vector manifests: - - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/opensearch.yaml - - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/opensearch-dashboards.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/logging/setup-opensearch-dashboards.yaml - - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/vector-aggregator.yaml - - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/vector-aggregator-discovery.yaml + - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/opensearch.yaml + - helmChart: 
https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/opensearch-dashboards.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/logging/setup-opensearch-dashboards.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/vector-aggregator.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/vector-aggregator-discovery.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 5150m
@@ -60,7 +60,7 @@ stacks:
   observability:
     description: >-
       An observability stack with auto-injection of the opentelemetry-collector sidecar to receive traces/logs/metrics via OTLP, and send them to Jaeger/Tempo/Loki.
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -71,21 +71,21 @@ stacks:
       - observability
       - tracing
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/jaeger.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/opentelemetry-operator.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/grafana-admin-credentials.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/grafana.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/grafana-tempo.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/grafana-loki.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/opentelemetry-collector-sidecar.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/observability/opentelemetry-collector-deployment.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/jaeger.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/opentelemetry-operator.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/grafana-admin-credentials.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/grafana.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/grafana-tempo.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/grafana-loki.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/opentelemetry-collector-sidecar.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/observability/opentelemetry-collector-deployment.yaml
     parameters:
       - name: grafanaAdminPassword
         description: Password of the Grafana admin user
         default: adminadmin
   airflow:
     description: Stack containing Airflow scheduling platform
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -95,9 +95,9 @@ stacks:
     labels:
       - airflow
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-airflow.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/redis-airflow.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/airflow/airflow.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-airflow.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/redis-airflow.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/airflow/airflow.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 3400m
@@ -112,7 +112,7 @@ stacks:
         default: airflowSecretKey
   data-lakehouse-iceberg-trino-spark:
     description: Data lakehouse using Iceberg lakehouse on S3, Trino as query engine, Spark for streaming ingest and Superset for data visualization
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -135,17 +135,17 @@ stacks:
       - minio
       - s3
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/minio-distributed.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hive.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hive-iceberg.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-superset.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/data-lakehouse-iceberg-trino-spark/s3-connection.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/data-lakehouse-iceberg-trino-spark/hive-metastores.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/data-lakehouse-iceberg-trino-spark/zookeeper.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/data-lakehouse-iceberg-trino-spark/kafka.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/nifi-kafka-druid-superset-s3/superset.yaml # Reuse
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/minio-distributed.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hive.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hive-iceberg.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/data-lakehouse-iceberg-trino-spark/s3-connection.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/data-lakehouse-iceberg-trino-spark/hive-metastores.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/data-lakehouse-iceberg-trino-spark/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/data-lakehouse-iceberg-trino-spark/kafka.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/nifi-kafka-druid-superset-s3/superset.yaml # Reuse
     supportedNamespaces: []
     resourceRequests:
       cpu: "71"
@@ -169,7 +169,7 @@ stacks:
         default: supersetSecretKey
   hdfs-hbase:
     description: HBase cluster using HDFS as underlying storage
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -181,9 +181,9 @@ stacks:
       - hbase
      - hdfs
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/hdfs-hbase/zookeeper.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/hdfs-hbase/hdfs.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/hdfs-hbase/hbase.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/hdfs-hbase/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/hdfs-hbase/hdfs.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/hdfs-hbase/hbase.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 4200m
@@ -192,7 +192,7 @@ stacks:
     parameters: []
   nifi-kafka-druid-superset-s3:
     description: Stack containing NiFi, Kafka, Druid, MinIO and Superset for data visualization
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -210,14 +210,14 @@ stacks:
       - minio
       - s3
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/minio.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-druid.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-superset.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/nifi-kafka-druid-superset-s3/zookeeper.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/nifi-kafka-druid-superset-s3/kafka.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/nifi-kafka-druid-superset-s3/druid.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/nifi-kafka-druid-superset-s3/superset.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/nifi-kafka-druid-superset-s3/nifi.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/minio.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-druid.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/nifi-kafka-druid-superset-s3/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/nifi-kafka-druid-superset-s3/kafka.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/nifi-kafka-druid-superset-s3/druid.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/nifi-kafka-druid-superset-s3/superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/nifi-kafka-druid-superset-s3/nifi.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 8900m
@@ -238,7 +238,7 @@ stacks:
         default: adminadmin
   spark-trino-superset-s3:
     description: Stack containing MinIO, Trino and Superset for data visualization
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -254,15 +254,15 @@ stacks:
       - minio
       - s3
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/minio.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hive.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hive-iceberg.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-superset.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-superset-s3/s3-connection.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/spark-trino-superset-s3/hive-metastore.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-superset-s3/trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/spark-trino-superset-s3/trino-prediction-catalog.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-superset-s3/superset.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/minio.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hive.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hive-iceberg.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-superset-s3/s3-connection.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/spark-trino-superset-s3/hive-metastore.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-superset-s3/trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/spark-trino-superset-s3/trino-prediction-catalog.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-superset-s3/superset.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 7100m
@@ -283,7 +283,7 @@ stacks:
         default: supersetSecretKey
   trino-superset-s3:
     description: Stack containing MinIO, Trino and Superset for data visualization
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -298,13 +298,13 @@ stacks:
       - minio
       - s3
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/minio.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hive.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-superset.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-superset-s3/s3-connection.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-superset-s3/hive-metastore.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-superset-s3/trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-superset-s3/superset.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/minio.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hive.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-superset-s3/s3-connection.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-superset-s3/hive-metastore.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-superset-s3/trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-superset-s3/superset.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6800m
@@ -325,7 +325,7 @@ stacks:
         default: supersetSecretKey
   trino-iceberg:
     description: Stack containing Trino using Apache Iceberg as a S3 data lakehouse
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -340,11 +340,11 @@ stacks:
       - minio
      - s3
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/minio-distributed-small.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hive-iceberg.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-iceberg/s3-connection.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-iceberg/hive-metastores.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/trino-iceberg/trino.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/minio-distributed-small.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hive-iceberg.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-iceberg/s3-connection.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-iceberg/hive-metastores.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/trino-iceberg/trino.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6000m # Measured 5600m
@@ -359,7 +359,7 @@ stacks:
         default: adminadmin
   jupyterhub-pyspark-hdfs:
     description: Jupyterhub with PySpark and HDFS integration
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -372,12 +372,12 @@ stacks:
       - hdfs
       - pyspark
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/jupyterhub.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/zookeeper.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/serviceaccount.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/jupyterhub.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/serviceaccount.yaml
       # TODO Use patched JHub that created service for us from customer setup (ask Sebastian)
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/spark_driver_service.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/spark_driver_service.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 3350m
@@ -389,7 +389,7 @@ stacks:
         default: adminadmin
   dual-hive-hdfs-s3:
     description: Dual stack Hive on HDFS and S3 for Hadoop/Hive to Trino migration
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -405,12 +405,12 @@ stacks:
       - hdfs
       - s3
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hivehdfs.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hives3.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/minio.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/dual-hive-hdfs-s3/hdfs.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/dual-hive-hdfs-s3/hive.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/dual-hive-hdfs-s3/trino.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hivehdfs.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hives3.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/minio.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/dual-hive-hdfs-s3/hdfs.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/dual-hive-hdfs-s3/hive.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/dual-hive-hdfs-s3/trino.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 7750m
@@ -426,7 +426,7 @@ stacks:
       The bind user credentials are: ldapadmin:ldapadminpassword.
       No AuthenticationClass is configured, The AuthenticationClass is created manually in the tutorial.
       Use the 'openldap' Stack for an OpenLDAD with an AuthenticationClass already installed.
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -435,7 +435,7 @@ stacks:
       - authentication
      - ldap
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/authentication/openldap-tls.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/authentication/openldap-tls.yaml
     supportedNamespaces: ["default"]
     resourceRequests:
       cpu: 1950m
@@ -449,7 +449,7 @@ stacks:
       The bind user credentials are: ldapadmin:ldapadminpassword.
       The LDAP AuthenticationClass is called 'ldap' and the SecretClass for the bind credentials is called 'ldap-bind-credentials'.
       The stack already creates an appropriate Secret, so referring to the 'ldap' AuthenticationClass in your ProductCluster should be enough.
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -458,8 +458,8 @@ stacks:
       - authentication
      - ldap
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/authentication/openldap-tls.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/authentication/openldap-tls-authenticationclass.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/authentication/openldap-tls.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/authentication/openldap-tls-authenticationclass.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 1950m
@@ -475,7 +475,7 @@ stacks:
       3 users are created in Keycloak: admin:adminadmin, alice:alicealice, bob:bobbob.
       admin and alice are admins with full authorization in Druid and Trino, bob is not authorized.
       This is a proof-of-concept and the mechanisms used here are subject to change.
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -490,18 +490,18 @@ stacks:
       - authentication
      - sso
     manifests:
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-superset.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-druid.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/keycloak.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/setup-keycloak.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/opa.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/policies.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/zookeeper.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/hdfs.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/druid.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/keycloak-opa-poc/superset.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-superset.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-druid.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/keycloak.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/setup-keycloak.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/opa.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/policies.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/hdfs.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/druid.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/keycloak-opa-poc/superset.yaml
     supportedNamespaces: ["default"] # ClusterRoleBinding needs explicit namespace
     resourceRequests:
       cpu: 7850m
@@ -541,7 +541,7 @@ stacks:
       Note that this stack is tightly coupled with the demo.
       So if you install the stack you will get demo-specific parts (such as Keycloak users or regorules).
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -567,22 +567,22 @@ stacks:
       memory: 19586Mi
       pvc: 40Gi
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/keycloak-serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/keycloak-realm-config.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/keycloak.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-hive-iceberg.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-superset.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/krb5.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/kerberos-secretclass.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/opa.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/zookeeper.yaml # TODO: Add authentication
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/hdfs.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/hdfs-regorules.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/hive-metastore.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/trino-regorules.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/trino-policies.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/keycloak-serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/keycloak-realm-config.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/keycloak.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-hive-iceberg.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/krb5.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/kerberos-secretclass.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/opa.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/zookeeper.yaml # TODO: Add authentication
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/hdfs.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/hdfs-regorules.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/hive-metastore.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/trino-regorules.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/trino-policies.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/end-to-end-security/superset.yaml
     parameters:
       - name: keycloakAdminPassword
         description: Password of the Keycloak admin user
@@ -611,7 +611,7 @@ stacks:
   signal-processing:
     description: >-
       A stack used for creating, streaming and processing in-flight data and persisting it to TimescaleDB before it is displayed in Grafana
-    stackableRelease: dev
+    stackableRelease: 24.11
     stackableOperators:
       - commons
       - listener
@@ -624,15 +624,15 @@ stacks:
       - jupyterhub
       - grafana
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/secrets.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/grafana-dashboards.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/grafana.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/_templates/postgresql-timescaledb.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/nifi-kafka-druid-superset-s3/zookeeper.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/nifi.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/spark_driver_service.yaml
-      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/jupyterhub.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/signal-processing/secrets.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/signal-processing/grafana-dashboards.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/signal-processing/grafana.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/_templates/postgresql-timescaledb.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/nifi-kafka-druid-superset-s3/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/signal-processing/nifi.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/jupyterhub-pyspark-hdfs/spark_driver_service.yaml
+      - helmChart: https://raw.githubusercontent.com/stackabletech/demos/release-24.11/stacks/signal-processing/jupyterhub.yaml
     parameters:
       - name: nifiAdminPassword
         description: Password of the NiFI admin user