diff --git a/.github/workflows/dev_nifi.yaml b/.github/workflows/dev_nifi.yaml
index 5e456f55..7384a735 100644
--- a/.github/workflows/dev_nifi.yaml
+++ b/.github/workflows/dev_nifi.yaml
@@ -5,7 +5,7 @@ env:
IMAGE_NAME: nifi
# TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
# the demo is reproducable for the release and it will be automatically replaced for the release branch.
- IMAGE_VERSION: 1.27.0-postgresql
+ IMAGE_VERSION: 1.28.1-postgresql
REGISTRY_PATH: stackable
DOCKERFILE_PATH: "demos/signal-processing/Dockerfile-nifi"
diff --git a/demos/demos-v2.yaml b/demos/demos-v2.yaml
index a31ff1ea..f8c92aed 100644
--- a/demos/demos-v2.yaml
+++ b/demos/demos-v2.yaml
@@ -64,6 +64,7 @@ demos:
- s3
- earthquakes
manifests:
+ - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/serviceaccount.yaml
- plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
- plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
- plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
@@ -85,6 +86,7 @@ demos:
- s3
- water-levels
manifests:
+ - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/serviceaccount.yaml
- plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
- plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
- plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
diff --git a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
index 421ba348..2c25c32c 100644
--- a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
@@ -6,10 +6,30 @@ metadata:
spec:
template:
spec:
+ serviceAccountName: demo-serviceaccount
+ initContainers:
+ - name: wait-for-druid-coordinator
+ image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for Druid Coordinator to be created'
+              kubectl wait --for=create pod/druid-coordinator-default-0 --timeout=30m
+ echo 'Waiting for Druid Coordinator to be ready'
+ kubectl wait --for=condition=Ready pod/druid-coordinator-default-0 --timeout=30m
containers:
- name: create-druid-ingestion-job
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
- command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor
volumeMounts:
- name: ingestion-job-spec
mountPath: /tmp/ingestion-job-spec
diff --git a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
index 19bb7675..efddab0c 100644
--- a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
@@ -6,10 +6,31 @@ metadata:
spec:
template:
spec:
+ serviceAccountName: demo-serviceaccount
+ initContainers:
+ - name: wait-for-nifi
+ image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for NiFi to be created'
+ kubectl wait --for=create pod/nifi-node-default-0 --timeout=30m
+ echo 'Waiting for NiFi to be ready'
+ kubectl wait --for=condition=Ready pod/nifi-node-default-0 --timeout=30m
containers:
- name: create-nifi-ingestion-job
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
- command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml
+ python -u /tmp/script/script.py
volumeMounts:
- name: script
mountPath: /tmp/script
diff --git a/demos/nifi-kafka-druid-earthquake-data/serviceaccount.yaml b/demos/nifi-kafka-druid-earthquake-data/serviceaccount.yaml
new file mode 100644
index 00000000..8a2d0b19
--- /dev/null
+++ b/demos/nifi-kafka-druid-earthquake-data/serviceaccount.yaml
@@ -0,0 +1,47 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: demo-serviceaccount
+ namespace: default
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+ name: demo-clusterrolebinding
+subjects:
+ - kind: ServiceAccount
+ name: demo-serviceaccount
+ namespace: default
+roleRef:
+ kind: ClusterRole
+ name: demo-clusterrole
+ apiGroup: rbac.authorization.k8s.io
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+ name: demo-clusterrole
+rules:
+ - apiGroups:
+ - ""
+ resources:
+ - pods
+ verbs:
+ - get
+ - list
+ - watch
+ - apiGroups:
+ - batch
+ resources:
+ - jobs
+ verbs:
+ - get
+ - list
+ - watch
+ - apiGroups:
+ - ""
+ resources:
+ - pods/exec
+ verbs:
+ - create
diff --git a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
index 15ee14c9..9be84027 100644
--- a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
+++ b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
@@ -6,10 +6,31 @@ metadata:
spec:
template:
spec:
+ serviceAccountName: demo-serviceaccount
+ initContainers:
+ - name: wait-for-superset
+ image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for Superset to be created'
+ kubectl wait --for=create pod/superset-node-default-0 --timeout=30m
+ echo 'Waiting for Superset to be ready'
+ kubectl wait --for=condition=Ready pod/superset-node-default-0 --timeout=30m
containers:
- name: setup-superset
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
- command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip
+ python -u /tmp/script/script.py
volumeMounts:
- name: script
mountPath: /tmp/script
diff --git a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
index 01570f29..3ce637dc 100644
--- a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
@@ -6,10 +6,32 @@ metadata:
spec:
template:
spec:
+ serviceAccountName: demo-serviceaccount
+ initContainers:
+ - name: wait-for-druid-coordinator
+ image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for Druid Coordinator to be created'
+              kubectl wait --for=create pod/druid-coordinator-default-0 --timeout=30m
+ echo 'Waiting for Druid Coordinator to be ready'
+ kubectl wait --for=condition=Ready pod/druid-coordinator-default-0 --timeout=30m
containers:
- name: create-druid-ingestion-job
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
- command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/stations-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-compaction-job-spec.json https://druid-coordinator:8281/druid/coordinator/v1/config/compaction"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/stations-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor
+ curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor
+ curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-compaction-job-spec.json https://druid-coordinator:8281/druid/coordinator/v1/config/compaction
volumeMounts:
- name: ingestion-job-spec
mountPath: /tmp/ingestion-job-spec
diff --git a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
index 7dcf70ed..edd8fac2 100644
--- a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
+++ b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
@@ -6,10 +6,31 @@ metadata:
spec:
template:
spec:
+ serviceAccountName: demo-serviceaccount
+ initContainers:
+ - name: wait-for-nifi
+ image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for NiFi to be created'
+ kubectl wait --for=create pod/nifi-node-default-0 --timeout=30m
+ echo 'Waiting for NiFi to be ready'
+ kubectl wait --for=condition=Ready pod/nifi-node-default-0 --timeout=30m
containers:
- name: create-nifi-ingestion-job
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
- command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml
+ python -u /tmp/script/script.py
volumeMounts:
- name: script
mountPath: /tmp/script
diff --git a/demos/nifi-kafka-druid-water-level-data/serviceaccount.yaml b/demos/nifi-kafka-druid-water-level-data/serviceaccount.yaml
new file mode 100644
index 00000000..8a2d0b19
--- /dev/null
+++ b/demos/nifi-kafka-druid-water-level-data/serviceaccount.yaml
@@ -0,0 +1,47 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: demo-serviceaccount
+ namespace: default
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+ name: demo-clusterrolebinding
+subjects:
+ - kind: ServiceAccount
+ name: demo-serviceaccount
+ namespace: default
+roleRef:
+ kind: ClusterRole
+ name: demo-clusterrole
+ apiGroup: rbac.authorization.k8s.io
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+ name: demo-clusterrole
+rules:
+ - apiGroups:
+ - ""
+ resources:
+ - pods
+ verbs:
+ - get
+ - list
+ - watch
+ - apiGroups:
+ - batch
+ resources:
+ - jobs
+ verbs:
+ - get
+ - list
+ - watch
+ - apiGroups:
+ - ""
+ resources:
+ - pods/exec
+ verbs:
+ - create
diff --git a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
index fad556f2..0d85b0b0 100644
--- a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
+++ b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
@@ -6,10 +6,31 @@ metadata:
spec:
template:
spec:
+ serviceAccountName: demo-serviceaccount
+ initContainers:
+ - name: wait-for-superset
+ image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for Superset to be created'
+ kubectl wait --for=create pod/superset-node-default-0 --timeout=30m
+ echo 'Waiting for Superset to be ready'
+ kubectl wait --for=condition=Ready pod/superset-node-default-0 --timeout=30m
containers:
- name: setup-superset
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
- command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/superset-assets.zip
+ python -u /tmp/script/script.py
volumeMounts:
- name: script
mountPath: /tmp/script
diff --git a/demos/signal-processing/Dockerfile-nifi b/demos/signal-processing/Dockerfile-nifi
index b53549da..faba9af2 100644
--- a/demos/signal-processing/Dockerfile-nifi
+++ b/demos/signal-processing/Dockerfile-nifi
@@ -1,3 +1,13 @@
-FROM oci.stackable.tech/sdp/nifi:1.27.0-stackable0.0.0-dev
+FROM oci.stackable.tech/sdp/nifi:1.28.1-stackable0.0.0-dev
-RUN curl --fail -o /stackable/nifi/postgresql-42.6.0.jar "https://repo.stackable.tech/repository/misc/postgresql-timescaledb/postgresql-42.6.0.jar"
+# This is the postgresql JDBC driver from https://jdbc.postgresql.org/download/
+# There appear to be no signatures to validate against 😬
+#
+# VERSION="42.7.5"
+# curl -O "https://jdbc.postgresql.org/download/postgresql-$VERSION.jar"
+# curl --fail -u "your_username" --upload-file "postgresql-$VERSION.jar" 'https://repo.stackable.tech/repository/misc/jdbc/'
+# rm "postgresql-$VERSION.jar"
+
+# IMPORTANT (@NickLarsenNZ): Changing this version requires a change in the NiFi template (DownloadAndWriteToDB.xml)
+ARG DRIVER_VERSION="42.7.5"
+RUN curl --fail -o "/stackable/nifi/postgresql-$DRIVER_VERSION.jar" "https://repo.stackable.tech/repository/misc/jdbc/postgresql-$DRIVER_VERSION.jar"
diff --git a/demos/signal-processing/DownloadAndWriteToDB.xml b/demos/signal-processing/DownloadAndWriteToDB.xml
index 28fcb414..f4ddccd6 100644
--- a/demos/signal-processing/DownloadAndWriteToDB.xml
+++ b/demos/signal-processing/DownloadAndWriteToDB.xml
@@ -189,7 +189,7 @@
database-driver-locations
- /stackable/nifi/postgresql-42.6.0.jar
+ /stackable/nifi/postgresql-42.7.5.jar
kerberos-user-service
diff --git a/demos/signal-processing/create-nifi-ingestion-job.yaml b/demos/signal-processing/create-nifi-ingestion-job.yaml
index 4b44a343..8f9790fd 100644
--- a/demos/signal-processing/create-nifi-ingestion-job.yaml
+++ b/demos/signal-processing/create-nifi-ingestion-job.yaml
@@ -8,18 +8,37 @@ spec:
spec:
serviceAccountName: demo-serviceaccount
initContainers:
- - name: wait-for-timescale-job
+ - name: wait-for-timescale-job-and-nifi
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
- command: ["bash", "-c", "echo 'Waiting for timescaleDB tables to be ready'
- && kubectl wait --for=condition=complete job/create-timescale-tables-job"
- ]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for timescaleDB job to be created'
+ until kubectl get job create-timescale-tables-job 2>/dev/null; do sleep 5; done
+ kubectl wait --for=create job/create-timescale-tables-job --timeout=30m
+ echo 'Waiting for timescaleDB job to be completed'
+ kubectl wait --for=condition=complete job/create-timescale-tables-job --timeout=30m
+
+ echo 'Waiting for NiFi to be created'
+ kubectl wait --for=create pod/nifi-node-default-0 --timeout=30m
+ echo 'Waiting for NiFi to be ready'
+ kubectl wait --for=condition=Ready pod/nifi-node-default-0 --timeout=30m
containers:
- name: create-nifi-ingestion-job
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
- command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \
- curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/DownloadAndWriteToDB.xml && \
- sed -i \"s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g\" DownloadAndWriteToDB.xml && \
- python -u /tmp/script/script.py"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ export PGPASSWORD=$(cat /timescale-admin-credentials/password)
+ curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/DownloadAndWriteToDB.xml
+ sed -i "s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g" DownloadAndWriteToDB.xml
+ python -u /tmp/script/script.py
volumeMounts:
- name: script
mountPath: /tmp/script
@@ -70,11 +89,12 @@ data:
nipyapi.config.nifi_config.host = f"{ENDPOINT}/nifi-api"
nipyapi.config.nifi_config.verify_ssl = False
- print("Logging in")
+ print(f"Logging in as {USERNAME}")
service_login(username=USERNAME, password=PASSWORD)
print("Logged in")
pg_id = get_root_pg_id()
+ print(f"pgid={pg_id}")
upload_template(pg_id, TEMPLATE_FILE)
diff --git a/demos/signal-processing/create-timescale-tables.yaml b/demos/signal-processing/create-timescale-tables.yaml
index 853ea310..efa8c7d6 100644
--- a/demos/signal-processing/create-timescale-tables.yaml
+++ b/demos/signal-processing/create-timescale-tables.yaml
@@ -10,17 +10,40 @@ spec:
initContainers:
- name: wait-for-timescale
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
- command: ["bash", "-c", "echo 'Waiting for timescaleDB to be ready'
- && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=postgresql-timescaledb"
- ]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ echo 'Waiting for timescaleDB to be created'
+ kubectl wait --for=create pod/postgresql-timescaledb-0 --timeout=30m
+
+ echo 'Waiting for timescaleDB to be ready'
+ kubectl wait --for=condition=ready pod/postgresql-timescaledb-0 --timeout=30m
containers:
- name: create-timescale-tables-job
image: postgres
- command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \
- echo 'Submitting DDL...' && \
- psql -U admin -h postgresql-timescaledb.default.svc.cluster.local postgres -c '\\x' -c 'CREATE DATABASE tsdb' -c '\\c tsdb' -f /tmp/sql/timescaledb.sql -c 'select count(*) from conditions' -c '\\q' && \
- echo 'Creating extension as superuser...' && \
- psql -U postgres -h postgresql-timescaledb.default.svc.cluster.local postgres -c '\\x' -c '\\c tsdb' -c 'CREATE EXTENSION timescaledb_toolkit' -c '\\q'"]
+ command:
+ - bash
+ - -euo
+ - pipefail
+ - -c
+ - |
+ export PGPASSWORD=$(cat /timescale-admin-credentials/password)
+ echo 'Submitting DDL...'
+ psql -U admin -h postgresql-timescaledb.default.svc.cluster.local postgres \
+ -c '\x' -c 'CREATE DATABASE tsdb' \
+ -c '\c tsdb' \
+ -f /tmp/sql/timescaledb.sql \
+ -c 'select count(*) from conditions' \
+ -c '\q'
+ echo 'Creating extension as superuser...'
+ psql -U postgres -h postgresql-timescaledb.default.svc.cluster.local postgres \
+ -c '\x' \
+ -c '\c tsdb' \
+ -c 'CREATE EXTENSION timescaledb_toolkit' \
+ -c '\q'
volumeMounts:
- name: script
mountPath: /tmp/sql
diff --git a/docs/modules/demos/pages/signal-processing.adoc b/docs/modules/demos/pages/signal-processing.adoc
index 4e1ac075..256b76f6 100644
--- a/docs/modules/demos/pages/signal-processing.adoc
+++ b/docs/modules/demos/pages/signal-processing.adoc
@@ -138,14 +138,14 @@ There are two located in the "Stackable Data Platform" folder.
=== Measurements
-This is the original data. The first graph plots two measurments (`r1`, `r2`), together with the model scores (`r1_score`, `r2_score`, `r1_score_lttb`).#
+The _Gas measurements_ dashboard shows the original data. The first graph plots two measurements (`r1`, `r2`), together with the model scores (`r1_score`, `r2_score`, `r1_score_lttb`).
These are superimposed on each other for ease of comparison.
image::signal-processing/measurements.png[]
=== Predictions
-In this second dashboard the predictions for all r-values are plotted: the top graph takes an average across all measurements, with a threshold marked as a red line across the top.
+The _Spectral Residuals_ dashboard shows the predictions for all r-values: the top graph takes an average across all measurements, with a threshold marked as a red line across the top.
This can be used for triggering email alerts.
Underneath the individual r-values are plotted, firstly as raw data and then the same using downsampling.
Downsampling uses a built-in Timescale extension to significantly reduce the number of data plotted while retaining the same overall shape.
diff --git a/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml b/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
index de3f4d99..1ea8a69d 100644
--- a/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
+++ b/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
@@ -5,7 +5,7 @@ metadata:
name: nifi
spec:
image:
- productVersion: 1.27.0
+ productVersion: 1.28.1
clusterConfig:
authentication:
- authenticationClass: nifi-admin-credentials
diff --git a/stacks/nifi-kafka-druid-superset-s3/nifi.yaml b/stacks/nifi-kafka-druid-superset-s3/nifi.yaml
index 8105be96..3736dd8e 100644
--- a/stacks/nifi-kafka-druid-superset-s3/nifi.yaml
+++ b/stacks/nifi-kafka-druid-superset-s3/nifi.yaml
@@ -5,7 +5,7 @@ metadata:
name: nifi
spec:
image:
- productVersion: 1.27.0
+ productVersion: 1.28.1
clusterConfig:
authentication:
- authenticationClass: nifi-admin-credentials
diff --git a/stacks/signal-processing/nifi.yaml b/stacks/signal-processing/nifi.yaml
index 7f3d3964..3af838e7 100644
--- a/stacks/signal-processing/nifi.yaml
+++ b/stacks/signal-processing/nifi.yaml
@@ -5,10 +5,12 @@ metadata:
name: nifi
spec:
image:
- productVersion: 1.27.0
+ productVersion: 1.28.1
# TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
# the demo is reproducable for the release and it will be automatically replaced for the release branch.
- custom: oci.stackable.tech/stackable/nifi:1.27.0-postgresql
+ # custom: oci.stackable.tech/stackable/nifi:2.2.0-postgresql
+ custom: oci.stackable.tech/stackable/nifi:1.28.1-postgresql
+ # pullPolicy: IfNotPresent
clusterConfig:
listenerClass: external-unstable
zookeeperConfigMapName: nifi-znode