
Commit 508ee83

Merge pull request #74 from stackabletech/fix/release-24.7-refs
fix: use release-24.7 branch references
2 parents 34b515b + 4f4ed31 commit 508ee83
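
The change itself is mechanical: every raw.githubusercontent.com URL that pointed at the main branch of stackabletech/demos now points at the release-24.7 branch, so the 24.7 release pulls manifests, NiFi flow definitions, notebooks and Superset assets that match the release instead of whatever currently sits on main. A bulk rewrite of this kind can be generated with a one-liner along the following lines (a sketch only, not necessarily how this commit was produced; it assumes GNU sed and is run from the repository root):

    # List every file that still references the main branch,
    # then rewrite the branch segment of the URL in place.
    grep -rl 'stackabletech/demos/main/' demos/ stacks/ \
      | xargs sed -i 's|stackabletech/demos/main/|stackabletech/demos/release-24.7/|g'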

14 files changed: +158 -158 lines

demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml (+1 -1)

@@ -14,7 +14,7 @@ spec:
       containers:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

demos/demos-v2.yaml (+33 -33)

@@ -7,10 +7,10 @@ demos:
       - airflow
       - job-scheduling
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 2401m
@@ -24,8 +24,8 @@ demos:
       - hdfs
       - cycling-tripdata
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: "3"
@@ -43,9 +43,9 @@ demos:
       - opa
       - keycloak
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/create-trino-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/create-spark-report.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/end-to-end-security/create-trino-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/end-to-end-security/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/end-to-end-security/create-spark-report.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 9000m
@@ -64,9 +64,9 @@ demos:
       - s3
       - earthquakes
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
     supportedNamespaces: ["default"]
     resourceRequests:
       cpu: 8700m
@@ -85,9 +85,9 @@ demos:
       - s3
       - water-levels
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
     supportedNamespaces: ["default"]
     resourceRequests:
       cpu: 8900m
@@ -104,10 +104,10 @@ demos:
       - s3
       - ny-taxi-data
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6400m
@@ -139,9 +139,9 @@ demos:
       - s3
       - ny-taxi-data
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/create-table-in-trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/trino-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/trino-taxi-data/create-table-in-trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/trino-taxi-data/setup-superset.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6800m
@@ -164,12 +164,12 @@ demos:
       - water-levels
       - earthquakes
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
     supportedNamespaces: ["default"]
     resourceRequests:
       cpu: "80"
@@ -185,7 +185,7 @@ demos:
       - pyspark
      - ny-taxi-data
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 3350m
@@ -202,7 +202,7 @@ demos:
       - vector
       - zookeeper
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/logging/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/logging/zookeeper.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6500m
@@ -218,9 +218,9 @@ demos:
       - grafana-dashboards
       - zookeeper
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-timescale-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/signal-processing/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/signal-processing/create-timescale-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/signal-processing/create-nifi-ingestion-job.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: "3"

demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

demos/nifi-kafka-druid-water-level-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

demos/signal-processing/create-nifi-ingestion-job.yaml (+1 -1)

@@ -17,7 +17,7 @@ spec:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
           command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \
-                    curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/DownloadAndWriteToDB.xml && \
+                    curl -O https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/signal-processing/DownloadAndWriteToDB.xml && \
                     sed -i \"s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g\" DownloadAndWriteToDB.xml && \
                     python -u /tmp/script/script.py"]
           volumeMounts:

demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

demos/trino-taxi-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-24.7/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

stacks/_templates/jupyterhub.yaml (+1 -1)

@@ -50,7 +50,7 @@ options:
         initContainers:
           - name: download-notebook
             image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0
-            command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
+            command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-24.7/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
             volumeMounts:
               - mountPath: /notebook
                 name: notebook

stacks/end-to-end-security/superset.yaml (+1 -1)

@@ -31,7 +31,7 @@ spec:
             - -c
             - |
               cd /tmp
-              curl --fail -O https://raw.githubusercontent.com/stackabletech/demos/main/stacks/end-to-end-security/postgres_superset_dump.sql.gz
+              curl --fail -O https://raw.githubusercontent.com/stackabletech/demos/release-24.7/stacks/end-to-end-security/postgres_superset_dump.sql.gz
               gunzip postgres_superset_dump.sql.gz

               # We need to omit changing the users password, as otherwise the content in the Secrets does not match

stacks/signal-processing/jupyterhub.yaml (+1 -1)

@@ -39,7 +39,7 @@ options:
         initContainers:
           - name: download-notebook
             image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0
-            command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb']
+            command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-24.7/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb']
             volumeMounts:
               - mountPath: /notebook
                 name: notebook
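
After a sweep like this it is worth confirming that no main-branch references were left behind in the demo and stack directories. A simple check, run from the repository root (assumes the same directory layout as the files above):

    # Should print nothing once every reference has been switched to release-24.7.
    grep -rn 'stackabletech/demos/main/' demos/ stacks/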
