Commit 903a2f1

Merge pull request #72 from stackabletech/fix/release-23.11
fix: use release-23.11 branch references
2 parents: fd75ca9 + 457eb83 · commit 903a2f1

14 files changed (+120 -120 lines)
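Every change in this commit follows the same pattern: raw GitHub URLs that previously pointed at the main branch of stackabletech/demos (and, in the docs page, stackabletech/stackablectl) now point at the release-23.11 branch, so the manifests shipped with the 23.11 release keep referencing the release state even as main moves on. A bulk rewrite like this can be done with a find/sed sweep over the repository; the snippet below is only a sketch of such a sweep (an assumed workflow, not the command used for this PR) and assumes GNU sed.

# Sketch only (assumed workflow, not part of this commit): rewrite branch
# references from main to release-23.11 in YAML and AsciiDoc files.
find demos stacks docs -type f \( -name '*.yaml' -o -name '*.adoc' \) -print0 |
  xargs -0 sed -i \
    -e 's|stackabletech/demos/main/|stackabletech/demos/release-23.11/|g' \
    -e 's|stackabletech/stackablectl/blob/main/|stackabletech/stackablectl/blob/release-23.11/|g'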

Diff for: demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml (+1 -1)

@@ -14,7 +14,7 @@ spec:
       containers:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: demos/demos-v2.yaml (+30 -30)

@@ -7,10 +7,10 @@ demos:
       - airflow
       - job-scheduling
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 2401m

@@ -24,8 +24,8 @@ demos:
       - hdfs
      - cycling-tripdata
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: "3"

@@ -44,9 +44,9 @@ demos:
       - s3
       - earthquakes
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
     supportedNamespaces: ["default"]
     resourceRequests:
       cpu: 8700m

@@ -65,9 +65,9 @@ demos:
       - s3
       - water-levels
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
     supportedNamespaces: ["default"]
     resourceRequests:
       cpu: 8900m

@@ -84,10 +84,10 @@ demos:
       - s3
       - ny-taxi-data
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6400m

@@ -119,9 +119,9 @@ demos:
       - s3
       - ny-taxi-data
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/create-table-in-trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/trino-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/trino-taxi-data/create-table-in-trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/trino-taxi-data/setup-superset.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6800m

@@ -144,12 +144,12 @@ demos:
       - water-levels
       - earthquakes
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
     supportedNamespaces: ["default"]
     resourceRequests:
       cpu: "80"

@@ -165,7 +165,7 @@ demos:
       - pyspark
       - ny-taxi-data
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 3350m

@@ -182,7 +182,7 @@ demos:
       - vector
       - zookeeper
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/logging/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/logging/zookeeper.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: 6500m

@@ -198,9 +198,9 @@ demos:
       - grafana-dashboards
       - zookeeper
     manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-timescale-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/signal-processing/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/signal-processing/create-timescale-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/signal-processing/create-nifi-ingestion-job.yaml
     supportedNamespaces: []
     resourceRequests:
       cpu: "3"

Diff for: demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: demos/nifi-kafka-druid-water-level-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: demos/signal-processing/create-nifi-ingestion-job.yaml (+1 -1)

@@ -17,7 +17,7 @@ spec:
         - name: create-nifi-ingestion-job
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
           command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \
-                    curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/DownloadAndWriteToDB.xml && \
+                    curl -O https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/signal-processing/DownloadAndWriteToDB.xml && \
                     sed -i \"s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g\" DownloadAndWriteToDB.xml && \
                     python -u /tmp/script/script.py"]
           volumeMounts:

Diff for: demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: demos/trino-taxi-data/setup-superset.yaml (+1 -1)

@@ -9,7 +9,7 @@ spec:
       containers:
         - name: setup-superset
           image: docker.stackable.tech/stackable/testing-tools:0.1.0-stackable0.1.0
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-23.11/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
           volumeMounts:
             - name: script
               mountPath: /tmp/script

Diff for: docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc (+1 -1)

@@ -1,6 +1,6 @@
 = data-lakehouse-iceberg-trino-spark
 
-:demo-code: https://github.com/stackabletech/stackablectl/blob/main/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
+:demo-code: https://github.com/stackabletech/stackablectl/blob/release-23.11/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
 :iceberg-table-maintenance: https://iceberg.apache.org/docs/latest/spark-procedures/#metadata-management
 :spark-streaming-docs: https://spark.apache.org/docs/latest/structured-streaming-kafka-integration.html
 :iceberg-rewrite: https://iceberg.apache.org/docs/latest/spark-procedures/#rewrite_data_files

Diff for: stacks/_templates/jupyterhub.yaml (+1 -1)

@@ -43,7 +43,7 @@ options:
       initContainers:
         - name: download-notebook
           image: docker.stackable.tech/stackable/tools:1.0.0-stackable23.4
-          command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
+          command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-23.11/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
           volumeMounts:
             - mountPath: /notebook
               name: notebook

Diff for: stacks/signal-processing/jupyterhub.yaml (+1 -1)

@@ -35,7 +35,7 @@ options:
       initContainers:
         - name: download-notebook
           image: docker.stackable.tech/stackable/tools:1.0.0-stackable23.4
-          command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb']
+          command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/release-23.11/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb']
           volumeMounts:
             - mountPath: /notebook
               name: notebook
