Skip to content

Commit 2852ab2

Browse files
authored Jan 31, 2025
chore: update repo references to oci (#141)
1 parent d4baa48 commit 2852ab2

36 files changed

+49
-49
lines changed
 

‎demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: start-pyspark-job
11-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
1212
# N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
1313
# restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps
1414
# below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that

‎demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: start-date-job
11-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
1212
# N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
1313
# restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps
1414
# below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that

‎demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,11 @@ spec:
99
serviceAccountName: demo-serviceaccount
1010
initContainers:
1111
- name: wait-for-kafka
12-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
12+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
1313
command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/instance=kafka -l app.kubernetes.io/name=kafka"]
1414
containers:
1515
- name: create-nifi-ingestion-job
16-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
16+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1717
command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"]
1818
volumeMounts:
1919
- name: script

‎demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,11 @@ spec:
1212
serviceAccountName: demo-serviceaccount
1313
initContainers:
1414
- name: wait-for-kafka
15-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
15+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
1616
command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=kafka -l app.kubernetes.io/instance=kafka"]
1717
containers:
1818
- name: create-spark-ingestion-job
19-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
19+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
2020
command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ingestion-job.yaml"]
2121
volumeMounts:
2222
- name: manifest

‎demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,11 @@ spec:
99
serviceAccountName: demo-serviceaccount
1010
initContainers:
1111
- name: wait-for-testdata
12-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
12+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
1313
command: ["bash", "-c", "echo 'Waiting for job load-test-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-test-data"]
1414
containers:
1515
- name: create-tables-in-trino
16-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
16+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1717
command: ["bash", "-c", "python -u /tmp/script/script.py"]
1818
volumeMounts:
1919
- name: script

‎demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: setup-superset
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎demos/end-to-end-security/create-spark-report.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ spec:
1212
serviceAccountName: demo-serviceaccount
1313
initContainers:
1414
- name: wait-for-trino-tables
15-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
15+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1616
command:
1717
- bash
1818
- -euo
@@ -23,7 +23,7 @@ spec:
2323
kubectl wait --timeout=30m --for=condition=complete job/create-tables-in-trino
2424
containers:
2525
- name: create-spark-report
26-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
26+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
2727
command:
2828
- bash
2929
- -euo

‎demos/end-to-end-security/create-trino-tables.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: create-tables-in-trino
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ spec:
99
spec:
1010
containers:
1111
- name: create-hfile-and-import-to-hbase
12-
image: docker.stackable.tech/stackable/hbase:2.4.18-stackable0.0.0-dev
12+
image: oci.stackable.tech/sdp/hbase:2.4.18-stackable0.0.0-dev
1313
env:
1414
- name: HADOOP_USER_NAME
1515
value: stackable

‎demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ spec:
1111
# We use 24.3.0 here which contains the distcp MapReduce components
1212
# This is not included in the 24.7 and 24.11 images and will fail.
1313
# See: https://github.com/stackabletech/docker-images/issues/793
14-
image: docker.stackable.tech/stackable/hadoop:3.3.6-stackable24.3.0
14+
image: oci.stackable.tech/sdp/hadoop:3.3.6-stackable24.3.0
1515
env:
1616
- name: HADOOP_USER_NAME
1717
value: stackable

‎demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/Dockerfile

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
FROM docker.stackable.tech/stackable/spark-k8s:3.5.0-stackable24.3.0
1+
FROM oci.stackable.tech/sdp/spark-k8s:3.5.0-stackable24.3.0
22

33
COPY demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/requirements.txt .
44

‎demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: load-ny-taxi-data
11-
image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/hadoop:3.4.0-stackable0.0.0-dev
1212
# yamllint disable rule:line-length
1313
command: ["bash", "-c", "/stackable/hadoop/bin/hdfs dfs -mkdir -p /ny-taxi-data/raw \
1414
&& cd /tmp \

‎demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: create-druid-ingestion-job
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor"]
1313
volumeMounts:
1414
- name: ingestion-job-spec

‎demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: create-nifi-ingestion-job
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: setup-superset
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: create-druid-ingestion-job
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/stations-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-compaction-job-spec.json https://druid-coordinator:8281/druid/coordinator/v1/config/compaction"]
1313
volumeMounts:
1414
- name: ingestion-job-spec

‎demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: create-nifi-ingestion-job
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎demos/nifi-kafka-druid-water-level-data/setup-superset.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: setup-superset
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script
+1-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
FROM docker.stackable.tech/stackable/nifi:1.27.0-stackable0.0.0-dev
1+
FROM oci.stackable.tech/sdp/nifi:1.27.0-stackable0.0.0-dev
22

33
RUN curl --fail -o /stackable/nifi/postgresql-42.6.0.jar "https://repo.stackable.tech/repository/misc/postgresql-timescaledb/postgresql-42.6.0.jar"

‎demos/signal-processing/create-nifi-ingestion-job.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,13 @@ spec:
99
serviceAccountName: demo-serviceaccount
1010
initContainers:
1111
- name: wait-for-timescale-job
12-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
12+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
1313
command: ["bash", "-c", "echo 'Waiting for timescaleDB tables to be ready'
1414
&& kubectl wait --for=condition=complete job/create-timescale-tables-job"
1515
]
1616
containers:
1717
- name: create-nifi-ingestion-job
18-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
18+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1919
command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \
2020
curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/DownloadAndWriteToDB.xml && \
2121
sed -i \"s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g\" DownloadAndWriteToDB.xml && \

‎demos/signal-processing/create-timescale-tables.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ spec:
99
serviceAccountName: demo-serviceaccount
1010
initContainers:
1111
- name: wait-for-timescale
12-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
12+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
1313
command: ["bash", "-c", "echo 'Waiting for timescaleDB to be ready'
1414
&& kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=postgresql-timescaledb"
1515
]

‎demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,11 @@ spec:
88
spec:
99
initContainers:
1010
- name: wait-for-testdata
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "echo 'Waiting for job load-ny-taxi-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-ny-taxi-data"]
1313
containers:
1414
- name: create-spark-anomaly-detection-job
15-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
15+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1616
command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ad-job.yaml"]
1717
volumeMounts:
1818
- name: manifest

‎demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: setup-superset
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎demos/trino-taxi-data/create-table-in-trino.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: create-ny-taxi-data-table-in-trino
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎demos/trino-taxi-data/setup-superset.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: setup-superset
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"]
1313
volumeMounts:
1414
- name: script

‎docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc

+4-4
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@ Libraries can be added to a custom *product* image launched by the notebook. Sup
159159
spark = (SparkSession
160160
.builder
161161
.master(f'k8s://https://{os.environ["KUBERNETES_SERVICE_HOST"]}:{os.environ["KUBERNETES_SERVICE_PORT"]}')
162-
.config("spark.kubernetes.container.image", "docker.stackable.tech/demos/spark-k8s-with-scikit-learn:3.5.0-stackable24.3.0")
162+
.config("spark.kubernetes.container.image", "oci.stackable.tech/stackable/spark-k8s-with-scikit-learn:3.5.0-stackable24.3.0")
163163
.config("spark.driver.port", "2222")
164164
.config("spark.driver.blockManager.port", "7777")
165165
.config("spark.driver.host", "driver-service.default.svc.cluster.local")
@@ -182,16 +182,16 @@ It requires a specific Spark image:
182182
[source,python]
183183
----
184184
.config("spark.kubernetes.container.image",
185-
"docker.stackable.tech/demos/spark-k8s-with-scikit-learn:3.5.0-stackable24.3.0"),
185+
"oci.stackable.tech/stackable/spark-k8s-with-scikit-learn:3.5.0-stackable24.3.0"),
186186
...
187187
----
188188

189-
This is created by taking a Spark image, in this case `docker.stackable.tech/stackable/spark-k8s:3.5.0-stackable24.3.0`, installing specific python libraries into it
189+
This is created by taking a Spark image, in this case `oci.stackable.tech/sdp/spark-k8s:3.5.0-stackable24.3.0`, installing specific python libraries into it
190190
, and re-tagging the image:
191191

192192
[source,console]
193193
----
194-
FROM docker.stackable.tech/stackable/spark-k8s:3.5.0-stackable24.3.0
194+
FROM oci.stackable.tech/sdp/spark-k8s:3.5.0-stackable24.3.0
195195
196196
COPY demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/requirements.txt .
197197

‎stacks/_templates/jupyterhub.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ options:
5050
HADOOP_CONF_DIR: "/home/jovyan/hdfs"
5151
initContainers:
5252
- name: download-notebook
53-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
53+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
5454
command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb']
5555
volumeMounts:
5656
- mountPath: /notebook

‎stacks/_templates/keycloak.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ spec:
4848
- name: tls
4949
mountPath: /tls/
5050
- name: create-auth-class
51-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
51+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
5252
command: ["/bin/bash", "-c"]
5353
args:
5454
- |

‎stacks/end-to-end-security/krb5.yaml

+4-4
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ spec:
1414
spec:
1515
initContainers:
1616
- name: init
17-
image: docker.stackable.tech/stackable/krb5:1.21.1-stackable0.0.0-dev
17+
image: oci.stackable.tech/sdp/krb5:1.21.1-stackable0.0.0-dev
1818
args:
1919
- sh
2020
- -euo
@@ -35,7 +35,7 @@ spec:
3535
name: data
3636
containers:
3737
- name: kdc
38-
image: docker.stackable.tech/stackable/krb5:1.21.1-stackable0.0.0-dev
38+
image: oci.stackable.tech/sdp/krb5:1.21.1-stackable0.0.0-dev
3939
args:
4040
- krb5kdc
4141
- -n
@@ -48,7 +48,7 @@ spec:
4848
- mountPath: /var/kerberos/krb5kdc
4949
name: data
5050
- name: kadmind
51-
image: docker.stackable.tech/stackable/krb5:1.21.1-stackable0.0.0-dev
51+
image: oci.stackable.tech/sdp/krb5:1.21.1-stackable0.0.0-dev
5252
args:
5353
- kadmind
5454
- -nofork
@@ -61,7 +61,7 @@ spec:
6161
- mountPath: /var/kerberos/krb5kdc
6262
name: data
6363
- name: client
64-
image: docker.stackable.tech/stackable/krb5:1.21.1-stackable0.0.0-dev
64+
image: oci.stackable.tech/sdp/krb5:1.21.1-stackable0.0.0-dev
6565
tty: true
6666
stdin: true
6767
env:

‎stacks/end-to-end-security/superset.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ spec:
2525
initContainers:
2626
# The postgres image does not contain curl or wget...
2727
- name: download-dump
28-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
28+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
2929
command:
3030
- bash
3131
- -c

‎stacks/jupyterhub-pyspark-hdfs/notebook.ipynb

+1-1
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@
4545
" SparkSession\n",
4646
" .builder\n",
4747
" .master(f'k8s://https://{os.environ[\"KUBERNETES_SERVICE_HOST\"]}:{os.environ[\"KUBERNETES_SERVICE_PORT\"]}')\n",
48-
" .config(\"spark.kubernetes.container.image\", \"docker.stackable.tech/demos/spark-k8s-with-scikit-learn:3.5.0-stackable24.3.0\")\n",
48+
" .config(\"spark.kubernetes.container.image\", \"oci.stackable.tech/stackable/spark-k8s-with-scikit-learn:3.5.0-stackable24.3.0\")\n",
4949
" .config(\"spark.driver.port\", \"2222\")\n",
5050
" .config(\"spark.driver.blockManager.port\", \"7777\")\n",
5151
" .config(\"spark.driver.host\", f\"driver-service.{NAMESPACE}.svc.cluster.local\")\n",

‎stacks/keycloak-opa-poc/keycloak.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ spec:
7070
spec:
7171
containers:
7272
- name: propagate-keycloak-address
73-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
73+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
7474
command:
7575
- bash
7676
- -x

‎stacks/keycloak-opa-poc/setup-keycloak.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ spec:
2929
spec:
3030
containers:
3131
- name: setup-keycloak
32-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
32+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
3333
env:
3434
- name: KEYCLOAK_ADMIN_PASSWORD
3535
valueFrom:

‎stacks/logging/setup-opensearch-dashboards.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
spec:
99
containers:
1010
- name: setup-opensearch-dashboards
11-
image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev
11+
image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
1212
env:
1313
- name: OPEN_SEARCH_ADMIN_PASSWORD
1414
valueFrom:

‎stacks/signal-processing/jupyterhub.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ options:
3232
image:
3333
# TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
3434
# the demo is reproducable for the release and it will be automatically replaced for the release branch.
35-
name: docker.stackable.tech/demos/jupyter-pyspark-with-alibi-detect
35+
name: oci.stackable.tech/stackable/jupyter-pyspark-with-alibi-detect
3636
tag: python-3.9
3737
serviceAccountName: spark
3838
networkPolicy:
@@ -41,7 +41,7 @@ options:
4141
stackable.tech/vendor: Stackable
4242
initContainers:
4343
- name: download-notebook
44-
image: docker.stackable.tech/stackable/tools:1.0.0-stackable0.0.0-dev
44+
image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
4545
command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb']
4646
volumeMounts:
4747
- mountPath: /notebook

‎stacks/signal-processing/nifi.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
productVersion: 1.27.0
99
# TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
1010
# the demo is reproducable for the release and it will be automatically replaced for the release branch.
11-
custom: docker.stackable.tech/demos/nifi:1.27.0-postgresql
11+
custom: oci.stackable.tech/stackable/nifi:1.27.0-postgresql
1212
clusterConfig:
1313
listenerClass: external-unstable
1414
zookeeperConfigMapName: nifi-znode

0 commit comments

Comments (0)
Please sign in to comment.