diff --git a/.yamllint.yaml b/.yamllint.yaml
index 1a2db9f6..ac4941c4 100644
--- a/.yamllint.yaml
+++ b/.yamllint.yaml
@@ -7,7 +7,4 @@ rules:
check-keys: false
comments:
min-spaces-from-content: 1 # Needed due to https://github.com/adrienverge/yamllint/issues/443
- braces:
- ignore: |
- stacks/signal-processing/*
- stacks/_templates/*
+ braces: disable # because the YAML files are templates which can contain {{ ... }}
diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
index 8915daf5..813aea2c 100644
--- a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
+++ b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
@@ -43,13 +43,13 @@ data:
stackable.tech/vendor: Stackable
spec:
sparkImage:
- productVersion: 3.5.1
+ productVersion: 3.5.2
mode: cluster
mainApplicationFile: local:///stackable/spark/jobs/spark-ingest-into-lakehouse.py
deps:
packages:
- - org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.5.0
- - org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.1
+ - org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.6.1
+ - org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.2
s3connection:
reference: minio
sparkConf:
diff --git a/demos/end-to-end-security/create-spark-report.yaml b/demos/end-to-end-security/create-spark-report.yaml
index c256f2b1..c72845e5 100644
--- a/demos/end-to-end-security/create-spark-report.yaml
+++ b/demos/end-to-end-security/create-spark-report.yaml
@@ -55,7 +55,7 @@ data:
name: spark-report
spec:
sparkImage:
- productVersion: 3.5.1
+ productVersion: 3.5.2
mode: cluster
mainApplicationFile: local:///stackable/spark/jobs/spark-report.py
deps:
diff --git a/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml b/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
index d487b8a6..67cfe784 100644
--- a/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
+++ b/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
@@ -11,7 +11,6 @@ spec:
- # We use 24.3.0 here which contains the distcp MapReduce components
- # This is not included in the 24.7 images and will fail.
- # See: https://github.com/stackabletech/docker-images/issues/793
- image: docker.stackable.tech/stackable/hadoop:3.3.4-stackable24.3.0
+ # The image must contain the distcp MapReduce components.
+ # See: https://github.com/stackabletech/docker-images/issues/793
+ image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable0.0.0-dev
env:
- name: HADOOP_USER_NAME
value: stackable
@@ -19,6 +19,7 @@ spec:
value: "/stackable/conf/hdfs"
- name: HADOOP_CLASSPATH
value: "/stackable/hadoop/share/hadoop/tools/lib/*.jar"
+ # yamllint disable-line rule:line-length
command: ["bash", "-c", "bin/hdfs dfs -mkdir -p /data/raw && bin/hadoop distcp -D fs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider s3a://public-backup-nyc-tlc/cycling-tripdata/demo-cycling-tripdata.csv.gz hdfs://hdfs/data/raw"]
volumeMounts:
- name: config-volume-hdfs
diff --git a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
index 7add3113..d02c5088 100644
--- a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
+++ b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
@@ -8,13 +8,15 @@ spec:
spec:
containers:
- name: load-ny-taxi-data
- image: docker.stackable.tech/stackable/hadoop:3.3.4-stackable24.7.0
+ image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable0.0.0-dev
+ # yamllint disable rule:line-length
command: ["bash", "-c", "/stackable/hadoop/bin/hdfs dfs -mkdir -p /ny-taxi-data/raw \
&& cd /tmp \
&& for month in 2020-09; do \
curl -O https://repo.stackable.tech/repository/misc/ny-taxi-data/fhvhv_tripdata_$month.parquet \
&& /stackable/hadoop/bin/hdfs dfs -put fhvhv_tripdata_$month.parquet /ny-taxi-data/raw/; \
done"]
+ # yamllint enable rule:line-length
volumeMounts:
- name: hdfs-discovery-configmap
mountPath: /hdfs
diff --git a/demos/signal-processing/DownloadAndWriteToDB.xml b/demos/signal-processing/DownloadAndWriteToDB.xml
index 6bacabef..28fcb414 100644
--- a/demos/signal-processing/DownloadAndWriteToDB.xml
+++ b/demos/signal-processing/DownloadAndWriteToDB.xml
@@ -61,7 +61,7 @@
<artifact>nifi-dbcp-service-nar</artifact>
<group>org.apache.nifi</group>
- <version>1.21.0</version>
+ <version>1.27.0</version>
@@ -258,7 +258,7 @@
<artifact>nifi-record-serialization-services-nar</artifact>
<group>org.apache.nifi</group>
- <version>1.21.0</version>
+ <version>1.27.0</version>
@@ -561,7 +561,7 @@
88.0
@@ -584,7 +584,7 @@ through "future" data that has already been persisted.
<artifact>nifi-standard-nar</artifact>
<group>org.apache.nifi</group>
- <version>1.21.0</version>
+ <version>1.27.0</version>
PENALIZE_FLOWFILE
@@ -1069,7 +1069,7 @@ through "future" data that has already been persisted.
<artifact>nifi-standard-nar</artifact>
<group>org.apache.nifi</group>
- <version>1.21.0</version>
+ <version>1.27.0</version>
PENALIZE_FLOWFILE
@@ -1223,7 +1223,7 @@ from conditions_temp;
<artifact>nifi-standard-nar</artifact>
<group>org.apache.nifi</group>
- <version>1.21.0</version>
+ <version>1.27.0</version>
PENALIZE_FLOWFILE
diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
index 39cf03a3..5dce76c3 100644
--- a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
+++ b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
@@ -37,7 +37,7 @@ data:
name: spark-ad
spec:
sparkImage:
- productVersion: 3.5.1
+ productVersion: 3.5.2
mode: cluster
mainApplicationFile: local:///spark-scripts/spark-ad.py
deps:
diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
index 395249eb..299d473a 100644
--- a/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
+++ b/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
@@ -9,6 +9,7 @@ spec:
containers:
- name: load-ny-taxi-data
image: "bitnami/minio:2022-debian-10"
+ # yamllint disable-line rule:line-length
command: ["bash", "-c", "cd /tmp && for month in 2020-09 2020-10 2020-11 2020-12; do curl -O https://repo.stackable.tech/repository/misc/ny-taxi-data/fhvhv_tripdata_$month.parquet && mc --insecure alias set minio http://minio:9000/ $(cat /minio-s3-credentials/accessKey) $(cat /minio-s3-credentials/secretKey) && mc cp fhvhv_tripdata_$month.parquet minio/demo/ny-taxi-data/raw/ && mc mb --ignore-existing minio/prediction; done"]
volumeMounts:
- name: minio-s3-credentials
diff --git a/demos/trino-taxi-data/load-test-data.yaml b/demos/trino-taxi-data/load-test-data.yaml
index d0639ab6..4afe0d58 100644
--- a/demos/trino-taxi-data/load-test-data.yaml
+++ b/demos/trino-taxi-data/load-test-data.yaml
@@ -9,6 +9,7 @@ spec:
containers:
- name: load-ny-taxi-data
image: "bitnami/minio:2024-debian-12"
+ # yamllint disable-line rule:line-length
command: ["bash", "-c", "cd /tmp && for month in 2020-01 2020-02 2020-03 2020-04 2020-05 2020-06 2020-07 2020-08 2020-09 2020-10 2020-11 2020-12 2021-01 2021-02 2021-03 2021-04 2021-05 2021-06 2021-07 2021-08 2021-09 2021-10 2021-11 2021-12 2022-01 2022-02 2022-03 2022-04; do curl -O https://repo.stackable.tech/repository/misc/ny-taxi-data/yellow_tripdata_$month.parquet && mc --insecure alias set minio http://minio:9000/ $(cat /minio-s3-credentials/accessKey) $(cat /minio-s3-credentials/secretKey) && mc cp yellow_tripdata_$month.parquet minio/demo/ny-taxi-data/raw/; done"]
volumeMounts:
- name: minio-s3-credentials
diff --git a/docs/modules/demos/pages/trino-iceberg.adoc b/docs/modules/demos/pages/trino-iceberg.adoc
index ef6439f5..34af036d 100644
--- a/docs/modules/demos/pages/trino-iceberg.adoc
+++ b/docs/modules/demos/pages/trino-iceberg.adoc
@@ -78,7 +78,7 @@ As an alternative, you can use https://trino.io/download.html[trino-cli] by runn
[source,console]
----
-$ java -jar ~/Downloads/trino-cli-451-executable.jar --user admin --insecure --password --server https://172.18.0.2:30856
+$ java -jar ~/Downloads/trino-cli-455-executable.jar --user admin --insecure --password --server https://172.18.0.2:30856
----
Make sure to replace the server endpoint with the endpoint listed in the `stackablectl stacklet list` output.
diff --git a/stacks/_templates/vector-aggregator.yaml b/stacks/_templates/vector-aggregator.yaml
index c6bf486b..49afd887 100644
--- a/stacks/_templates/vector-aggregator.yaml
+++ b/stacks/_templates/vector-aggregator.yaml
@@ -4,11 +4,11 @@ name: vector
repo:
name: vector
url: https://helm.vector.dev
-version: 0.37.0
+version: 0.36.1 # app version 0.41.1
options:
commonLabels:
stackable.tech/vendor: Stackable
- podLabels: # Doesn't seem to work?
+ podLabels:
stackable.tech/vendor: Stackable
role: Aggregator
customConfig:
@@ -26,7 +26,7 @@ options:
- https://opensearch-cluster-master.default.svc.cluster.local:9200
mode: bulk
# The auto-detection of the API version does not work in Vector
- # 0.39.0 for OpenSearch, so the version must be set explicitly
+ # 0.41.1 for OpenSearch, so the version must be set explicitly
# (see https://github.com/vectordotdev/vector/issues/17690).
api_version: v8
tls:
diff --git a/stacks/airflow/airflow.yaml b/stacks/airflow/airflow.yaml
index 03d2b278..d382de3c 100644
--- a/stacks/airflow/airflow.yaml
+++ b/stacks/airflow/airflow.yaml
@@ -1,12 +1,12 @@
-{% raw %}
---
+# {% raw %}
apiVersion: airflow.stackable.tech/v1alpha1
kind: AirflowCluster
metadata:
name: airflow
spec:
image:
- productVersion: 2.9.2
+ productVersion: 2.9.3
clusterConfig:
listenerClass: external-unstable
loadExamples: false
@@ -272,7 +272,7 @@ data:
spec:
version: "1.0"
sparkImage:
- productVersion: 3.5.1
+ productVersion: 3.5.2
mode: cluster
mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
job:
@@ -285,7 +285,7 @@ data:
limit: 512Mi
driver:
config:
- resources:
+ resources:
cpu:
min: 1000m
max: 1200m
@@ -293,14 +293,14 @@ data:
limit: 1024Mi
executor:
config:
- resources:
+ resources:
cpu:
min: 500m
max: 1000m
memory:
limit: 1024Mi
replicas: 3
-{% endraw %}
+# {% endraw %}
---
apiVersion: v1
kind: Secret
diff --git a/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml b/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
index 20a8c805..de3f4d99 100644
--- a/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
+++ b/stacks/data-lakehouse-iceberg-trino-spark/nifi.yaml
@@ -5,7 +5,7 @@ metadata:
name: nifi
spec:
image:
- productVersion: 1.25.0
+ productVersion: 1.27.0
clusterConfig:
authentication:
- authenticationClass: nifi-admin-credentials
diff --git a/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml b/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml
index 10274bd2..6ba799fd 100644
--- a/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml
+++ b/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml
@@ -5,7 +5,7 @@ metadata:
name: trino
spec:
image:
- productVersion: "451"
+ productVersion: "455"
clusterConfig:
listenerClass: external-unstable
catalogLabelSelector:
@@ -114,7 +114,7 @@ metadata:
name: opa
spec:
image:
- productVersion: 0.66.0
+ productVersion: 0.67.1
servers:
roleGroups:
default: {}
diff --git a/stacks/dual-hive-hdfs-s3/hdfs.yaml b/stacks/dual-hive-hdfs-s3/hdfs.yaml
index 386e8793..5c6b6d83 100644
--- a/stacks/dual-hive-hdfs-s3/hdfs.yaml
+++ b/stacks/dual-hive-hdfs-s3/hdfs.yaml
@@ -25,7 +25,7 @@ metadata:
name: hdfs
spec:
image:
- productVersion: 3.3.4
+ productVersion: 3.4.0
clusterConfig:
listenerClass: external-unstable
dfsReplication: 1
diff --git a/stacks/dual-hive-hdfs-s3/trino.yaml b/stacks/dual-hive-hdfs-s3/trino.yaml
index 0ac7bb9f..9aff8a9c 100644
--- a/stacks/dual-hive-hdfs-s3/trino.yaml
+++ b/stacks/dual-hive-hdfs-s3/trino.yaml
@@ -61,7 +61,7 @@ metadata:
name: trino
spec:
image:
- productVersion: "451"
+ productVersion: "455"
clusterConfig:
authorization:
opa:
diff --git a/stacks/end-to-end-security/hdfs.yaml b/stacks/end-to-end-security/hdfs.yaml
index 0330e001..e976e21e 100644
--- a/stacks/end-to-end-security/hdfs.yaml
+++ b/stacks/end-to-end-security/hdfs.yaml
@@ -1,10 +1,11 @@
+---
apiVersion: hdfs.stackable.tech/v1alpha1
kind: HdfsCluster
metadata:
name: hdfs
spec:
image:
- productVersion: 3.3.4
+ productVersion: 3.4.0
clusterConfig:
zookeeperConfigMapName: hdfs-znode
authentication:
@@ -29,8 +30,9 @@ spec:
level: DEBUG
configOverrides: &configOverrides
core-site.xml:
- # The idea is that the user "hive" can't do anything in hdfs, *but* it can impersonate other users
- # (such as trino), that have the needed permissions
+ # The idea is that the user "hive" can't do anything in HDFS,
+ # *but* it can impersonate other users (such as trino)
+ # that have the needed permissions
hadoop.proxyuser.hive.users: "*"
hadoop.proxyuser.hive.hosts: "*"
roleGroups:
diff --git a/stacks/end-to-end-security/opa.yaml b/stacks/end-to-end-security/opa.yaml
index 96e4dec9..371966e0 100644
--- a/stacks/end-to-end-security/opa.yaml
+++ b/stacks/end-to-end-security/opa.yaml
@@ -5,7 +5,7 @@ metadata:
name: opa
spec:
image:
- productVersion: 0.66.0
+ productVersion: 0.67.1
clusterConfig:
userInfo:
backend:
diff --git a/stacks/end-to-end-security/trino-regorules.yaml b/stacks/end-to-end-security/trino-regorules.yaml
index cb45f4e8..4a1afd9f 100644
--- a/stacks/end-to-end-security/trino-regorules.yaml
+++ b/stacks/end-to-end-security/trino-regorules.yaml
@@ -1,5 +1,5 @@
-{% raw %}
---
+# {% raw %}
apiVersion: v1
kind: ConfigMap
metadata:
@@ -1460,7 +1460,7 @@ data:
# "groups": ["group1", ...],
# "user": "username",
# },
- # "softwareStack": {"trinoVersion": "442"},
+ # "softwareStack": {"trinoVersion": "455"},
# }
# }
#
@@ -1610,7 +1610,7 @@ data:
# "groups": ["group1", ...],
# "user": "username",
# },
- # "softwareStack": {"trinoVersion": "442"},
+ # "softwareStack": {"trinoVersion": "455"},
# }
# }
#
@@ -1671,7 +1671,7 @@ data:
# "groups": ["group1", ...],
# "user": "username",
# },
- # "softwareStack": {"trinoVersion": "442"},
+ # "softwareStack": {"trinoVersion": "455"},
# }
# }
#
@@ -1736,7 +1736,7 @@ data:
# "groups": ["group1", ...],
# "user": "username",
# },
- # "softwareStack": {"trinoVersion": "442"},
+ # "softwareStack": {"trinoVersion": "455"},
# }
# }
#
@@ -1797,4 +1797,4 @@ data:
regex.match(pattern_with_anchors, value)
}
-{% endraw %}
+# {% endraw %}
diff --git a/stacks/end-to-end-security/trino.yaml b/stacks/end-to-end-security/trino.yaml
index 4aebad31..5f5d4f0a 100644
--- a/stacks/end-to-end-security/trino.yaml
+++ b/stacks/end-to-end-security/trino.yaml
@@ -5,7 +5,7 @@ metadata:
name: trino
spec:
image:
- productVersion: "451"
+ productVersion: "455"
clusterConfig:
listenerClass: external-unstable
tls:
diff --git a/stacks/hdfs-hbase/hdfs.yaml b/stacks/hdfs-hbase/hdfs.yaml
index 07649b3b..c65a2b9a 100644
--- a/stacks/hdfs-hbase/hdfs.yaml
+++ b/stacks/hdfs-hbase/hdfs.yaml
@@ -1,10 +1,11 @@
+---
apiVersion: hdfs.stackable.tech/v1alpha1
kind: HdfsCluster
metadata:
name: hdfs
spec:
image:
- productVersion: 3.3.4
+ productVersion: 3.4.0
clusterConfig:
dfsReplication: 1
zookeeperConfigMapName: hdfs-znode
diff --git a/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml b/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml
index f7835b70..54b0ad69 100644
--- a/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml
+++ b/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml
@@ -13,7 +13,7 @@ metadata:
name: hdfs
spec:
image:
- productVersion: 3.3.4
+ productVersion: 3.4.0
clusterConfig:
dfsReplication: 1
zookeeperConfigMapName: hdfs-znode
diff --git a/stacks/keycloak-opa-poc/druid.yaml b/stacks/keycloak-opa-poc/druid.yaml
index 514761ae..12cc47a4 100644
--- a/stacks/keycloak-opa-poc/druid.yaml
+++ b/stacks/keycloak-opa-poc/druid.yaml
@@ -5,7 +5,7 @@ metadata:
name: druid
spec:
image:
- productVersion: 28.0.1
+ productVersion: 30.0.0
clusterConfig:
listenerClass: external-unstable
deepStorage:
@@ -67,7 +67,7 @@ spec:
"druid-pac4j"]
# basic authenticator needed for internal authentication among Druid processes
- # Trying to use the pac4j authenticator in the escalator below leads to 302 errors,
+ # Trying to use the pac4j authenticator in the escalator below leads to 302 errors,
# it seems like the Druid processes cannot handle the OIDC authentication flow.
druid.auth.authenticator.MyBasicMetadataAuthenticator.type: basic
druid.auth.authenticator.MyBasicMetadataAuthenticator.initialInternalClientPassword: '${env:DRUID_SYSTEM_USER_PASSWORD}' # Default password for internal 'druid_system' user
diff --git a/stacks/keycloak-opa-poc/hdfs.yaml b/stacks/keycloak-opa-poc/hdfs.yaml
index 05eb35d5..30222c36 100644
--- a/stacks/keycloak-opa-poc/hdfs.yaml
+++ b/stacks/keycloak-opa-poc/hdfs.yaml
@@ -5,7 +5,7 @@ metadata:
name: hdfs
spec:
image:
- productVersion: 3.3.4
+ productVersion: 3.4.0
clusterConfig:
dfsReplication: 1
zookeeperConfigMapName: hdfs-znode
diff --git a/stacks/keycloak-opa-poc/opa.yaml b/stacks/keycloak-opa-poc/opa.yaml
index 41615844..bcd2919f 100644
--- a/stacks/keycloak-opa-poc/opa.yaml
+++ b/stacks/keycloak-opa-poc/opa.yaml
@@ -5,7 +5,7 @@ metadata:
name: opa
spec:
image:
- productVersion: 0.66.0
+ productVersion: 0.67.1
servers:
roleGroups:
default: {}
diff --git a/stacks/keycloak-opa-poc/trino.yaml b/stacks/keycloak-opa-poc/trino.yaml
index 315ec5b0..97a6453a 100644
--- a/stacks/keycloak-opa-poc/trino.yaml
+++ b/stacks/keycloak-opa-poc/trino.yaml
@@ -5,7 +5,7 @@ metadata:
name: trino
spec:
image:
- productVersion: "451"
+ productVersion: "455"
clusterConfig:
listenerClass: external-unstable
tls:
diff --git a/stacks/monitoring/grafana-dashboards.yaml b/stacks/monitoring/grafana-dashboards.yaml
index 0df74960..523c2ed7 100644
--- a/stacks/monitoring/grafana-dashboards.yaml
+++ b/stacks/monitoring/grafana-dashboards.yaml
@@ -1,4 +1,5 @@
-{% raw %}
+---
+# {% raw %}
apiVersion: v1
kind: ConfigMap
metadata:
@@ -10457,4 +10458,4 @@ data:
"version": 17,
"weekStart": ""
}
-{% endraw %}
+# {% endraw %}
diff --git a/stacks/nifi-kafka-druid-superset-s3/druid.yaml b/stacks/nifi-kafka-druid-superset-s3/druid.yaml
index 5407d200..dae6d10a 100644
--- a/stacks/nifi-kafka-druid-superset-s3/druid.yaml
+++ b/stacks/nifi-kafka-druid-superset-s3/druid.yaml
@@ -5,7 +5,7 @@ metadata:
name: druid
spec:
image:
- productVersion: 28.0.1
+ productVersion: 30.0.0
clusterConfig:
listenerClass: external-unstable
zookeeperConfigMapName: druid-znode
@@ -101,4 +101,4 @@ metadata:
name: druid-db-credentials
stringData:
username: druid
- password: druid
\ No newline at end of file
+ password: druid
diff --git a/stacks/signal-processing/grafana-dashboards.yaml b/stacks/signal-processing/grafana-dashboards.yaml
index 24ac2427..6ca6a2fc 100644
--- a/stacks/signal-processing/grafana-dashboards.yaml
+++ b/stacks/signal-processing/grafana-dashboards.yaml
@@ -1,4 +1,5 @@
-{% raw %}
+---
+# {% raw %}
apiVersion: v1
kind: ConfigMap
metadata:
@@ -2206,4 +2207,4 @@ data:
"version": 7,
"weekStart": ""
}
-{% endraw %}
+# {% endraw %}
diff --git a/stacks/trino-iceberg/trino.yaml b/stacks/trino-iceberg/trino.yaml
index 4f15796a..b826b798 100644
--- a/stacks/trino-iceberg/trino.yaml
+++ b/stacks/trino-iceberg/trino.yaml
@@ -5,7 +5,7 @@ metadata:
name: trino
spec:
image:
- productVersion: "451"
+ productVersion: "455"
clusterConfig:
listenerClass: external-unstable
catalogLabelSelector:
@@ -99,7 +99,7 @@ metadata:
name: opa
spec:
image:
- productVersion: 0.66.0
+ productVersion: 0.67.1
servers:
roleGroups:
default:
diff --git a/stacks/trino-superset-s3/trino.yaml b/stacks/trino-superset-s3/trino.yaml
index c8df87b2..04a61a7e 100644
--- a/stacks/trino-superset-s3/trino.yaml
+++ b/stacks/trino-superset-s3/trino.yaml
@@ -5,7 +5,7 @@ metadata:
name: trino
spec:
image:
- productVersion: "451"
+ productVersion: "455"
clusterConfig:
listenerClass: external-unstable
catalogLabelSelector:
@@ -71,7 +71,7 @@ metadata:
name: opa
spec:
image:
- productVersion: 0.66.0
+ productVersion: 0.67.1
servers:
roleGroups:
default: {}