diff --git a/.gitattributes b/.gitattributes
index 287fe385..9dcd77fb 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -2,4 +2,4 @@
 docs/** -linguist-documentation
 *.adoc linguist-detectable
 *.yaml linguist-detectable
-*.yml linguist-detectable
\ No newline at end of file
+*.yml linguist-detectable
diff --git a/.github/workflows/dev_jupyter-pyspark-with-alibi-detect.yaml b/.github/workflows/dev_jupyter-pyspark-with-alibi-detect.yaml
index ac842416..1d59ba6e 100644
--- a/.github/workflows/dev_jupyter-pyspark-with-alibi-detect.yaml
+++ b/.github/workflows/dev_jupyter-pyspark-with-alibi-detect.yaml
@@ -3,6 +3,8 @@ name: Build and publish jupyter-pyspark-with-alibi-detect
 
 env:
   IMAGE_NAME: jupyter-pyspark-with-alibi-detect
+  # TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
+  # the demo is reproducible for the release and it will be automatically replaced for the release branch.
   IMAGE_VERSION: python-3.9
   REGISTRY_PATH: stackable
   DOCKERFILE_PATH: "demos/signal-processing/Dockerfile-jupyter"
@@ -12,6 +14,8 @@ on:
   push:
     branches:
       - main
+      # TODO (@NickLarsenNZ): Also build on release branches, but with a stackable0.0.0-dev or stackableXX.X.X tag.
+      # - release-*
     paths:
       - demos/signal-processing/Dockerfile-jupyter
       - demos/signal-processing/requirements.txt
diff --git a/.github/workflows/dev_nifi.yaml b/.github/workflows/dev_nifi.yaml
index b244c930..5e456f55 100644
--- a/.github/workflows/dev_nifi.yaml
+++ b/.github/workflows/dev_nifi.yaml
@@ -3,6 +3,8 @@ name: Build and publish NiFi for signal-processing demo
 
 env:
   IMAGE_NAME: nifi
+  # TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
+  # the demo is reproducible for the release and it will be automatically replaced for the release branch.
   IMAGE_VERSION: 1.27.0-postgresql
   REGISTRY_PATH: stackable
   DOCKERFILE_PATH: "demos/signal-processing/Dockerfile-nifi"
@@ -12,6 +14,8 @@ on:
   push:
     branches:
      - main
+      # TODO (@NickLarsenNZ): Also build on release branches, but with a stackable0.0.0-dev or stackableXX.X.X tag.
+      # - release-*
     paths:
       - demos/signal-processing/Dockerfile-nifi
       - .github/workflows/dev_nifi.yaml
diff --git a/.github/workflows/dev_spark-k8s-with-scikit-learn.yaml b/.github/workflows/dev_spark-k8s-with-scikit-learn.yaml
index 70147118..56748f57 100644
--- a/.github/workflows/dev_spark-k8s-with-scikit-learn.yaml
+++ b/.github/workflows/dev_spark-k8s-with-scikit-learn.yaml
@@ -3,6 +3,8 @@ name: Build and publish spark-k8s-with-scikit-learn
 
 env:
   IMAGE_NAME: spark-k8s-with-scikit-learn
+  # TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
+  # the demo is reproducible for the release and it will be automatically replaced for the release branch.
   IMAGE_VERSION: 3.5.0-stackable24.3.0
   REGISTRY_PATH: stackable
   DOCKERFILE_PATH: "demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/Dockerfile"
@@ -12,6 +14,8 @@ on:
   push:
     branches:
       - main
+      # TODO (@NickLarsenNZ): Also build on release branches, but with a stackable0.0.0-dev or stackableXX.X.X tag.
+      # - release-*
     paths:
       - demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/Dockerfile
       - demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/requirements.txt
@@ -30,7 +34,7 @@ jobs:
          # TODO: the image 3.5.0-stackable24.3.0 does not have an arm64 build.
          # Re-activate the arm runner when the image is updated to one that does.
          # Also adjust publish_manifest step to include arm architecture
-         #- {name: "ubicloud-standard-8-arm", arch: "arm64"}
+         # - {name: "ubicloud-standard-8-arm", arch: "arm64"}
     steps:
       - name: Checkout Repository
         uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
diff --git a/.gitignore b/.gitignore
index 2eea525d..deaa14d1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,3 @@
-.env
\ No newline at end of file
+.env
+.envrc
+.direnv/
diff --git a/.scripts/update_refs.sh b/.scripts/update_refs.sh
new file mode 100755
index 00000000..f1f91910
--- /dev/null
+++ b/.scripts/update_refs.sh
@@ -0,0 +1,93 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# This script is used by the stackable-utils release script to update the demos
+# repository branch references as well as the stackableRelease versions so that
+# demos are properly versioned.
+
+# Parse args:
+# $1 if `commit` is specified as the first argument, then changes will be staged and committed.
+COMMIT="${1:-false}"
+COMMIT="${COMMIT/commit/true}"
+
+CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
+
+# Ensure we are not on the `main` branch.
+if [[ "$CURRENT_BRANCH" == "main" ]]; then
+  >&2 echo "Will not replace github references for the main branch. Exiting."
+  exit 1
+fi
+
+# Ensure the index is clean
+if ! git diff-index --quiet HEAD --; then
+  >&2 echo "Dirty git index. Check working tree or staged changes. Exiting."
+  exit 2
+fi
+
+# prepend a string to each line of stdout
+function prepend {
+  while read -r line; do
+    echo -e "${1}${line}"
+  done
+}
+
+# stage and commit based on a message
+function maybe_commit {
+  [ "$COMMIT" == "true" ] || return 0
+  local MESSAGE="$1"
+  PATCH=$(mktemp)
+  git add -u
+  git diff --staged > "$PATCH"
+  git commit -S -m "$MESSAGE" --no-verify
+  echo "patch written to: $PATCH" | prepend "\t"
+}
+
+if [[ "$CURRENT_BRANCH" == release-* ]]; then
+  STACKABLE_RELEASE="${CURRENT_BRANCH#release-}"
+  MESSAGE="Update stackableRelease to $STACKABLE_RELEASE"
+  echo "$MESSAGE"
+  # NOTE (@NickLarsenNZ): find is not required for such a trivial case, but it is done for consistency
+  find stacks/stacks-v2.yaml \
+    -exec grep --color=always -l stackableRelease {} \; \
+    -exec sed -i -E "s|(stackableRelease:\s+)(\S+)|\1${STACKABLE_RELEASE}|" {} \; \
+    | prepend "\t"
+  maybe_commit "chore(release): $MESSAGE"
+
+  # Replace 0.0.0-dev refs with ${STACKABLE_RELEASE}.0
+  # TODO (@NickLarsenNZ): handle patches later, and what about release-candidates?
+  SEARCH='stackable(0\.0\.0-dev|24\.7\.[0-9]+)' # TODO (@NickLarsenNZ): After https://github.com/stackabletech/stackable-cockpit/issues/310, only search for 0.0.0-dev
+  REPLACEMENT="stackable${STACKABLE_RELEASE}.0" # TODO (@NickLarsenNZ): Be a bit smarter about patch releases.
+  MESSAGE="Update image references with $REPLACEMENT"
+  echo "$MESSAGE"
+  find demos stacks -type f \
+    -exec grep --color=always -lE "$SEARCH" {} \; \
+    -exec sed -i -E "s|${SEARCH}|${REPLACEMENT}|" {} \; \
+    | prepend "\t"
+  maybe_commit "chore(release): $MESSAGE"
+
+  # Look for remaining references
+  echo "Checking files with older stackable release references which will be assumed to be intentional."
+  grep --color=always -ronE "stackable24\.3(\.[0-9]+)" | prepend "\t"
+  echo
+else
+  >&2 echo "WARNING: doesn't look like a release branch. Will not update stackableRelease versions in stacks and image references."
+fi
+
+MESSAGE="Replace githubusercontent references main->${CURRENT_BRANCH}"
+echo "$MESSAGE"
+# Search for githubusercontent urls and replace the branch reference with a placeholder variable.
+# This is done just in case the branch has special regex characters (like `/`).
+# shellcheck disable=SC2016 # We intentionally don't want to expand the variable.
+find demos stacks -type f \
+  -exec grep --color=always -l githubusercontent {} \; \
+  -exec sed -i -E 's|(stackabletech/demos)/main/|\1/\${UPDATE_BRANCH_REF}/|' {} \; \
+| prepend "\t"
+
+# Now, for all modified files, we can use envsubst
+export UPDATE_BRANCH_REF="$CURRENT_BRANCH"
+for MODIFIED_FILE in $(git diff --name-only); do
+  # shellcheck disable=SC2016 # We intentionally don't want to expand the variable.
+  envsubst '$UPDATE_BRANCH_REF' < "$MODIFIED_FILE" > "$MODIFIED_FILE.replacements"
+  mv "$MODIFIED_FILE.replacements" "$MODIFIED_FILE"
+done
+maybe_commit "chore(release): $MESSAGE"
diff --git a/.yamllint.yaml b/.yamllint.yaml
index ac4941c4..f5acaf98 100644
--- a/.yamllint.yaml
+++ b/.yamllint.yaml
@@ -8,3 +8,4 @@ rules:
   comments:
     min-spaces-from-content: 1 # Needed due to https://github.com/adrienverge/yamllint/issues/443
   braces: disable # because the yaml files are templates which can have {{ ... }}
+  indentation: disable # There are many conflicting styles and it isn't so important in this repo. It can be enabled later if we want consistency.
diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
index 0c2a42d9..8cb6e3da 100644
--- a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
+++ b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
@@ -338,29 +338,6 @@ data:
             )
         """)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
         run_query(connection, """
             create table if not exists lakehouse.house_sales.house_sales with (
                 partitioning = ARRAY['year(date_of_transfer)']
             )
@@ -504,23 +481,6 @@ data:
             where tpep_pickup_datetime >= date '2015-01-01' and tpep_pickup_datetime <= now() -- We have to remove some invalid records
         """)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
         run_query(connection, """
             create or replace materialized view lakehouse.taxi.yellow_tripdata_daily_agg as
             select
@@ -566,11 +526,6 @@ data:
            REFRESH MATERIALIZED VIEW lakehouse.taxi.yellow_tripdata_monthly_agg
         """)
-
-
-
-
-
         # At this point Spark should have created the needed underlying tables
         run_query(connection, """
             create or replace view lakehouse.smart_city.shared_bikes_station_status_latest as
diff --git a/demos/demos-v1.yaml b/demos/demos-v1.yaml
index b2e4ee0f..d17d97c4 100644
--- a/demos/demos-v1.yaml
+++ b/demos/demos-v1.yaml
@@ -1,3 +1,4 @@
+---
 demos:
   please-update:
     description: This version of stackablectl is outdated, please visit https://docs.stackable.tech/stackablectl/stable/installation.html on how to get the latest version
diff --git a/demos/demos-v2.yaml b/demos/demos-v2.yaml
index 4e04efb6..a31ff1ea 100644
--- a/demos/demos-v2.yaml
+++ b/demos/demos-v2.yaml
@@ -73,7 +73,7 @@ demos:
       memory: 42034Mi
       pvc: 75Gi # 30Gi for Kafka
   nifi-kafka-druid-water-level-data:
-    description: Demo ingesting water level data into Kafka using NiFi, streaming it into Druid and creating a Superset dashboard 
+    description: Demo ingesting water level data into Kafka using NiFi, streaming it into Druid and creating a Superset dashboard
     documentation: https://docs.stackable.tech/stackablectl/stable/demos/nifi-kafka-druid-water-level-data.html
     stackableStack: nifi-kafka-druid-superset-s3
     labels:
diff --git a/demos/end-to-end-security/README.md b/demos/end-to-end-security/README.md
index eb320555..4d11d17e 100644
--- a/demos/end-to-end-security/README.md
+++ b/demos/end-to-end-security/README.md
@@ -5,6 +5,7 @@
 3. Optional: Add Database connection
 4. Add admin user in Keycloak to all relevant groups (so that he has access to the tables, so he can create datasets, charts and dashboards).
 5. `pgdump` the Postgres and update the dump in Git. For that shell into `postgresql-superset-0` and execute
+
    ```sh
    export PGPASSWORD="$POSTGRES_POSTGRES_PASSWORD"
diff --git a/demos/nifi-kafka-druid-earthquake-data/download_earthquake_data.sh b/demos/nifi-kafka-druid-earthquake-data/download_earthquake_data.sh
index 17238430..4e413ee2 100755
--- a/demos/nifi-kafka-druid-earthquake-data/download_earthquake_data.sh
+++ b/demos/nifi-kafka-druid-earthquake-data/download_earthquake_data.sh
@@ -1,3 +1,6 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
 # This script is not used for the demo
 # Its purpose is to document how to retrieve the used earthquake data
diff --git a/docs/modules/demos/images/end-to-end-security/README.md b/docs/modules/demos/images/end-to-end-security/README.md
index 13024029..86596c11 100644
--- a/docs/modules/demos/images/end-to-end-security/README.md
+++ b/docs/modules/demos/images/end-to-end-security/README.md
@@ -1,3 +1,5 @@
+# end-to-end-security
+
 The images are exported from
-https://docs.google.com/presentation/d/19h3sBve_dOSgpZ6eTZqmYXxGoiQqXNs1/edit?usp=sharing&ouid=105504333647320477456&rtpof=true&sd=true.
+
 Ask Sebastian for access if needed.
diff --git a/docs/modules/demos/images/end-to-end-security/trino-schema.svg b/docs/modules/demos/images/end-to-end-security/trino-schema.svg
index 98e2daad..36c10a49 100644
--- a/docs/modules/demos/images/end-to-end-security/trino-schema.svg
+++ b/docs/modules/demos/images/end-to-end-security/trino-schema.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc b/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc
index 886cff7c..a732b82e 100644
--- a/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc
+++ b/docs/modules/demos/pages/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data.adoc
@@ -151,6 +151,10 @@ This is described below.
 
 Libraries can be added to a custom *product* image launched by the notebook. Suppose a Spark job is prepared like this:
 
+// TODO (@NickLarsenNZ): Use stackable0.0.0-dev so that the demo is reproducible for the release
+// and it will be automatically replaced for the release branch.
+// Also update the reference in notebook.ipynb.
+
 [source,python]
 ----
 spark = (SparkSession
@@ -172,6 +176,10 @@ spark = (SparkSession
 
 It requires a specific Spark image:
 
+// TODO (@NickLarsenNZ): Use stackable0.0.0-dev so that the demo is reproducible for the release
+// and it will be automatically replaced for the release branch.
+// Also update the reference in notebook.ipynb.
+
 [source,python]
 ----
 .config("spark.kubernetes.container.image",
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 00000000..63d72e2c
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,6 @@
+{ pkgs ? import <nixpkgs> { } }:
+pkgs.mkShell {
+  packages = with pkgs; [
+    gettext # envsubst
+  ];
+}
diff --git a/stacks/_templates/keycloak.yaml b/stacks/_templates/keycloak.yaml
index 7491b28b..ecc9a9fb 100644
--- a/stacks/_templates/keycloak.yaml
+++ b/stacks/_templates/keycloak.yaml
@@ -126,14 +126,14 @@ metadata:
   labels:
     app: keycloak
 spec:
-# We want a stable Keycloak address that does not change when Keycloak reboots.
-# We could simply pick LoadBalancer here, but on-prem clusters often times don't support LBs,
-# so the demo would not run on their environments. Additionally, LB addresses often take a while to allocate
-# (order of minutes on GCP iirc). So there's no way for us to know whether there's no LB address because it's still
-# in progress, or if there's no LB address because it's unsupported.
-#
-# But we can at least make sure to reconcile the AuthClass once Keycloak restarts.
-# We achieve this by letting the keycloak itself propagate it's address instead of a separate Job.
+  # We want a stable Keycloak address that does not change when Keycloak reboots.
+  # We could simply pick LoadBalancer here, but on-prem clusters often times don't support LBs,
+  # so the demo would not run on their environments. Additionally, LB addresses often take a while to allocate
+  # (order of minutes on GCP iirc). So there's no way for us to know whether there's no LB address because it's still
+  # in progress, or if there's no LB address because it's unsupported.
+  #
+  # But we can at least make sure to reconcile the AuthClass once Keycloak restarts.
+  # We achieve this by letting the keycloak itself propagate it's address instead of a separate Job.
   type: NodePort
   selector:
     app: keycloak
diff --git a/stacks/_templates/prometheus-service-monitor.yaml b/stacks/_templates/prometheus-service-monitor.yaml
index 5e470cf1..3d597a7b 100644
--- a/stacks/_templates/prometheus-service-monitor.yaml
+++ b/stacks/_templates/prometheus-service-monitor.yaml
@@ -1,3 +1,4 @@
+---
 apiVersion: monitoring.coreos.com/v1
 kind: ServiceMonitor
 metadata:
diff --git a/stacks/_templates/vector-aggregator-discovery.yaml b/stacks/_templates/vector-aggregator-discovery.yaml
index daba2720..49aa04bc 100644
--- a/stacks/_templates/vector-aggregator-discovery.yaml
+++ b/stacks/_templates/vector-aggregator-discovery.yaml
@@ -1,3 +1,4 @@
+---
 apiVersion: v1
 kind: ConfigMap
 metadata:
diff --git a/stacks/authentication/openldap-tls.yaml b/stacks/authentication/openldap-tls.yaml
index 0f61f417..6850fd2e 100644
--- a/stacks/authentication/openldap-tls.yaml
+++ b/stacks/authentication/openldap-tls.yaml
@@ -93,4 +93,4 @@ spec:
       port: 1636
       targetPort: tls-ldap
   selector:
-    app.kubernetes.io/name: openldap
\ No newline at end of file
+    app.kubernetes.io/name: openldap
diff --git a/stacks/end-to-end-security/kerberos-secretclass.yaml b/stacks/end-to-end-security/kerberos-secretclass.yaml
index 26beb7fd..51a98379 100644
--- a/stacks/end-to-end-security/kerberos-secretclass.yaml
+++ b/stacks/end-to-end-security/kerberos-secretclass.yaml
@@ -1,4 +1,3 @@
-
 ---
 apiVersion: secrets.stackable.tech/v1alpha1
 kind: SecretClass
diff --git a/stacks/end-to-end-security/superset.yaml b/stacks/end-to-end-security/superset.yaml
index 4b213faa..0577245e 100644
--- a/stacks/end-to-end-security/superset.yaml
+++ b/stacks/end-to-end-security/superset.yaml
@@ -23,7 +23,7 @@ spec:
     spec:
       # We need to restore the postgres state before the superset container itself starts some database migrations
       initContainers:
-      # The postgres image does not contain curl or wget...
+        # The postgres image does not contain curl or wget...
         - name: download-dump
           image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0
           command:
diff --git a/stacks/keycloak-opa-poc/policies.yaml b/stacks/keycloak-opa-poc/policies.yaml
index c2c0ebec..cfaeb799 100644
--- a/stacks/keycloak-opa-poc/policies.yaml
+++ b/stacks/keycloak-opa-poc/policies.yaml
@@ -58,4 +58,4 @@ data:
 #     "57d3b407-ecc0-4cc1-aaaf-45a63f43b96b",
 #     "170b4130-ca4d-417b-b229-f2917d5ab3d1"
 #   ]
-# }
\ No newline at end of file
+# }
diff --git a/stacks/keycloak-opa-poc/setup-keycloak.yaml b/stacks/keycloak-opa-poc/setup-keycloak.yaml
index 62d538a2..f21d64a2 100644
--- a/stacks/keycloak-opa-poc/setup-keycloak.yaml
+++ b/stacks/keycloak-opa-poc/setup-keycloak.yaml
@@ -1,3 +1,4 @@
+---
 apiVersion: v1
 kind: Secret
 metadata:
diff --git a/stacks/observability/grafana-admin-credentials.yaml b/stacks/observability/grafana-admin-credentials.yaml
index b6bb02b3..c8037bd2 100644
--- a/stacks/observability/grafana-admin-credentials.yaml
+++ b/stacks/observability/grafana-admin-credentials.yaml
@@ -1,3 +1,4 @@
+---
 apiVersion: v1
 kind: Secret
 metadata:
diff --git a/stacks/observability/grafana.yaml b/stacks/observability/grafana.yaml
index d215406e..aa8f3be8 100644
--- a/stacks/observability/grafana.yaml
+++ b/stacks/observability/grafana.yaml
@@ -1,5 +1,6 @@
-# https://github.com/grafana/helm-charts/tree/main/charts/grafana
+# yamllint disable rule:comments-indentation
 ---
+# https://github.com/grafana/helm-charts/tree/main/charts/grafana
 releaseName: grafana
 name: grafana
 repo:
diff --git a/stacks/observability/opentelemetry-collector-deployment.yaml b/stacks/observability/opentelemetry-collector-deployment.yaml
index 7e9741ec..c7978492 100644
--- a/stacks/observability/opentelemetry-collector-deployment.yaml
+++ b/stacks/observability/opentelemetry-collector-deployment.yaml
@@ -1,3 +1,5 @@
+# yamllint disable rule:comments-indentation
+---
 # The OpenTelemetry Operator does not yet allow setting the service to be a NodePort
 # See: https://github.com/open-telemetry/opentelemetry-operator/issues/902
 apiVersion: opentelemetry.io/v1beta1
@@ -122,7 +124,7 @@ spec:
           - resourcedetection/env
           - resource/logs
           - attributes/logs
-          exporters: 
+          exporters:
           - loki
           # - debug
       # Set the log level of the opentelemetry-collector application.
@@ -141,7 +143,7 @@ spec:
         cpu: 250m
         memory: 64Mi
   volumeMounts:
-    # We mount a certificate from the secret-operator so that we have the CA 
+    # We mount a certificate from the secret-operator so that we have the CA
     # cert for exporters to trust what they connect to.
     - name: tls
       mountPath: /certs
diff --git a/stacks/observability/opentelemetry-collector-sidecar.yaml b/stacks/observability/opentelemetry-collector-sidecar.yaml
index 453b2ee1..07c5b964 100644
--- a/stacks/observability/opentelemetry-collector-sidecar.yaml
+++ b/stacks/observability/opentelemetry-collector-sidecar.yaml
@@ -1,3 +1,5 @@
+# yamllint disable rule:comments-indentation
+---
 apiVersion: opentelemetry.io/v1beta1
 kind: OpenTelemetryCollector
 metadata:
@@ -121,7 +123,7 @@ spec:
           - resourcedetection/env
           - resource/logs
           - attributes/logs
-          exporters: 
+          exporters:
           - loki
           # - debug
       # Set the log level of the opentelemetry-collector application.
@@ -140,7 +142,7 @@ spec:
         cpu: 250m
         memory: 64Mi
   volumeMounts:
-    # We mount a certificate from the secret-operator so that we have the CA 
+    # We mount a certificate from the secret-operator so that we have the CA
     # cert for exporters to trust what they connect to.
     - name: tls
       mountPath: /certs
diff --git a/stacks/signal-processing/jupyterhub.yaml b/stacks/signal-processing/jupyterhub.yaml
index a3b745b6..f26e5988 100644
--- a/stacks/signal-processing/jupyterhub.yaml
+++ b/stacks/signal-processing/jupyterhub.yaml
@@ -30,6 +30,8 @@ options:
   singleuser:
     cmd: null
     image:
+      # TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
+      # the demo is reproducible for the release and it will be automatically replaced for the release branch.
       name: docker.stackable.tech/demos/jupyter-pyspark-with-alibi-detect
       tag: python-3.9
     serviceAccountName: spark
diff --git a/stacks/signal-processing/nifi.yaml b/stacks/signal-processing/nifi.yaml
index 19e88cda..48d7c39d 100644
--- a/stacks/signal-processing/nifi.yaml
+++ b/stacks/signal-processing/nifi.yaml
@@ -6,6 +6,8 @@ metadata:
 spec:
   image:
     productVersion: 1.27.0
+    # TODO (@NickLarsenNZ): Use a versioned image with stackable0.0.0-dev or stackableXX.X.X so that
+    # the demo is reproducible for the release and it will be automatically replaced for the release branch.
     custom: docker.stackable.tech/demos/nifi:1.27.0-postgresql
   clusterConfig:
     listenerClass: external-unstable
diff --git a/stacks/stacks-v1.yaml b/stacks/stacks-v1.yaml
index b346c1f7..f8a80050 100644
--- a/stacks/stacks-v1.yaml
+++ b/stacks/stacks-v1.yaml
@@ -1,3 +1,4 @@
+---
 stacks:
   please-update:
     description: This version of stackablectl is outdated, please visit https://docs.stackable.tech/stackablectl/stable/installation.html on how to get the latest version
diff --git a/stacks/trino-superset-s3/hive-metastore.yaml b/stacks/trino-superset-s3/hive-metastore.yaml
index fe2deea9..c8c9fb25 100644
--- a/stacks/trino-superset-s3/hive-metastore.yaml
+++ b/stacks/trino-superset-s3/hive-metastore.yaml
@@ -25,4 +25,4 @@ metadata:
 type: Opaque
 stringData:
   username: hive
-  password: hive 
+  password: hive