From 98824b1a5e9af1cc684cc4de4877cf75131102f3 Mon Sep 17 00:00:00 2001 From: dag Date: Fri, 13 Oct 2023 13:28:01 +0200 Subject: [PATCH 1/3] Feature/deploy changes (#482) * gurobi10 and yml changes * ldap compose deployment changes * fix cornflow exposed port * config custom Airflow UI for CornflowEnv * new dashboard alert to welcome users --------- Co-authored-by: Guillermo Gonzalez-Santander --- .gitignore | 5 +- cornflow-server/Dockerfile | 7 +- cornflow-server/airflow_config/Dockerfile | 4 +- .../airflow_config/airflow_local_settings.py | 23 ++ .../airflow_config/logs/README.rst | 10 + .../scripts/solvers/get_gurobi.py | 6 +- docker-compose-cornflow-celery.yml | 264 +++++++++++------- docker-compose-cornflow-ldap.yml | 181 ++++++++---- docker-compose.yml | 165 +++++++---- 9 files changed, 467 insertions(+), 198 deletions(-) create mode 100644 cornflow-server/airflow_config/airflow_local_settings.py create mode 100644 cornflow-server/airflow_config/logs/README.rst diff --git a/.gitignore b/.gitignore index fd7086dce..584d2d0f5 100644 --- a/.gitignore +++ b/.gitignore @@ -169,4 +169,7 @@ cornssh.pub *.db # IO files -*.mps \ No newline at end of file +*.mps + +# airflow logs +cornflow-server/airflow_config/logs \ No newline at end of file diff --git a/cornflow-server/Dockerfile b/cornflow-server/Dockerfile index 6520f93cb..e9bb2b221 100644 --- a/cornflow-server/Dockerfile +++ b/cornflow-server/Dockerfile @@ -2,7 +2,7 @@ # AUTHOR: sistemas@baobabsoluciones.es FROM python:3.10-slim-buster -LABEL maintainer="sistemas@baobabsoluciones" +LABEL maintainer="cornflow@baobabsoluciones" # Never prompt the user for choices on installation/configuration of packages ENV DEBIAN_FRONTEND noninteractive @@ -39,6 +39,11 @@ RUN mkdir -p /usr/src/app/log # create folder for custom ssh keys RUN mkdir /usr/src/app/.ssh +# user cornflow on application +RUN useradd -ms /bin/bash -d /usr/src/app cornflow +RUN chown -R cornflow: /usr/src/app +USER cornflow + EXPOSE 5000 # execute python init script diff --git a/cornflow-server/airflow_config/Dockerfile b/cornflow-server/airflow_config/Dockerfile index e4b1ad305..44239dcba 100644 --- a/cornflow-server/airflow_config/Dockerfile +++ b/cornflow-server/airflow_config/Dockerfile @@ -1,9 +1,9 @@ # VERSION 2.7.1 -# AUTHOR: sistemas@baobabsoluciones.es +# AUTHOR: cornflow@baobabsoluciones.es # DESCRIPTION: Airflow 2.7.1 image personalized for use with Cornflow (from puckel/docker-airflow https://github.com/puckel/docker-airflow by "Puckel_") FROM python:3.10-slim-buster -LABEL maintainer="sistemas@baobabsoluciones" +LABEL maintainer="cornflow@baobabsoluciones" # Never prompt the user for choices on installation/configuration of packages ENV DEBIAN_FRONTEND noninteractive diff --git a/cornflow-server/airflow_config/airflow_local_settings.py b/cornflow-server/airflow_config/airflow_local_settings.py new file mode 100644 index 000000000..daa7c99e4 --- /dev/null +++ b/cornflow-server/airflow_config/airflow_local_settings.py @@ -0,0 +1,23 @@ +# AIRFLOW custom UI colors for CornflowEnv + +STATE_COLORS = { + "deferred": "#7c4fe0", + "failed": "#c9301c", + "queued": "#656d78", + "removed": "#7bdcb5", + "restarting": "#9b51e0", + "running": "#01FF70", + "scheduled": "#fcb900", + "shutdown": "blue", + "skipped": "darkorchid", + "success": "#00d084", + "up_for_reschedule": "#f78da7", + "up_for_retry": "yellow", + "upstream_failed": "#b31e1e", +} + +from airflow.www.utils import UIAlert + +DASHBOARD_UIALERTS = [ + UIAlert("Welcome! This is the backend of your Cornflow environment. 
Airflow™ is a platform created by the community to programmatically author, schedule and monitor workflows."),
+]
\ No newline at end of file
diff --git a/cornflow-server/airflow_config/logs/README.rst b/cornflow-server/airflow_config/logs/README.rst
new file mode 100644
index 000000000..b61608dfa
--- /dev/null
+++ b/cornflow-server/airflow_config/logs/README.rst
@@ -0,0 +1,10 @@
+Airflow LOGS
+--------------
+
+Users can specify the directory to place log files in airflow.cfg using base_log_folder. By default, logs are placed in the AIRFLOW_HOME directory.
+
+The following convention is followed while naming logs: {dag_id}/{task_id}/{execution_date}/{try_number}.log
+
+In addition, users can supply a remote location to store current logs and backups.
+
+In the Airflow Web UI, local logs take precedence over remote logs. If local logs can not be found or accessed, the remote logs will be displayed. Note that logs are only sent to remote storage once a task is complete (including failure); In other words, remote logs for running tasks are unavailable.
\ No newline at end of file
diff --git a/cornflow-server/airflow_config/scripts/solvers/get_gurobi.py b/cornflow-server/airflow_config/scripts/solvers/get_gurobi.py
index 7b22e413d..522cb4f43 100644
--- a/cornflow-server/airflow_config/scripts/solvers/get_gurobi.py
+++ b/cornflow-server/airflow_config/scripts/solvers/get_gurobi.py
@@ -22,10 +22,10 @@ def install():
     install_dir = "/usr/local/airflow/gurobi"
     os.chdir("/usr/local/airflow")
     subprocess.check_output(
-        ["wget", "https://packages.gurobi.com/9.5/gurobi9.5.0_linux64.tar.gz"]
+        ["wget", "https://packages.gurobi.com/10.0/gurobi10.0.1_linux64.tar.gz"]
     )
-    subprocess.check_output(["tar", "-xvf", "gurobi9.5.0_linux64.tar.gz"])
-    os.rename("gurobi950", "gurobi")
+    subprocess.check_output(["tar", "-xvf", "gurobi10.0.1_linux64.tar.gz"])
+    os.rename("gurobi1001", "gurobi")
     uid = pwd.getpwnam("airflow").pw_uid
     gid = grp.getgrnam("airflow").gr_gid
     os.chown(install_dir, uid, gid)
diff --git a/docker-compose-cornflow-celery.yml b/docker-compose-cornflow-celery.yml
index ee6ec2681..8e530ef44 100644
--- a/docker-compose-cornflow-celery.yml
+++ b/docker-compose-cornflow-celery.yml
@@ -1,136 +1,212 @@
-version: '3'
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
+#
+# WARNING: This configuration is for local development. Do not use it in a production deployment.
+#
+# This configuration supports basic configuration using environment variables or an .env file
+# The following variables are supported:
+#
+# Feel free to modify this file to suit your needs.
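+#
+# As an illustration only (this override file does not ship with the repository),
+# the hard-coded credentials defined below could be customised by passing a second
+# compose file on the command line, e.g.
+#   docker-compose -f docker-compose-cornflow-celery.yml -f docker-compose.override.yml up -d
+# where a hypothetical docker-compose.override.yml might contain something like:
+#
+#   version: '3.8'
+#   services:
+#     cornflow:
+#       environment:
+#         CORNFLOW_ADMIN_USER: my_admin            # example value, not a default
+#         CORNFLOW_ADMIN_PWD: My_admin_password1234
+#
+# The variable names mirror the x-cornflow-common environment block defined later in
+# this file; when docker-compose merges several files, values from the file passed
+# last take precedence.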
+version: '3.8' + +x-airflow-common: + &airflow-common + # In order to add custom dependencies or upgrade provider packages you can use your extended image. + # Comment the image line and uncomment the "build" and "context" lines below, Then run `docker-compose build` to build the images. + image: baobabsoluciones/airflow:latest + platform: linux/amd64 + #build: + #context: ./cornflow-server/airflow_config + environment: + &airflow-common-env + AIRFLOW_USER: admin + AIRFLOW_PWD: admin + AIRFLOW_DB_HOST: airflow_db + AIRFLOW_DB_PORT: 5432 + AIRFLOW_DB_USER: airflow + AIRFLOW_DB_PASSWORD: airflow + AIRFLOW_DB: airflow + AIRFLOW__CORE__FERNET_KEY: '' + EXECUTOR: Celery + AIRFLOW__WEBSERVER__INSTANCE_NAME: CornflowEnv + AIRFLOW__WEBSERVER__NAVBAR_COLOR: '#0693e3' + volumes: + - ./cornflow-dags/DAG:/usr/local/airflow/dags + - ./cornflow-dags/requirements.txt:/requirements.txt + - ./cornflow-server/airflow_config/logs:/usr/local/airflow/logs + - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins + - ./cornflow-server/airflow_config/airflow_local_settings.py:/usr/local/airflow/config/airflow_local_settings.py + depends_on: + &airflow-common-depends-on + redis: + condition: service_healthy + airflow_db: + condition: service_healthy + +x-cornflow-common: + &cornflow-common + # In order to add custom dependencies or upgrade provider packages you can use your extended image. + # Comment the image line and uncomment the "build" and "context" lines below, Then run `docker-compose build` to build the images. + image: baobabsoluciones/cornflow:release-v1.0.7 + platform: linux/amd64 + #build: + #context: ./cornflow-server + environment: + &cornflow-common-env + CORNFLOW_ADMIN_USER: cornflow_admin + CORNFLOW_ADMIN_PWD: Cornflow_admin1234 + CORNFLOW_SERVICE_USER: service_user + CORNFLOW_SERVICE_PWD: Service_user1234 + CORNFLOW_DB_HOST: cornflow_db + CORNFLOW_DB_PORT: 5432 + CORNFLOW_DB_USER: cornflow + CORNFLOW_DB_PASSWORD: cornflow + CORNFLOW_DB: cornflow + depends_on: + &cornflow-common-depends-on + cornflow_db: + condition: service_healthy + webserver: + condition: service_healthy services: cornflow: - build: - context: ./cornflow-server - dockerfile: Dockerfile + <<: *cornflow-common restart: always ports: - 5000:5000 + healthcheck: + test: ["CMD-SHELL", "[ -f /usr/src/app/gunicorn.pid ]"] + interval: 30s + timeout: 30s + retries: 5 environment: - - CORNFLOW_ADMIN_USER=cornflow_admin - - CORNFLOW_ADMIN_PWD=Cornflow_admin1234 - - CORNFLOW_SERVICE_USER=service_user - - CORNFLOW_SERVICE_PWD=Service_user1234 - - CORNFLOW_DB_HOST=cornflow_db - - CORNFLOW_DB_PORT=5432 - - CORNFLOW_DB_USER=cornflow - - CORNFLOW_DB_PASSWORD=cornflow - - CORNFLOW_DB=cornflow + <<: *cornflow-common-env depends_on: - - cornflow_db - - webserver + <<: *cornflow-common-depends-on cornflow_db: - image: postgres + image: postgres:15.4 environment: - POSTGRES_USER=cornflow - POSTGRES_PASSWORD=cornflow - POSTGRES_DB=cornflow + healthcheck: + test: ["CMD", "pg_isready", "-U", "cornflow"] + interval: 10s + retries: 5 + start_period: 5s + restart: always volumes: - postgres_cf_data:/var/lib/postgresql/data/ + + airflow_db: + image: postgres:15.4 + environment: + - POSTGRES_USER=airflow + - POSTGRES_PASSWORD=airflow + - POSTGRES_DB=airflow + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 10s + retries: 5 + start_period: 5s + volumes: + - postgres_af_data:/var/lib/postgresql/data/ webserver: - build: - context: ./cornflow-server/airflow_config - dockerfile: Dockerfile - restart: always - volumes: - - 
./cornflow-dags/DAG:/usr/local/airflow/dags - - ./cornflow-dags/requirements.txt:/requirements.txt - - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins - environment: - - AIRFLOW_USER=admin - - AIRFLOW_PWD=admin - - AIRFLOW_DB_HOST=airflow_db - - AIRFLOW_DB_PORT=5432 - - AIRFLOW_DB_USER=airflow - - AIRFLOW_DB_PASSWORD=airflow - - AIRFLOW_DB=airflow - - AIRFLOW__CORE__FERNET_KEY=L5Z7WRVv5zaexK0OL3pS2stYUF-gx_UmfDBqWQ4br6Y= - - EXECUTOR=Celery + <<: *airflow-common + command: webserver ports: - 8080:8080 - depends_on: - - airflow_db - - redis - command: webserver healthcheck: test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"] interval: 30s timeout: 30s retries: 5 + environment: + <<: *airflow-common-env + restart: always + depends_on: + <<: *airflow-common-depends-on scheduler: - build: - context: ./cornflow-server/airflow_config - dockerfile: Dockerfile + <<: *airflow-common + command: scheduler + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8974/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + environment: + <<: *airflow-common-env restart: always depends_on: - - webserver - volumes: - - ./cornflow-dags/DAG:/usr/local/airflow/dags - - ./cornflow-dags/requirements.txt:/requirements.txt - - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins - environment: - - AIRFLOW_USER=admin - - AIRFLOW_PWD=admin - - AIRFLOW_DB_HOST=airflow_db - - AIRFLOW_DB_PORT=5432 - - AIRFLOW_DB_USER=airflow - - AIRFLOW_DB_PASSWORD=airflow - - AIRFLOW_DB=airflow - - AIRFLOW__CORE__FERNET_KEY=L5Z7WRVv5zaexK0OL3pS2stYUF-gx_UmfDBqWQ4br6Y= - - EXECUTOR=Celery - command: scheduler + <<: *airflow-common-depends-on + # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up + # or by explicitly targeted on the command line e.g. docker-compose up flower. 
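+  # Services attached to a profile are skipped by a plain "docker-compose up", so the
+  # flower service below is only created when its profile is activated. Since this file
+  # does not use the default compose file name, an illustrative full invocation would be:
+  #   docker-compose -f docker-compose-cornflow-celery.yml --profile flower up -d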
+ # See: https://docs.docker.com/compose/profiles/ flower: - build: - context: ./cornflow-server/airflow_config - dockerfile: Dockerfile - restart: always - depends_on: - - redis - environment: - - AIRFLOW_DB_HOST=airflow_db - - AIRFLOW_DB_USER=airflow - - AIRFLOW_DB_PASSWORD=airflow - - EXECUTOR=Celery - ports: - - "5555:5555" + <<: *airflow-common command: flower - - airflow_db: - image: postgres + profiles: + - flower + ports: + - "5555:5555" + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:5555/"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s environment: - - POSTGRES_USER=airflow - - POSTGRES_PASSWORD=airflow - - POSTGRES_DB=airflow - volumes: - - postgres_af_data:/var/lib/postgresql/data/ + <<: *airflow-common-env + restart: always + depends_on: + <<: *airflow-common-depends-on redis: - image: redis:5.0.5 + image: redis:latest + expose: + - 6379 + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 30s + retries: 50 + start_period: 30s + restart: always worker: - build: - context: ./cornflow-server/airflow_config - dockerfile: Dockerfile + <<: *airflow-common + command: worker + environment: + <<: *airflow-common-env + # Required to handle warm shutdown of the celery workers properly + # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation + DUMB_INIT_SETSID: "0" restart: always depends_on: - - scheduler - volumes: - - ./cornflow-dags/DAG:/usr/local/airflow/dags - - ./cornflow-dags/requirements.txt:/requirements.txt - - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins - environment: - - AIRFLOW_DB_HOST=airflow_db - - AIRFLOW_DB_USER=airflow - - AIRFLOW_DB_PASSWORD=airflow - - AIRFLOW__CORE__FERNET_KEY=L5Z7WRVv5zaexK0OL3pS2stYUF-gx_UmfDBqWQ4br6Y= - - EXECUTOR=Celery - command: worker + <<: *airflow-common-depends-on volumes: postgres_cf_data: diff --git a/docker-compose-cornflow-ldap.yml b/docker-compose-cornflow-ldap.yml index 79735357e..4457dd588 100644 --- a/docker-compose-cornflow-ldap.yml +++ b/docker-compose-cornflow-ldap.yml @@ -1,79 +1,160 @@ -version: '3' + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# Basic Airflow cluster configuration for development purposes with LDAP Auth. +# +# WARNING: This configuration is for local development. Do not use it in a production deployment. +# +# This configuration supports basic configuration using environment variables or an .env file +# The following variables are supported: +# +# Feel free to modify this file to suit your needs. +version: '3.8' + +x-airflow-common: + &airflow-common + # In order to add custom dependencies or upgrade provider packages you can use your extended image. 
+ # Comment the image line and uncomment the "build" and "context" lines below, Then run `docker-compose build` to build the images. + image: baobabsoluciones/airflow:latest + platform: linux/amd64 + #build: + #context: ./cornflow-server/airflow_config + environment: + &airflow-common-env + AIRFLOW_DB_HOST: airflow_db + AIRFLOW_DB_PORT: 5432 + AIRFLOW_DB_USER: airflow + AIRFLOW_DB_PASSWORD: airflow + AIRFLOW_DB: airflow + AIRFLOW__WEBSERVER__INSTANCE_NAME: CornflowEnv + AIRFLOW__WEBSERVER__NAVBAR_COLOR: '#0693e3' + AIRFLOW_LDAP_ENABLE: 'True' + AIRFLOW_LDAP_URI: ldap://openldap:389 + AIRFLOW_LDAP_SEARCH: ou=users,dc=example,dc=org + AIRFLOW_LDAP_BIND_USER: cn=admin,dc=example,dc=org + AIRFLOW_LDAP_BIND_PASSWORD: admin + AIRFLOW_LDAP_UID_FIELD: cn + # cornflow user and password from LDAP credentials file in cornflow-server/airflow_config/ldapbootstrap.ldif + CORNFLOW_SERVICE_USER: cornflow + CORNFLOW_SERVICE_PWD: cornflow1234 + volumes: + - ./cornflow-dags/DAG:/usr/local/airflow/dags + - ./cornflow-dags/requirements.txt:/requirements.txt + - ./cornflow-server/airflow_config/logs:/usr/local/airflow/logs + - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins + - ./cornflow-server/airflow_config/airflow_local_settings.py:/usr/local/airflow/config/airflow_local_settings.py + depends_on: + &airflow-common-depends-on + airflow_db: + condition: service_healthy + +x-cornflow-common: + &cornflow-common + # In order to add custom dependencies or upgrade provider packages you can use your extended image. + # Comment the image line and uncomment the "build" and "context" lines below, Then run `docker-compose build` to build the images. + image: baobabsoluciones/cornflow:release-v1.0.7 + platform: linux/amd64 + #build: + #context: ./cornflow-server + environment: + &cornflow-common-env + CORNFLOW_DB_HOST: cornflow_db + CORNFLOW_DB_PORT: 5432 + CORNFLOW_DB_USER: cornflow + CORNFLOW_DB_PASSWORD: cornflow + CORNFLOW_DB: cornflow + AUTH_TYPE: 2 + LDAP_HOST: ldap://openldap:389 + LDAP_BIND_DN: cn=admin,dc=example,dc=org + LDAP_BIND_PASSWORD: admin + LDAP_SERVICE_BASE: ou=service,dc=example,dc=org + CORNFLOW_SERVICE_USER: cornflow + # airflow user and password from LDAP credentials file in cornflow-server/airflow_config/ldapbootstrap.ldif + AIRFLOW_USER: administrator + AIRFLOW_PWD: administrator1234 + depends_on: + &cornflow-common-depends-on + cornflow_db: + condition: service_healthy + webserver: + condition: service_healthy services: cornflow: - build: - context: ./cornflow-server - dockerfile: Dockerfile + <<: *cornflow-common restart: always ports: - 5000:5000 + healthcheck: + test: ["CMD-SHELL", "[ -f /usr/src/app/gunicorn.pid ]"] + interval: 30s + timeout: 30s + retries: 5 environment: - - CORNFLOW_DB_HOST=cornflow_db - - CORNFLOW_DB_PORT=5432 - - CORNFLOW_DB_USER=cornflow - - CORNFLOW_DB_PASSWORD=cornflow - - CORNFLOW_DB=cornflow - - AUTH_TYPE=2 - - LDAP_HOST=ldap://openldap:389 - - LDAP_BIND_DN=cn=admin,dc=example,dc=org - - LDAP_BIND_PASSWORD=admin - - LDAP_SERVICE_BASE=ou=service,dc=example,dc=org - - CORNFLOW_SERVICE_USER=cornflow + <<: *cornflow-common-env depends_on: - - cornflow_db - - webserver + <<: *cornflow-common-depends-on cornflow_db: - image: postgres + image: postgres:15.4 environment: - POSTGRES_USER=cornflow - POSTGRES_PASSWORD=cornflow - POSTGRES_DB=cornflow + healthcheck: + test: ["CMD", "pg_isready", "-U", "cornflow"] + interval: 10s + retries: 5 + start_period: 5s + restart: always volumes: - postgres_cf_data:/var/lib/postgresql/data/ + + airflow_db: + image: 
postgres:15.4 + environment: + - POSTGRES_USER=airflow + - POSTGRES_PASSWORD=airflow + - POSTGRES_DB=airflow + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 10s + retries: 5 + start_period: 5s + volumes: + - postgres_af_data:/var/lib/postgresql/data/ webserver: - build: - context: ./cornflow-server/airflow_config - dockerfile: Dockerfile - restart: always - volumes: - - ./cornflow-dags/DAG:/usr/local/airflow/dags - - ./cornflow-dags/requirements.txt:/requirements.txt - - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins - environment: - - AIRFLOW_DB_HOST=airflow_db - - AIRFLOW_DB_PORT=5432 - - AIRFLOW_DB_USER=airflow - - AIRFLOW_DB_PASSWORD=airflow - - AIRFLOW_DB=airflow - - AIRFLOW_LDAP_ENABLE=True - - AIRFLOW_LDAP_URI=ldap://openldap:389 - - AIRFLOW_LDAP_SEARCH=ou=users,dc=example,dc=org - - AIRFLOW_LDAP_BIND_USER=cn=admin,dc=example,dc=org - - AIRFLOW_LDAP_BIND_PASSWORD=admin - - AIRFLOW_LDAP_UID_FIELD=cn + <<: *airflow-common + command: webserver ports: - 8080:8080 - depends_on: - - airflow_db - - openldap - command: webserver healthcheck: test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"] interval: 30s timeout: 30s retries: 5 - - airflow_db: - image: postgres environment: - - POSTGRES_USER=airflow - - POSTGRES_PASSWORD=airflow - - POSTGRES_DB=airflow - volumes: - - postgres_af_data:/var/lib/postgresql/data/ + <<: *airflow-common-env + restart: always + depends_on: + <<: *airflow-common-depends-on openldap: image: osixia/openldap diff --git a/docker-compose.yml b/docker-compose.yml index 2b4c99695..9308d8226 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,77 +1,148 @@ -version: '3' +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# Basic Airflow cluster configuration for development purposes. +# +# WARNING: This configuration is for local development. Do not use it in a production deployment. +# +# This configuration supports basic configuration using environment variables or an .env file +# The following variables are supported: +# +# Feel free to modify this file to suit your needs. +version: '3.8' + +x-airflow-common: + &airflow-common + # In order to add custom dependencies or upgrade provider packages you can use your extended image. + # Comment the image line and uncomment the "build" and "context" lines below, Then run `docker-compose build` to build the images. 
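  # As a sketch (reusing the context path already referenced in the commented lines
  # below), the uncommented build section would look roughly like:
  #
  #   build:
  #     context: ./cornflow-server/airflow_config
  #
  # after which `docker-compose build` rebuilds the image from that local Dockerfile.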
+ image: baobabsoluciones/airflow:latest + platform: linux/amd64 + #build: + #context: ./cornflow-server/airflow_config + environment: + &airflow-common-env + AIRFLOW_USER: admin + AIRFLOW_PWD: admin + AIRFLOW_DB_HOST: airflow_db + AIRFLOW_DB_PORT: 5432 + AIRFLOW_DB_USER: airflow + AIRFLOW_DB_PASSWORD: airflow + AIRFLOW_DB: airflow + AIRFLOW__WEBSERVER__INSTANCE_NAME: CornflowEnv + AIRFLOW__WEBSERVER__NAVBAR_COLOR: '#0693e3' + volumes: + - ./cornflow-dags/DAG:/usr/local/airflow/dags + - ./cornflow-dags/requirements.txt:/requirements.txt + - ./cornflow-server/airflow_config/logs:/usr/local/airflow/logs + - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins + - ./cornflow-server/airflow_config/airflow_local_settings.py:/usr/local/airflow/config/airflow_local_settings.py + depends_on: + &airflow-common-depends-on + airflow_db: + condition: service_healthy + +x-cornflow-common: + &cornflow-common + # In order to add custom dependencies or upgrade provider packages you can use your extended image. + # Comment the image line and uncomment the "build" and "context" lines below, Then run `docker-compose build` to build the images. + image: baobabsoluciones/cornflow:release-v1.0.7 + platform: linux/amd64 + #build: + #context: ./cornflow-server + environment: + &cornflow-common-env + CORNFLOW_ADMIN_USER: cornflow_admin + CORNFLOW_ADMIN_PWD: Cornflow_admin1234 + CORNFLOW_SERVICE_USER: service_user + CORNFLOW_SERVICE_PWD: Service_user1234 + CORNFLOW_DB_HOST: cornflow_db + CORNFLOW_DB_PORT: 5432 + CORNFLOW_DB_USER: cornflow + CORNFLOW_DB_PASSWORD: cornflow + CORNFLOW_DB: cornflow + depends_on: + &cornflow-common-depends-on + cornflow_db: + condition: service_healthy + webserver: + condition: service_healthy services: cornflow: - build: - context: ./cornflow-server - dockerfile: Dockerfile + <<: *cornflow-common restart: always ports: - 5000:5000 + healthcheck: + test: ["CMD-SHELL", "[ -f /usr/src/app/gunicorn.pid ]"] + interval: 30s + timeout: 30s + retries: 5 environment: - - CORNFLOW_ADMIN_USER=cornflow_admin - - CORNFLOW_ADMIN_PWD=Cornflow_admin1234 - - CORNFLOW_SERVICE_USER=service_user - - CORNFLOW_SERVICE_PWD=Service_user1234 - - CORNFLOW_DB_HOST=cornflow_db - - CORNFLOW_DB_PORT=5432 - - CORNFLOW_DB_USER=cornflow - - CORNFLOW_DB_PASSWORD=cornflow - - CORNFLOW_DB=cornflow - - CF_ALARMS_ENDPOINT=0 + <<: *cornflow-common-env depends_on: - - cornflow_db - - webserver + <<: *cornflow-common-depends-on cornflow_db: - image: postgres + image: postgres:15.4 environment: - POSTGRES_USER=cornflow - POSTGRES_PASSWORD=cornflow - POSTGRES_DB=cornflow + healthcheck: + test: ["CMD", "pg_isready", "-U", "cornflow"] + interval: 10s + retries: 5 + start_period: 5s + restart: always volumes: - postgres_cf_data:/var/lib/postgresql/data/ + + airflow_db: + image: postgres:15.4 + environment: + - POSTGRES_USER=airflow + - POSTGRES_PASSWORD=airflow + - POSTGRES_DB=airflow + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 10s + retries: 5 + start_period: 5s + volumes: + - postgres_af_data:/var/lib/postgresql/data/ webserver: - build: - context: ./cornflow-server/airflow_config - dockerfile: Dockerfile - restart: always - volumes: - - ./cornflow-dags/DAG:/usr/local/airflow/dags - - ./cornflow-dags/requirements.txt:/requirements.txt - - ./cornflow-server/airflow_config/plugins:/usr/local/airflow/plugins - environment: - - AIRFLOW_USER=admin - - AIRFLOW_PWD=admin - - CORNFLOW_SERVICE_USER=service_user - - CORNFLOW_SERVICE_PWD=Service_user1234 - - AIRFLOW_DB_HOST=airflow_db - - 
AIRFLOW_DB_PORT=5432 - - AIRFLOW_DB_USER=airflow - - AIRFLOW_DB_PASSWORD=airflow - - AIRFLOW_DB=airflow + <<: *airflow-common + command: webserver ports: - 8080:8080 - depends_on: - - airflow_db - command: webserver healthcheck: test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"] interval: 30s timeout: 30s retries: 5 - - airflow_db: - image: postgres environment: - - POSTGRES_USER=airflow - - POSTGRES_PASSWORD=airflow - - POSTGRES_DB=airflow - volumes: - - postgres_af_data:/var/lib/postgresql/data/ + <<: *airflow-common-env + restart: always + depends_on: + <<: *airflow-common-depends-on volumes: postgres_cf_data: postgres_af_data: - From 17efc14f7c29c86fafc38ef7fc12e1cc74e5c14a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Guillermo=20Gonz=C3=A1lez-Santander=20de=20la=20Cruz?= Date: Fri, 13 Oct 2023 13:42:51 +0200 Subject: [PATCH 2/3] Now responses go back in the intended order instead of in alphabetical order (#485) --- cornflow-server/cornflow/app.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cornflow-server/cornflow/app.py b/cornflow-server/cornflow/app.py index 613d9e2a2..314a7627b 100644 --- a/cornflow-server/cornflow/app.py +++ b/cornflow-server/cornflow/app.py @@ -49,6 +49,7 @@ def create_app(env_name="development", dataconn=None): dictConfig(log_config(app_config[env_name].LOG_LEVEL)) app = Flask(__name__) + app.json.sort_keys = False app.logger.setLevel(app_config[env_name].LOG_LEVEL) app.config.from_object(app_config[env_name]) From 7c4dc95f770d2fff639cbd64fe7f971b3c8ac071 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Guillermo=20Gonz=C3=A1lez-Santander=20de=20la=20Cruz?= Date: Fri, 13 Oct 2023 16:01:07 +0200 Subject: [PATCH 3/3] Airflow variables (#486) * Changed the way we invoke the connection uri Changed the variable that setups the environment name * Updated unit test --- libs/client/cornflow_client/airflow/dag_utilities.py | 4 ++-- libs/client/cornflow_client/tests/unit/test_dag_utilities.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index 6e5a4ae5f..741b148e6 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -78,7 +78,7 @@ def connect_to_cornflow(secrets): """ # This secret comes from airflow configuration print("Getting connection information from ENV VAR=CF_URI") - uri = secrets.get_conn_uri("CF_URI") + uri = secrets.get_conn_value("CF_URI") conn = urlparse(uri) scheme = conn.scheme if scheme == "cornflow": @@ -326,7 +326,7 @@ def callback_email(context): notification_email = EnvironmentVariablesBackend().get_variable( "NOTIFICATION_EMAIL" ) - environment_name = EnvironmentVariablesBackend().get_variable("ENVIRONMENT_NAME") + environment_name = os.getenv("AIRFLOW__WEBSERVER__INSTANCE_NAME", "CornflowEnv") title = f"Airflow. {environment_name} ({environment}). 
DAG/task error: {context['dag'].dag_id}/{context['ti'].task_id} Failed" body = f""" diff --git a/libs/client/cornflow_client/tests/unit/test_dag_utilities.py b/libs/client/cornflow_client/tests/unit/test_dag_utilities.py index fdaa33abb..6469dd850 100644 --- a/libs/client/cornflow_client/tests/unit/test_dag_utilities.py +++ b/libs/client/cornflow_client/tests/unit/test_dag_utilities.py @@ -31,8 +31,8 @@ def test_env_connection_vars(self, CornFlow): ] client_instance = CornFlow.return_value client_instance.login.return_value = "" - for (conn_str, user_info, url) in conn_uris: - secrets.get_conn_uri.return_value = conn_str + for conn_str, user_info, url in conn_uris: + secrets.get_conn_value.return_value = conn_str du.connect_to_cornflow(secrets) client_instance.login.assert_called_with( username=user_info[0], pwd=user_info[1]