diff --git a/.circleci/config.yml b/.circleci/config.yml index 4e1c4d4105..519f7caf58 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -8,7 +8,7 @@ jobs: # 'machine' executor runs Unit tests ~x1.5 faster, comparing to 'docker' executor # but the fastest is still ~x1.5-2 slower, comparing to Travis machine: true - parallelism: 4 + parallelism: 3 working_directory: ~/st2 steps: - checkout @@ -43,8 +43,8 @@ jobs: # Run st2 Integration tests integration: docker: - - image: circleci/python:3.6 - - image: mongo:4.0 + - image: circleci/python:3.8 + - image: mongo:4.4 - image: rabbitmq:3 working_directory: ~/st2 steps: @@ -57,8 +57,9 @@ jobs: name: Install Mongo Shell command: | set -x - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 - echo "deb http://repo.mongodb.org/apt/debian jessie/mongodb-org/4.0 main" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.0.list + sudo apt-get -qq -y install gnupg curl + curl -fsSL https://www.mongodb.org/static/pgp/server-4.4.asc | sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/mongodb-server-4.4.gpg + echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.4.list sudo apt-get -qq -y update sudo apt-get -qq -y install mongodb-org-shell - run: @@ -79,8 +80,8 @@ jobs: # Run st2 Lint Checks lint: docker: - - image: circleci/python:3.6 - - image: mongo:4.0 + - image: circleci/python:3.8 + - image: mongo:4.4 - image: rabbitmq:3 working_directory: ~/st2 steps: @@ -107,16 +108,16 @@ jobs: # Build & Test st2 packages packages: - parallelism: 4 + parallelism: 3 # 4CPUs & 8GB RAM CircleCI machine # sadly, it doesn't work with 'setup_remote_docker' resource_class: large docker: # The primary container is an instance of the first list image listed. Your build commands run in this container. 
- - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/st2 environment: - - DISTROS: "bionic focal el7 el8" + - DISTROS: "focal el8 el9" - ST2_PACKAGES_REPO: https://github.com/StackStorm/st2-packages - ST2_PACKAGES: "st2" - ST2_CHECKOUT: 0 @@ -124,25 +125,40 @@ jobs: - BASH_ENV: ~/.buildenv steps: - checkout + - run: + name: Install latest Docker Compose V2 + command: | + set -x + export CODENAME=$(source /etc/os-release && echo "$VERSION_CODENAME") + export DISTRO=$(source /etc/os-release && echo "$ID") + export ARCH=$(dpkg --print-architecture) + # get gpg key for download.docker + curl -fsSL https://download.docker.com/linux/${DISTRO}/gpg | sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/download.docker.gpg + # set source list + sudo tee <<<"deb [arch=${ARCH}] https://download.docker.com/linux/${DISTRO} ${CODENAME} stable" /etc/apt/sources.list.d/download.docker.list + # update package list + sudo apt update + # install docker CLI and Docker Compose v2 + sudo apt install docker-ce-cli docker-compose-plugin - setup_remote_docker: reusable: true # default - false exclusive: true # default - true - # Temporary workaround for Circle CI issue - # https://discuss.circleci.com/t/setup-remote-docker-connection-failures/26434 - version: 18.05.0-ce + version: docker24 - run: name: Docker version command: | set -x docker --version - docker-compose --version + docker compose version - run: name: Download st2-packages repository command: | set -x + PIP_VERSION=$(grep ^PIP_VERSION Makefile) git clone ${ST2_PACKAGES_REPO} ~/st2-packages cd ~/st2-packages git checkout ${CIRCLE_BRANCH} || true + sed -i -e "s/^PIP_VERSION .*$/${PIP_VERSION}/" packages/st2/debian/rules - run: name: Initialize packages Build Environment command: | @@ -169,14 +185,14 @@ jobs: name: Copy st2-packages files to build containers command: | # creating dummy container which will hold a volume with data files - docker create -v /root/st2-packages -v ${ST2_GITDIR} -v /root/build -v 
/var/log/st2 -v /root/.cache/pip -v /tmp/wheelhouse --name st2-packages-vol alpine:3.4 /bin/true + docker create -v /root/st2-packages -v ${ST2_GITDIR} -v /root/build -v /var/log/st2 -v /root/.cache/pip -v /tmp/wheelhouse --name st2-packages-vol alpine:3.12 /bin/true # copy st2-packages data files into this volume docker cp ~/st2-packages st2-packages-vol:/root # copy st2 source files into this volume docker cp . st2-packages-vol:${ST2_GITDIR} - run: name: Pull dependent Docker Images - command: .circle/docker-compose2.sh pull ${DISTRO} + command: .circle/docker-compose2.sh pull ${DISTRO} || .circle/docker-compose2.sh pull ${DISTRO} working_directory: ~/st2-packages - run: name: Build the ${DISTRO} Packages @@ -186,14 +202,6 @@ jobs: mkdir -p ~/st2/packages/${DISTRO}/log/ docker cp st2-packages-vol:/root/build/. ~/st2/packages/${DISTRO} working_directory: ~/st2-packages -# # TODO: It works! (~0.5-1min speed-up) Enable CircleCI2.0 cache for pip and wheelhouse later -# - run: -# name: Build the ${DISTRO} Packages 2nd time (compare with pip/wheelhouse cached) -# command: | -# .circle/docker-compose2.sh build ${DISTRO} -# # Once build container finishes we can copy packages directly from it -# docker cp st2-packages-vol:/root/build /tmp/st2-packages -# working_directory: ~/st2-packages - run: name: Test the Packages command: .circle/docker-compose2.sh test ${DISTRO} @@ -229,10 +237,10 @@ jobs: deploy: docker: # The primary container is an instance of the first list image listed. Your build commands run in this container. - - image: circleci/ruby:2.4 + - image: circleci/ruby:2.7 working_directory: /tmp/deploy environment: - - DISTROS: "bionic focal el7 el8" + - DISTROS: "focal el8 el9" steps: - attach_workspace: at: . diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..c79b9dd172 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,8 @@ +# See: https://www.mankier.com/5/gitattributes + +# lockfile merge conflicts: do not manually merge. 
+# The "-merge" makes git leave the current branch's lockfile as-is, like a binary file. +# To resolve the conflict, resolve any conflicts in requirements files, +# and then regenerate the lockfile with (resolve names are 'st2', 'black', etc): +# pants generate-lockfiles --resolve= +/lockfiles/*.lock -merge diff --git a/.github/actions/apt-packages/action.yaml b/.github/actions/apt-packages/action.yaml new file mode 100644 index 0000000000..db0695a9bc --- /dev/null +++ b/.github/actions/apt-packages/action.yaml @@ -0,0 +1,33 @@ +--- +name: Cache and Install APT Dependencies +description: + Light wrapper around the actions/cache action and our script + to maintain the input vars in only one place for all workflows. + +runs: + using: "composite" + steps: + - name: Cache APT Dependencies + id: cache-apt-deps + uses: actions/cache@v4 + with: + path: | + ~/apt_cache + key: ${{ runner.os }}-v8-apt-${{ hashFiles('scripts/github/apt-packages.txt') }} + restore-keys: | + ${{ runner.os }}-v8-apt- + + - name: Install APT Dependencies + shell: bash + env: + CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} + run: | + # install dev dependencies for Python YAML and LDAP packages + # https://github.com/StackStorm/st2-auth-ldap + ./scripts/github/install-apt-packages-use-cache.sh + + - name: Install Mongo Shell + shell: bash + run: | + # Required as part of switch to GHA image ubuntu-22.04 + ./scripts/github/install-mongosh.sh diff --git a/.github/actions/init-pants/action.yaml b/.github/actions/init-pants/action.yaml new file mode 100644 index 0000000000..a1735b2916 --- /dev/null +++ b/.github/actions/init-pants/action.yaml @@ -0,0 +1,38 @@ +--- +name: Initialize Pants and its GHA caches +description: + Light wrapper around the pantsbuild/actions/init-pants action + to maintain the input vars in only one place for all workflows. + +inputs: + gha-cache-key: + description: Qualify all cache keys with this string. Useful for invalidating everything. 
+ required: true +runs: + using: "composite" + steps: + - name: Initialize Pants and its GHA caches + uses: pantsbuild/actions/init-pants@v9 + # This action adds an env var to make pants use both pants.ci.toml & pants.toml. + # This action also creates 3 GHA caches (1 is optional). + # - `pants-setup` has the bootstrapped pants install + # - `pants-named-caches` has pip/wheel and PEX caches + # - `pants-lmdb-store` has the fine-grained process cache. + # If we ever use a remote cache, then we can drop this. + # Otherwise, we may need an additional workflow or job to delete old caches + # if they are not expiring fast enough, and we hit the GHA 10GB per repo max. + with: + base-branch: master + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: ${{ inputs.gha-cache-key }} + # This hash should include all of our lockfiles so that the pip/pex caches + # get invalidated on any transitive dependency update. + named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }} + # enable the optional lmdb_store cache since we can't rely on remote caching yet. + cache-lmdb-store: 'true' + # Try the experimental support for using GHA cache as a pants remote cache. + experimental-remote-cache-via-gha: 'true' + # install whatever version of python we need for our in-repo pants-plugins + setup-python-for-plugins: 'true' + diff --git a/.github/actions/setup-python/action.yaml b/.github/actions/setup-python/action.yaml new file mode 100644 index 0000000000..551cf6736d --- /dev/null +++ b/.github/actions/setup-python/action.yaml @@ -0,0 +1,34 @@ +--- +name: Install Python and Cache Deps +description: + Light wrapper around the actions/setup-python and actions/cache actions + to maintain the input vars in only one place for all workflows. + +inputs: + python-version: + description: Which version of python to install. 
+ required: true + +runs: + using: "composite" + steps: + - name: 'Set up Python (${{ inputs.python-version }})' + uses: actions/setup-python@v5 + with: + python-version: '${{ inputs.python-version }}' + + - name: Cache Python Dependencies + uses: actions/cache@v4 + with: + path: | + ~/.cache/pip + virtualenv + ~/virtualenv + # TODO: maybe make the virtualenv a partial cache to exclude st2*? + # !virtualenv/lib/python*/site-packages/st2* + # !virtualenv/bin/st2* + key: ${{ runner.os }}-v5-python-${{ inputs.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }} + # Don't use alternative key as if requirements.txt has altered we + # don't want to retrieve previous cache + #restore-keys: | + # ${{ runner.os }}-v5-python-${{ inputs.python }}- diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml new file mode 100644 index 0000000000..23ae018965 --- /dev/null +++ b/.github/workflows/checks.yaml @@ -0,0 +1,25 @@ +name: Checks + +on: + pull_request: + types: [assigned, opened, synchronize, reopened, labeled, unlabeled] + branches: + - master + - v[0-9]+.[0-9]+ + +jobs: + # Changelog checker will verify if CHANGELOG.rst was updated for every PR + # See: https://keepachangelog.com/en/1.0.0/ + changelog-checker: + name: Add CHANGELOG.rst + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - name: Changelog check + # https://github.com/marketplace/actions/changelog-checker + uses: Zomzog/changelog-checker@v1.2.0 + with: + fileName: CHANGELOG.rst + checkNotification: Simple + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ec135060be..7333869040 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,7 +11,7 @@ on: # also version tags, which include bugfix releases (eg: v3.4.0) - v[0-9]+.[0-9]+.[0-9]+ pull_request: - type: [opened, reopened, edited] + types: [opened, reopened, synchronize] branches: # 
Only for PRs targeting those branches - master @@ -21,11 +21,14 @@ on: - cron: '0 0 * * *' jobs: + # TODO: Fix the required checks! + # When the pre_job triggers and skips builds, it prevents merging the PR because + # the required checks are reported as skipped instead of passed. # Special job which automatically cancels old runs for the same branch, prevents runs for the # same file set which has already passed, etc. pre_job: name: Skip Duplicate Jobs Pre Job - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} steps: @@ -40,9 +43,9 @@ jobs: needs: pre_job # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) - if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-latest + # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: @@ -50,67 +53,32 @@ jobs: # each job runs subset of tests. # NOTE: We need to use full Python version as part of Python deps cache key otherwise # setup virtualenv step will fail. 
- include: + python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + make: - name: 'Lint Checks (black, flake8, etc.)' task: 'ci-checks' - python-version-short: '3.6' - python-version: '3.6.13' - name: 'Compile (pip deps, pylint, etc.)' task: 'ci-compile' - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Lint Checks (black, flake8, etc.)' - task: 'ci-checks' - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Compile (pip deps, pylint, etc.)' - task: 'ci-compile' - python-version-short: '3.8' - python-version: '3.8.10' - env: - TASK: '${{ matrix.task }}' + TASK: '${{ matrix.make.task }}' COLUMNS: '120' - PYLINT_CONCURRENCY: '4' + PYLINT_CONCURRENCY: '6' steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? 
- # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v3-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - restore-keys: | - ${{ runner.os }}-v2-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 - with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v7- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -131,54 +99,188 @@ jobs: run: | ./scripts/ci/run-nightly-make-task-if-exists.sh "${TASK}" + self-check: + needs: pre_job + # FIXME: dropping the repetition in this name requires changing required checks on GitHub + name: 'Self-check on Python ${{ matrix.python.version-short }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + python: + - {version-short: '3.8', version: '3.8.12'} + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + env: + # CI st2.conf (with ST2_CI_USER 
user instead of stanley) + ST2_CONF: 'conf/st2.ci.conf' + + # Name of the user who is running the CI (on GitHub Actions this is 'runner') + ST2_CI_USER: 'runner' + + # GitHub is juggling how to set vars for multiple shells. Protect our PATH assumptions. + PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + + # Space separated list of tests to be skipped if the self-check is running in GitHub Actions + TESTS_TO_SKIP: "tests.test_quickstart_rules tests.test_run_pack_tests_tool" + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Custom Environment Setup + run: | + ./scripts/github/setup-environment.sh + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python + with: + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + - name: Install virtualenv + run: | + ./scripts/github/install-virtualenv.sh + - name: Install requirements + run: | + ./scripts/ci/install-requirements.sh + - name: Setup Tests + run: | + # prep a ci-specific dev conf file that uses runner instead of stanley + # this user is the username of the user in GitHub actions, used for SSH, etc during + # integration tests (important) + cp conf/st2.dev.conf "${ST2_CONF}" + sed -i -e "s,/home/vagrant/.ssh/stanley_rsa,/home/stanley/.ssh/stanley_rsa," "${ST2_CONF}" + + sudo -E ./scripts/ci/add-itest-user-key.sh + - name: Permissions Workaround + run: | + sudo ST2_CI_REPO_PATH="${ST2_CI_REPO_PATH}" scripts/ci/permissions-workaround.sh + - name: Reconfigure RabbitMQ + # bitnami image allows (see bitnami/rabbitmq readme): + # Here we're copying a rabbitmq.config file which won't do anything. + # We need to switch to custom.conf or advanced.config. 
+ timeout-minutes: 2 # may die if rabbitmq fails to start + run: | + ./scripts/github/configure-rabbitmq.sh + - name: Print versions + run: | + ./scripts/ci/print-versions.sh + - name: make + timeout-minutes: 14 # may die if rabbitmq fails to start + # use: script -e -c to print colors + run: | + script -e -c "make .ci-prepare-integration" && exit 0 + - name: Extend the path for upcoming tasks + # pants uses PEP 660 editable wheels to add our code to the virtualenv. + # But PEP 660 editable wheels do not include 'scripts'. + # https://peps.python.org/pep-0660/#limitations + # So, we need to include each bin dir in PATH instead of virtualenv/bin. + run: | + for component_bin in ${GITHUB_WORKSPACE}/st2*/bin; do + echo ${component_bin} | tee -a $GITHUB_PATH + done + echo ${GITHUB_WORKSPACE}/virtualenv/bin | tee -a $GITHUB_PATH + - name: Create symlinks to find the binaries when running st2 actions + # st2 is actually a console_script entry point, not just a 'script' + # so it IS included in the virtualenv. But, st2-run-pack-tests might not be included. 
+ run: | + ln -s ${GITHUB_WORKSPACE}/virtualenv/bin/st2 /usr/local/bin/st2 + ln -s ${GITHUB_WORKSPACE}/st2common/bin/st2-run-pack-tests /usr/local/bin/st2-run-pack-tests + - name: Install st2client + timeout-minutes: 5 + run: | + cd ./st2client + pip3 install --upgrade pip + python3 setup.py develop + - name: Run self-verification script + env: + ST2_CONF: /home/runner/work/st2/st2/conf/st2.ci.conf + run: | + sudo -E ST2_AUTH_TOKEN=$(st2 auth testu -p 'testp' -t) PATH=${PATH} st2common/bin/st2-self-check + - name: Compress Service Logs Before upload + if: ${{ failure() }} + run: | + ./tools/launchdev.sh stop # stop st2 before collecting logs + tar cvzpf logs.tar.gz logs/* + - name: Upload StackStorm services Logs + if: ${{ failure() }} + uses: actions/upload-artifact@v4 + with: + name: logs-py${{ matrix.python.version }} + path: logs.tar.gz + retention-days: 7 + unit-tests: needs: pre_job # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) - if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-latest + # NB: disabled. See TODO above pre_job + # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: To speed the CI run, we split unit and integration tests into multiple jobs where # each job runs subset of tests. 
- include: + python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + make: - name: 'Unit Tests (chunk 1)' task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' + shard: {k: 0, n: 2} - name: 'Unit Tests (chunk 2)' task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Unit Tests (chunk 1)' - task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Unit Tests (chunk 2)' - task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.8' - python-version: '3.8.10' + shard: {k: 1, n: 2} + # This job is slow so we only run in on a daily basis # - name: 'Micro Benchmarks' # task: 'micro-benchmarks' - # python-version: '3.6.13' - # nosetests_node_total: 1 - # nosetests_node_ index: 0 + # shard: {k: 0, n: 1} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + rabbitmq: image: rabbitmq:3.8-management @@ -190,10 +292,10 @@ jobs: - 15672:15672/tcp # Management: HTTP, CLI env: - TASK: '${{ matrix.task }}' - - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ matrix.nosetests_node_index }}' + TASK: '${{ matrix.make.task }}' + PYTHON_VERSION_SHORT: '${{ matrix.python.version-short }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' # We need to explicitly specify terminal width otherwise some CLI tests fail on container # environments where small 
terminal size is used. @@ -215,43 +317,16 @@ jobs: PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 - with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? - # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v3-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - restore-keys: | - ${{ runner.os }}-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 - with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v5-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v5- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -296,64 +371,44 @@ jobs: ./scripts/ci/run-nightly-make-task-if-exists.sh "${TASK}" - name: Codecov # NOTE: We only generate and submit coverage report for master and version branches and only when the build succeeds (default on GitHub Actions, this was not the 
case on Travis so we had to explicitly check success) - if: "${{ success() && (env.ENABLE_COVERAGE == 'yes') }}" + if: "${{ success() && (env.ENABLE_COVERAGE == 'yes') && (env.PYTHON_VERSION_SHORT == '3.8')}}" run: | ./scripts/ci/submit-codecov-coverage.sh + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} integration-tests: needs: pre_job # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) - if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-latest + # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: To speed the CI run, we split unit and integration tests into multiple jobs where # each job runs subset of tests. - include: + python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + make: # We run pack tests here since they rely on some integration tests set # up (aka stanley user being present, etc.) 
- name: 'Pack Tests' task: 'ci-packs-tests' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Integration Tests (chunk 1)' - task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Integration Tests (chunk 2)' - task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Pack Tests' - task: 'ci-packs-tests' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' + shard: {k: 0, n: 1} - name: 'Integration Tests (chunk 1)' task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' + shard: {k: 0, n: 2} - name: 'Integration Tests (chunk 2)' task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.8' - python-version: '3.8.10' + shard: {k: 1, n: 2} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -388,27 +443,24 @@ jobs: #- 4369:4369/tcp # epmd # - # Used for the coordination backend for integration tests - # NOTE: To speed things up, we only start redis for integration tests - # where it's needed - # redis: - # # Docker Hub image - # image: redis - # # Set health checks to wait until redis has started - # options: >- - # --name "redis" - # --health-cmd "redis-cli ping" - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 6379:6379/tcp + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp env: - TASK: '${{ matrix.task }}' - - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ 
matrix.nosetests_node_index }}' + TASK: '${{ matrix.make.task }}' + PYTHON_VERSION_SHORT: '${{ matrix.python.version-short }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' # We need to explicitly specify terminal width otherwise some CLI tests fail on container # environments where small terminal size is used. @@ -428,45 +480,19 @@ jobs: # GitHub is juggling how to set vars for multiple shells. Protect our PATH assumptions. PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? 
- # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v3-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - restore-keys: | - ${{ runner.os }}-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 - with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v5-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v5- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -481,11 +507,6 @@ jobs: cp conf/st2.dev.conf "${ST2_CONF}" ; sed -i -e "s/stanley/${ST2_CI_USER}/" "${ST2_CONF}" sudo -E ./scripts/ci/add-itest-user-key.sh - - name: Run Redis Service Container - timeout-minutes: 2 - run: | - docker run --rm --detach -p 127.0.0.1:6379:6379/tcp --name redis redis:latest - until [ "$(docker inspect -f {{.State.Running}} redis)" == "true" ]; do sleep 0.1; done - name: Permissions Workaround run: | echo "$ST2_CI_REPO_PATH" @@ -502,7 +523,6 @@ jobs: ./scripts/ci/print-versions.sh - name: make - if: "${{ env.TASK == 'ci-integration' }}" #timeout-minutes: 7 # TODO: Use dynamic timeout value based on the branch - for master we # need to use timeout x2 due to coverage overhead @@ -512,32 +532,33 @@ jobs: script -e -c "make ${TASK}" && exit 0 - name: Codecov # NOTE: We only generate and submit coverage report for master and version branches and only when the build succeeds (default on GitHub Actions, this was not the case on Travis so we had to explicitly check success) - if: 
"${{ success() && env.ENABLE_COVERAGE == 'yes' && env.TASK == 'ci-integration' }}" + if: "${{ success() && (env.ENABLE_COVERAGE == 'yes') && (env.TASK == 'ci-integration') && (env.PYTHON_VERSION_SHORT == '3.8')}}" run: | ./scripts/ci/submit-codecov-coverage.sh + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - name: Compress Service Logs Before upload if: ${{ failure() && env.TASK == 'ci-integration' }} run: | + ./tools/launchdev.sh stop # stop st2 before collecting logs tar cvzpf logs.tar.gz logs/* - name: Upload StackStorm services Logs if: ${{ failure() && env.TASK == 'ci-integration' }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: logs + name: logs-py${{ matrix.python.version }}-pytest-${{ matrix.make.shard.k }} path: logs.tar.gz retention-days: 7 - - name: Stop Redis Service Container - if: "${{ always() }}" - run: docker rm --force redis || true slack-notification: name: Slack notification for failed master builds if: always() needs: - lint-checks + - self-check - unit-tests - integration-tests - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Workflow conclusion # this step creates an environment variable WORKFLOW_CONCLUSION and is the most reliable way to check the status of previous jobs diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml new file mode 100644 index 0000000000..7ab8b17f8d --- /dev/null +++ b/.github/workflows/lint.yaml @@ -0,0 +1,88 @@ +--- +# This Lint workflow uses pants +name: Lint + +on: + push: + branches: + # only on merges to master branch + - master + # and version branches, which only include minor versions (eg: v3.4) + - v[0-9]+.[0-9]+ + tags: + # also version tags, which include bugfix releases (eg: v3.4.0) + - v[0-9]+.[0-9]+.[0-9]+ + pull_request: + types: [opened, reopened, synchronize] + branches: + # Only for PRs targeting those branches + - master + - v[0-9]+.[0-9]+ + #schedule: + # # run every night at midnight + # - cron: '0 0 * * *' + +jobs: + # 
Lint checks which don't depend on any service containers, etc. to be running. + lint-checks: + name: 'Lint Checks (pants runs: shellcheck, bandit, black, flake8, pylint)' + runs-on: ubuntu-22.04 + + env: + COLUMNS: '120' + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'true' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0 + + - name: Lint + run: | + pants lint :: + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + set_merge_ok: + name: Set Merge OK (Lint) + if: always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') + needs: + - lint-checks + outputs: + merge_ok: ${{ steps.set_merge_ok.outputs.merge_ok }} + runs-on: ubuntu-latest + steps: + - id: set_merge_ok + run: echo 'merge_ok=true' >> ${GITHUB_OUTPUT} + + merge_ok: + name: Merge OK (Lint) + if: always() + needs: + - set_merge_ok + runs-on: ubuntu-latest + steps: + - run: | + merge_ok="${{ needs.set_merge_ok.outputs.merge_ok }}" + if [[ "${merge_ok}" == "true" ]]; then + echo "Merge OK" + exit 0 + else + echo "Merge NOT OK" + exit 1 + fi diff --git a/.github/workflows/microbenchmarks.yaml b/.github/workflows/microbenchmarks.yaml index 7480c13b3a..32f1508b1a 100644 --- a/.github/workflows/microbenchmarks.yaml +++ b/.github/workflows/microbenchmarks.yaml @@ -5,13 +5,23 @@ name: Micro Benchmarks on: schedule: - cron: '30 3 * * *' + pull_request: + types: [opened, reopened, synchronize] + branches: + # Only for PRs targeting those branches + - master + - v[0-9]+.[0-9]+ + paths: + # Only for PRs that touch the
benchmarks + - .github/workflows/microbenchmarks.yaml + - st2common/benchmarks/** jobs: # Special job which automatically cancels old runs for the same branch, prevents runs for the # same file set which has already passed, etc. pre_job: name: Skip Duplicate Jobs Pre Job - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} steps: @@ -26,29 +36,25 @@ jobs: # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-latest + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: We need to use full Python version as part of Python deps cache key otherwise # setup virtualenv step will fail. - include: + python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + make: - name: 'Microbenchmarks' task: 'micro-benchmarks' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Microbenchmarks' - task: 'micro-benchmarks' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' + shard: {k: 0, n: 1} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -62,10 +68,10 @@ jobs: - 15672:15672/tcp # Management: HTTP, CLI env: - TASK: '${{ matrix.task }}' + TASK: '${{ matrix.make.task }}' - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ matrix.nosetests_node_index }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' COLUMNS: '120' ST2_CI: 'true' @@ -74,35 +80,13 @@ jobs: 
PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin steps: - name: Checkout repository - uses: actions/checkout@v2 - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 + uses: actions/checkout@v4 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - restore-keys: | - ${{ runner.os }}-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 - with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v7- - - name: Install APT Dependencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -118,9 +102,9 @@ jobs: run: | script -e -c "make ${TASK}" && exit 0 - name: Upload Histograms - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: benchmark_histograms + name: benchmark_histograms-py${{ matrix.python.version }} path: benchmark_histograms/ retention-days: 30 @@ -129,7 +113,7 @@ jobs: if: always() needs: - micro-benchmarks - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Workflow conclusion # this step creates an environment variable WORKFLOW_CONCLUSION and is the most reliable way to check the status of previous jobs diff --git 
a/.github/workflows/orquesta-integration-tests.yaml b/.github/workflows/orquesta-integration-tests.yaml index a7733b6512..40bda8f6cb 100644 --- a/.github/workflows/orquesta-integration-tests.yaml +++ b/.github/workflows/orquesta-integration-tests.yaml @@ -15,7 +15,7 @@ on: # also version tags, which include bugfix releases (eg: v3.4.0) - v[0-9]+.[0-9]+.[0-9]+ pull_request: - type: [opened, reopened, edited] + types: [opened, reopened, synchronize] branches: # Only for PRs targeting those branches - master @@ -25,11 +25,14 @@ on: - cron: '0 0 * * *' jobs: + # TODO: Fix the required checks! + # When the pre_job triggers and skips builds, it prevents merging the PR because + # the required checks are reported as skipped instead of passed. # Special job which automatically cancels old runs for the same branch, prevents runs for the # same file set which has already passed, etc. pre_job: name: Skip Duplicate Jobs Pre Job - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} steps: @@ -43,30 +46,26 @@ jobs: needs: pre_job # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) - if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-latest + # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: We need to use full Python version as part of Python deps cache key otherwise # setup virtualenv step will fail. 
- include: + python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + make: - name: 'Integration Tests (Orquesta)' task: 'ci-orquesta' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version: '3.6.13' - python-version-short: '3.6' - - name: 'Integration Tests (Orquesta)' - task: 'ci-orquesta' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' + shard: {k: 0, n: 1} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -79,27 +78,23 @@ jobs: - 5672:5672/tcp # AMQP standard port - 15672:15672/tcp # Management: HTTP, CLI - # Used for the coordination backend for integration tests - # NOTE: To speed things up, we only start redis for integration tests - # where it's needed - # redis: - # # Docker Hub image - # image: redis - # # Set health checks to wait until redis has started - # options: >- - # --name "redis" - # --health-cmd "redis-cli ping" - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 6379:6379/tcp + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp env: - TASK: '${{ matrix.task }}' - - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ matrix.nosetests_node_index }}' + TASK: '${{ matrix.make.task }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' # We need to explicitly specify terminal width otherwise some CLI tests fail on container # environments where small terminal size is used. 
@@ -121,43 +116,16 @@ jobs: PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 - with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? - # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v3-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - restore-keys: | - ${{ runner.os }}-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v7- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -172,11 +140,6 @@ jobs: cp conf/st2.dev.conf "${ST2_CONF}" ; sed -i -e "s/stanley/${ST2_CI_USER}/" "${ST2_CONF}" sudo -E ./scripts/ci/add-itest-user-key.sh - - name: Run Redis Service Container - timeout-minutes: 2 - run: | - docker run --rm --detach -p 127.0.0.1:6379:6379/tcp 
--name redis redis:latest - until [ "$(docker inspect -f {{.State.Running}} redis)" == "true" ]; do sleep 0.1; done - name: Permissions Workaround run: | echo "$ST2_CI_REPO_PATH" @@ -185,7 +148,7 @@ jobs: run: | ./scripts/ci/print-versions.sh - name: make - timeout-minutes: 31 + timeout-minutes: 41 env: MAX_ATTEMPTS: 3 RETRY_DELAY: 5 @@ -207,38 +170,25 @@ jobs: set -e echo "Failed after ${MAX_ATTEMPTS} attempts, failing the job." exit 1 - - name: Upload StackStorm services Logs - #if: ${{ failure() }} - uses: actions/upload-artifact@v2 - with: - name: logs - path: logs/ - - name: Codecov - # NOTE: We only generate and submit coverage report for master and version branches and only when the build succeeds (default on GitHub Actions, this was not the case on Travis so we had to explicitly check success) - if: "${{ success() && env.ENABLE_COVERAGE == 'yes' }}" - run: | - ./scripts/ci/submit-codecov-coverage.sh - name: Compress Service Logs Before upload if: ${{ failure() }} run: | + ./tools/launchdev.sh stop # stop st2 before collecting logs tar cvzpf logs.tar.gz logs/* - name: Upload StackStorm services Logs if: ${{ failure() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: logs + name: logs-py${{ matrix.python.version }} path: logs.tar.gz retention-days: 7 - - name: Stop Redis Service Container - if: "${{ always() }}" - run: docker rm --force redis || true slack-notification: name: Slack notification for failed master builds if: always() needs: - integration-tests - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Workflow conclusion # this step creates an environment variable WORKFLOW_CONCLUSION and is the most reliable way to check the status of previous jobs diff --git a/.github/workflows/pants.yaml b/.github/workflows/pants.yaml new file mode 100644 index 0000000000..94a60df45e --- /dev/null +++ b/.github/workflows/pants.yaml @@ -0,0 +1,47 @@ +--- +name: Validate Pants Metadata + +on: + push: + branches: + # 
only on merges to master branch + - master + # and version branches, which only include minor versions (eg: v3.4) + - v[0-9]+.[0-9]+ + tags: + # also version tags, which include bugfix releases (eg: v3.4.0) + - v[0-9]+.[0-9]+.[0-9]+ + pull_request: + types: [opened, reopened, synchronize] + branches: + # Only for PRs targeting those branches + - master + - v[0-9]+.[0-9]+ + +jobs: + pants-tailor: + name: Make sure pants BUILD files are up-to-date + runs-on: ubuntu-22.04 + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'true' + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-BUILD + + - name: Check BUILD files + run: | + pants tailor --check update-build-files --check :: + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python-version }} + path: .pants.d/pants.log + if: always() # We want the log even on failures. diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000000..da77799c0c --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,738 @@ +--- +# This Test workflow uses pants +name: Test + +on: + push: + branches: + # only on merges to master branch + - master + # and version branches, which only include minor versions (eg: v3.4) + - v[0-9]+.[0-9]+ + tags: + # also version tags, which include bugfix releases (eg: v3.4.0) + - v[0-9]+.[0-9]+.[0-9]+ + pull_request: + types: [opened, reopened, synchronize] + branches: + # Only for PRs targeting those branches + - master + - v[0-9]+.[0-9]+ + #schedule: + # # run every night at midnight + # - cron: '0 0 * * *' + +env: + COLUMNS: '120' + + # Tell StackStorm that we are indeed in CI mode, using our CI-provider agnostic var. 
+ ST2_CI: 'true' + + # GitHub Actions uses the 'runner' user, so use that instead of stanley. + ST2TESTS_SYSTEM_USER: 'runner' + + # This is the host:port provided by services.redis + ST2TESTS_REDIS_HOST: '127.0.0.1' + ST2TESTS_REDIS_PORT: '6379' + +jobs: + pants-plugins-tests: + name: 'Pants Plugins Tests (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. + python: + # Pants itself uses only 3.9 + - {version-short: '3.9', version: '3.9.14'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. 
+ + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Test pants-plugins + run: | + pants test pants-plugins/:: + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-pants-plugins-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + unit-tests: + name: 'Unit Tests Shard ${{ matrix.shard.k }}/${{ matrix.shard.n }} (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. 
+ python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + shard: + # Sharding of tests is handled by pants: + # https://www.pantsbuild.org/stable/docs/using-pants/advanced-target-selection#sharding-the-input-targets + - {k: '0', n: '4'} + - {k: '1', n: '4'} + - {k: '2', n: '4'} + - {k: '3', n: '4'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. + + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. 
+ gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Unit Tests + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=unit + --test-shard=${{ matrix.shard.k }}/${{ matrix.shard.n }} + test '::' + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-unit-tests-shard-${{ matrix.shard.k }}_${{ matrix.shard.n }} + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + pack-tests: + name: 'Pack Tests (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. + python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. 
+ + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Pack Tests + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=pack + test '::' + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-pack-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + integration-tests: + name: 'Integration Tests (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. + python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + # In GHA, these services are started first before the code is checked out. + # We use bitnami images to facilitate reconfiguring RabbitMQ during integration tests. + # We rely on custom config and SSL certs that are in the repo. 
+ # Many images require config in env vars (which we can't change during the test job) + # or they require config in entrypoint args (which we can't override for GHA services) + # bitnami builds ways to get config files from mounted volumes. + rabbitmq: + image: bitnami/rabbitmq:3.8 + volumes: + - /home/runner/rabbitmq_conf:/bitnami/conf # RABBITMQ_MOUNTED_CONF_DIR + env: + # tell bitnami/rabbitmq to enable this by default + RABBITMQ_PLUGINS: rabbitmq_management + RABBITMQ_USERNAME: guest + RABBITMQ_PASSWORD: guest + RABBITMQ_LOGS: '-' + # bitnami's default relative limit was too high + RABBITMQ_DISK_FREE_ABSOLUTE_LIMIT: 50MB + + # These are strictly docker options, not entrypoint args (GHA restriction) + options: >- + --name rabbitmq + ports: + # These 6 ports are exposed by bitnami/rabbitmq (see https://www.rabbitmq.com/networking.html#ports) + # host_port:container_port/protocol + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + #- 15671:15671/tcp # Management: SSL port + #- 25672:25672/tcp # inter-node or CLI + #- 4369:4369/tcp # epmd + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. 
+ + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-py${{ matrix.python.version }} + + # This is only required for st2common/tests/integration/test_rabbitmq_ssl_listener.py + - name: Reconfigure RabbitMQ + # bitnami image allows (see bitnami/rabbitmq readme): + # Here we're copying a rabbitmq.config file which won't do anything. + # We need to switch to custom.conf or advanced.config. + timeout-minutes: 2 # may die if rabbitmq fails to start + run: | + ./scripts/github/configure-rabbitmq.sh + + - name: Integration Tests + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=integration + --tag=-st2cluster + test '::' + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-integration-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + integration-st2cluster-tests: + name: 'Integration Tests (Full Cluster)- Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. 
+ python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. + + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. 
+ gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Export virtualenv to run Dev ST2 Cluster + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + export + --resolve=st2 + + - name: Launch Dev ST2 Cluster + env: + VIRTUALENV_DIR: ./dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }} + run: | + sudo -E ./scripts/github/prepare-integration.sh + + - name: Integration Tests + env: + ST2_CI_RUN_ORQUESTA_PAUSE_RESUME_TESTS: 'true' + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=integration + --tag=st2cluster + test '::' + + - name: Compress Service Logs Before upload + if: failure() + run: | + ./tools/launchdev.sh stop # stop st2 before collecting logs + tar cvzpf logs.tar.gz logs/* + + - name: Upload StackStorm services Logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: logs-py${{ matrix.python.version }}-st2cluster-integration-tests + path: logs.tar.gz + retention-days: 7 + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-st2cluster-integration-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + self-check: + name: 'Self-Check - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail.
+ python: + - {version-short: '3.8', version: '3.8.12'} + - {version-short: '3.9', version: '3.9.14'} + - {version-short: '3.10', version: '3.10.15'} + - {version-short: '3.11', version: '3.11.10'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. + + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. 
+ gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Export virtualenv to run Dev ST2 Cluster + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + export + --resolve=st2 + + - name: Add ST2 System User Key + env: + ST2_CI_USER: ${{ env.ST2TESTS_SYSTEM_USER }} + ST2_SYSTEM_USER__USER: ${{ env.ST2TESTS_SYSTEM_USER }} + ST2_SYSTEM_USER__SSH_KEY_FILE: /home/${{ env.ST2TESTS_SYSTEM_USER }}/.ssh/stanley_rsa + run: | + sudo -E ./scripts/ci/add-itest-user-key.sh + + - name: Launch Dev ST2 Cluster + env: + # NOTE: ST2_CONF defaults to ${GITHUB_WORKSPACE}/conf/st2.dev.conf + VIRTUALENV_DIR: ./dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }} + ST2_SYSTEM_USER__USER: ${{ env.ST2TESTS_SYSTEM_USER }} + ST2_SYSTEM_USER__SSH_KEY_FILE: /home/${{ env.ST2TESTS_SYSTEM_USER }}/.ssh/stanley_rsa + run: | + sudo -E ./scripts/github/prepare-integration.sh + + - name: Extend the path for upcoming tasks + # pants uses PEP 660 editable wheels to add our code to the virtualenv. + # But PEP 660 editable wheels do not include 'scripts'. + # https://peps.python.org/pep-0660/#limitations + # So, we need to include each bin dir in PATH instead of virtualenv/bin. + env: + VIRTUALENV_DIR: dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }} + run: | + for component_bin in ${GITHUB_WORKSPACE}/st2*/bin; do + echo ${component_bin} | tee -a $GITHUB_PATH + done + echo ${GITHUB_WORKSPACE}/${VIRTUALENV_DIR}/bin | tee -a $GITHUB_PATH + + - name: Create symlinks to find the binaries when running st2 actions + # st2 is actually a console_script entry point, not just a 'script' + # so it IS included in the virtualenv. But, st2-run-pack-tests might not be included. 
+        env:
+          VIRTUALENV_DIR: dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }}
+        run: |
+          ln -s ${GITHUB_WORKSPACE}/${VIRTUALENV_DIR}/bin/st2 /usr/local/bin/st2
+          ln -s ${GITHUB_WORKSPACE}/st2common/bin/st2-run-pack-tests /usr/local/bin/st2-run-pack-tests
+
+      - name: Run st2-self-check
+        env:
+          # Space separated list of tests to be skipped if the self-check is running in GitHub Actions
+          TESTS_TO_SKIP: "tests.test_quickstart_rules tests.test_run_pack_tests_tool"
+          ST2_SYSTEM_USER__USER: ${{ env.ST2TESTS_SYSTEM_USER }}
+          ST2_SYSTEM_USER__SSH_KEY_FILE: /home/${{ env.ST2TESTS_SYSTEM_USER }}/.ssh/stanley_rsa
+        run: >
+          sudo
+          -E
+          ST2_AUTH_TOKEN=$(st2 auth testu -p 'testp' -t)
+          ST2_CONF=${GITHUB_WORKSPACE}/conf/st2.dev.conf
+          PATH=${PATH}
+          st2common/bin/st2-self-check
+
+      - name: Compress Service Logs Before upload
+        if: failure()
+        run: |
+          ./tools/launchdev.sh stop # stop st2 before collecting logs
+          tar cvzpf logs.tar.gz logs/*
+
+      - name: Upload StackStorm services Logs
+        if: failure()
+        uses: actions/upload-artifact@v4
+        with:
+          # NOTE: must use matrix.python.version (the matrix key is a mapping named
+          # "python"); matrix.python-version does not exist and expands to an empty
+          # string, which would collide artifact names across matrix entries.
+          name: logs-py${{ matrix.python.version }}-self-check
+          path: logs.tar.gz
+          retention-days: 7
+
+      - name: Upload pants log
+        uses: actions/upload-artifact@v4
+        with:
+          name: pants-log-py${{ matrix.python.version }}-self-check
+          path: .pants.d/pants.log
+        if: always()  # We want the log even on failures.
+ + set_merge_ok: + name: Set Merge OK (Tests) + if: always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') + needs: + - pants-plugins-tests + - unit-tests + - pack-tests + - integration-tests + - integration-st2cluster-tests + - self-check + outputs: + merge_ok: ${{ steps.set_merge_ok.outputs.merge_ok }} + runs-on: ubuntu-latest + steps: + - id: set_merge_ok + run: echo 'merge_ok=true' >> ${GITHUB_OUTPUT} + + merge_ok: + name: Merge OK (Tests) + if: always() + needs: + - set_merge_ok + runs-on: ubuntu-latest + steps: + - run: | + merge_ok="${{ needs.set_merge_ok.outputs.merge_ok }}" + if [[ "${merge_ok}" == "true" ]]; then + echo "Merge OK" + exit 0 + else + echo "Merge NOT OK" + exit 1 + fi diff --git a/.gitignore b/.gitignore index c43480996a..dc1b6aec20 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.py[cod] *.sqlite *.log +*.orig .stamp* # C extensions @@ -10,7 +11,6 @@ *.egg *.egg-info dist -build .venv eggs parts @@ -50,6 +50,12 @@ nosetests.xml htmlcov benchmark_histograms/ +# Pants workspace files +/.pants.d/ +/dist/ +/.pids +/.pants.workdir.file_lock* + # Mr Developer .idea .DS_Store @@ -61,5 +67,9 @@ benchmark_histograms/ # Editor Saves *~ \#*\# -[._]*.sw[a-p] -[._]sw[a-p] +[._]*.sw[a-px] +[._]sw[a-px] +[._]*.sw[a-p]x +[._]sw[a-p]x + +**/build/lib/** diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bdd3a3aee6..45e5daaf44 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,7 +37,7 @@ repos: language: script types: [file, python] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.5.0 + rev: v5.0.0 hooks: - id: trailing-whitespace exclude: (^conf/|^st2common/st2common/openapi.yaml|^st2client/tests/fixtures|^st2tests/st2tests/fixtures) diff --git a/ADOPTERS.md b/ADOPTERS.md index dabeaadf1d..50c3834f79 100644 --- a/ADOPTERS.md +++ b/ADOPTERS.md @@ -1,10 +1,15 @@ +# Who uses StackStorm? 
+As the StackStorm Community evolves, we'd like to keep track of our users. Please submit a PR with your organization and a brief use case description below. + This is an alphabetical list of known [StackStorm](https://stackstorm.com/) adopters: * [Adobe](https://www.adobe.com/) - Multinational computer software company. After evaluating both SaltStack and Rundeck, Adobe chose StackStorm towards their journey to self-healing infrastructure. As a result, SRE team could resolve thousands of alerts and fix 70% of the outages automatically without human intervention. [[ DevOpsDays Notes ](https://threadreaderapp.com/thread/1098901714567081984.html)] [[ DevOpsCon Talk ](https://devopscon.io/monitoring-traceability-diagnostics/workflow-engines-our-journey-towards-a-self-healing-infrastructure/)] * [Bitovi](https://www.bitovi.com/) - Consulting company, implemented an Automation solution based on StackStorm API with HA capabilities and custom UI for a Fortune top 10 organization. [[ Blog ](https://www.bitovi.com/blog/stackstorm-solves-devops-automation-for-enterprise-client)] [[ Case study ](https://stackstorm.com/case-study-bitovi/)] +* [CERN](https://home.cern) - CERN's Batch team uses StackStorm for Auto-Remediation Workflows for their compute farm, handling AFS storage overloads, and other automation for maintaining the research infrastructure. [[ HEPIX Presentation ](https://codimd.web.cern.ch/p/r6lbybhXy#/1)] [[ CHEP Presentation ](https://indico.jlab.org/event/459/contributions/11638/attachments/9708/14174/chep23_stackstorm.pptx)] * [DMM.com](https://dmm-corp.com/en/) - Large content provider in Japan. StackStorm is used in Operations helping to maintain online services and development at scale. [[ Case study ](https://stackstorm.com/case-study-dmm/)] +* [DigitalOcean](https://www.digitalocean.com/about) - DigitalOcean simplifies cloud computing so builders can spend more time creating software that changes the world. 
Internally, StackStorm is used as a consistent frontend to our numerous operational tools, and it also plays the part of the orchestration and automation engine driving the machine lifecycle of our vast fleet of machines spread across the globe. * [Dimension Data](https://www.dimensiondata.com/en/about-us) - Global systems integrator and IT services provider, using StackStorm for Datacenter Orchestration as well as Infrastructure, Networking, Security Automation for their large clients and government projects. [[ Case study ](https://stackstorm.com/case-study-dimension-data/)] * [Encore](https://www.encore.tech/) - Data Center, Cloud Computing, IT solutions company ​leverages StackStorm in enterprise scale IT infrastructure for VM & server provisioning, automation, network diagnosis, configuration and orchestration​ on customers' public and private clouds. [[ Blog ](https://encoretechnologies.github.io/blog/2018/03/stackstorm-changed-our-lives/)] [[ Case study ](https://stackstorm.com/case-study-encore/)] * [Fastly](https://www.fastly.com) - Edge Cloud Platform, implemented StackStorm as part of a bigger global network automation architecture aimed at providing an interface to network operations and traffic engineering changes triggered both manually or in response to events on hundreds of devices spread across dozens of sites. [[ Blog ](https://www.fastly.com/blog/network-automation-helps-support-worlds-biggest-live-streaming-moments)] @@ -12,6 +17,7 @@ This is an alphabetical list of known [StackStorm](https://stackstorm.com/) adop * [NL-ix](https://www.nl-ix.net/about/company/) - One of the top five internet exchange in the world where StackStorm is used as Automation Orchestrator, event-driven engine for route server configuration. 
[[ Case study ](https://stackstorm.com/case-study-nlix/)] * [Netflix](https://media.netflix.com/en/about-netflix) - Worldwide media services provider relies on Event-Driven Automation when remediation tasks and runbooks executed in response to alerts. Custom solution built on top StackStorm helped to self-heal NFLX infra at a big scale, saving SRE's sleep. [[ Slides ](https://www.slideshare.net/InfoQ/winston-helping-netflix-engineers-sleep-at-night)] [[ Blog ](https://medium.com/netflix-techblog/introducing-winston-event-driven-diagnostic-and-remediation-platform-46ce39aa81cc)] [[ Case study ](https://stackstorm.com/case-study-netflix/)] * [Pearson](https://www.pearson.com/corporate/about-pearson.html) - An international education company serving more than 75 million learners uses containers, Kubernetes, StackStorm and other open source technologies to streamline their development, operations and delivery of the new products. [[ Case study ](https://stackstorm.com/case-study-pearson/)] +* [Schwarz Digits](https://gruppe.schwarz/en) - We are passionate retailers. The company's Lidl, Kaufland and Schwarz Digits uses StackStorm for self-healing infrastructure in response to alerts, scheduled maintenance, routine tasks and DevOps Automation. It runs on its own [STACKIT](https://www.stackit.de/en/) cloud. [[ Blog ](https://techblog.schwarz/posts/getting-rid-of-operational-tasks-using-stackstorm/)] * [SciLifeLab](https://www.scilifelab.se/about-us/) - [The Arteria project](https://arteria-project.github.io/) provides components to automate analysis and data-management tasks at a next-generation bigdata genomics sequencing center based on StackStorm workflows. StackStorm helps with genomic computation in a cancer research. 
[[ Blog ](https://stackstorm.com/2016/11/15/genomics-sequencing-stackstorm-reading-source-code-biology/)] [[ Case study ](https://stackstorm.com/case-study-scilifelab)] * [Target](https://stackstorm.com/case-study-target/) - one of the largest department store retailers in the US uses StackStorm as an orchestrator within a Target Cloud Platform Engineering group to ensure that integrity, policies and regulatory compliance are maintained via event-driven security automation. [[ Case study ](https://stackstorm.com/case-study-target/)] * [Verizon](https://www.verizon.com/about/) - One of the world's largest telecommunications companies which offers wireless products and services. StackStorm helps dealing with massive scale by automating support for tens of thousands of servers across 100+ datacenters and reducing engineer time spent following a manual series of steps. StackStorm automation, infrastructure-as-code and chatops transformed how Verizon teams deploy, change, repair and decommission server infrastructure with a globally-consistent performance. 
[[ Blog ](https://medium.com/@VZMediaPlatform/using-stackstorm-to-automate-support-for-20-000-servers-4b47ae3a4e98)] diff --git a/BUILD b/BUILD new file mode 100644 index 0000000000..f33988a645 --- /dev/null +++ b/BUILD @@ -0,0 +1,120 @@ +python_requirements( + name="reqs", + source="requirements-pants.txt", + overrides={ + # flex and stevedore uses pkg_resources w/o declaring the dep + ("flex", "stevedore"): dict( + dependencies=[ + "//:reqs#setuptools", + ] + ), + # do not use the prance[flex] extra as that pulls in an old version of flex + "prance": dict( + dependencies=[ + "//:reqs#flex", + ] + ), + # tooz needs one or more backends (tooz is used by the st2 coordination backend) + "tooz": dict( + dependencies=[ + "//:reqs#redis", + "//:reqs#zake", + ] + ), + # make sure anything that uses st2-auth-ldap gets the st2auth constant + "st2-auth-ldap": dict( + dependencies=[ + "st2auth/st2auth/backends/constants.py", + ] + ), + # make sure anything that uses st2-rbac-backend gets its deps + "st2-rbac-backend": dict( + dependencies=[ + # alphabetical order + "st2common/st2common/config.py", + "st2common/st2common/constants/keyvalue.py", + "st2common/st2common/constants/triggers.py", + "st2common/st2common/content/loader.py", + "st2common/st2common/exceptions/db.py", + "st2common/st2common/exceptions/rbac.py", + "st2common/st2common/log.py", + "st2common/st2common/models/api/rbac.py", + "st2common/st2common/models/db/action.py", + "st2common/st2common/models/db/auth.py", + "st2common/st2common/models/db/pack.py", + "st2common/st2common/models/db/rbac.py", + "st2common/st2common/models/db/webhook.py", + "st2common/st2common/models/system/common.py", + "st2common/st2common/persistence/auth.py", + "st2common/st2common/persistence/execution.py", + "st2common/st2common/persistence/rbac.py", + "st2common/st2common/rbac/backends/__init__.py", + "st2common/st2common/rbac/backends/base.py", + "st2common/st2common/rbac/types.py", + "st2common/st2common/script_setup.py", + 
"st2common/st2common/util/action_db.py", + "st2common/st2common/util/misc.py", + "st2common/st2common/util/uid.py", + ] + ), + }, +) + +target( + name="auth_backends", + dependencies=[ + "//:reqs#st2-auth-backend-flat-file", + "//:reqs#st2-auth-ldap", + ], +) + +target( + name="rbac_backends", + dependencies=[ + "//:reqs#st2-rbac-backend", + ], +) + +python_test_utils( + name="test_utils", + skip_pylint=True, +) + +file( + name="license", + source="LICENSE", +) + +shell_sources( + name="root", +) + +file( + name="logs_directory", + source="logs/.gitignore", +) + +files( + name="gitmodules", + sources=[ + ".gitmodules", + "**/.git", + ], +) + +shell_command( + name="capture_git_modules", + environment="in_repo_workspace", + command="cp -r .git/modules {chroot}/.git", + tools=["cp"], + # execution_dependencies allows pants to invalidate the output + # of this command if the .gitmodules file changes (for example: + # if a submodule gets updated to a different repo). + # Sadly this does not get invalidated if the submodule commit + # is updated. In our case, that should be rare. To work around + # this, kill the `pantsd` process after updating a submodule. + execution_dependencies=[":gitmodules"], + output_dependencies=[":gitmodules"], + output_directories=[".git/modules"], + workdir="/", +) diff --git a/BUILD.environment b/BUILD.environment new file mode 100644 index 0000000000..f549e53f3e --- /dev/null +++ b/BUILD.environment @@ -0,0 +1,23 @@ +# Everything listed in pants.toml [evironments-preview.names] should be defined here. +# Relevant docs: +# - https://www.pantsbuild.org/stable/docs/using-pants/environments +# - https://www.pantsbuild.org/stable/reference/targets/experimental_workspace_environment +# - https://www.pantsbuild.org/stable/reference/targets/local_environment +# - https://www.pantsbuild.org/stable/reference/targets/docker_environment + +# This file MUST NOT use any macros. 
+ +experimental_workspace_environment( + name="in_repo_workspace", + description=( + """ + This allows shell_command and similar to run in the repo, instead of in a sandbox. + Only use this environment for commands or goals that are idempotent. + Ideally, such commands do NOT change anything in the repo. + + If you need to capture output, note that output gets captured from a temporary + sandbox, not from the repo root. So, you may need to copy output files into + the sandbox with something like `cp path/to/file {chroot}/path/to/file`. + """ + ), +) diff --git a/BUILD.tools b/BUILD.tools new file mode 100644 index 0000000000..5035397a63 --- /dev/null +++ b/BUILD.tools @@ -0,0 +1,63 @@ +# This BUILD file has requirements for most of the tools resolves + +python_requirement( + name="bandit-reqs", + resolve="bandit", + requirements=[ + # https://github.com/pantsbuild/pants/blob/release_2.23.0rc0/src/python/pants/backend/python/lint/bandit/subsystem.py#L44-L52 + "bandit>=1.7.0,<1.8", + "setuptools", + "GitPython>=3.1.24", + ], +) + +python_requirement( + name="black-reqs", + resolve="black", + requirements=[ + "black==22.3.0", + "typing-extensions>=3.10.0.0;python_version<'3.10'", + ], +) + +python_requirement( + name="flake8-reqs", + resolve="flake8", + requirements=[ + "flake8==7.0.0", # st2flake8 does not support flake8 v5 + # license check plugin + "st2flake8>0.1.0", # TODO: remove in favor of regex-lint or preamble + ], +) + +# for pants-plugins, see //pants-plugins/BUILD +# for pylint, see //pylint_plugins/BUILD + +python_requirement( + name="pytest-reqs", + resolve="st2", + requirements=[ + "pytest==7.0.1", # copied from https://www.pantsbuild.org/v2.14/docs/reference-pytest#version + "pytest-benchmark[histogram]==3.4.1", # used for st2common/benchmarks + # "pytest-timer[colorama]", # report test timing (--with-timer ala nose-timer) + "pytest-icdiff", # make diff output easier to read + # "pygments", # highlight code in tracebacks (already included in 
requirements-pants.txt) + # + # other possible plugins + # "pytest-timeout", # time limit on tests + # "pytest-mock", # more convenient mocking + # + # needed by pants + "pytest-cov>=2.12,!=2.12.1,<3.1", # coverage + "pytest-xdist>=2.5,<3", # parallel test runs (pants uses this if [pytest].xdist_enabled) + ], +) + +python_requirement( + name="twine-reqs", + resolve="twine", + requirements=[ + "twine>=3.7.1,<3.8", + "colorama>=0.4.3", + ], +) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f3b0d1d470..623ebc4e6b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,19 +4,358 @@ Changelog in development -------------- +Python 3.6 is no longer supported; Stackstorm requires at least Python 3.8. +This release adds support for Python 3.10 and 3.11, so StackStorm supports python 3.8 - 3.11. + +Newer MongoDB versions are now supported. CI uses MongoDB 7.0. + +Several st2.conf database options have been renamed or deprecated. Most of the options will continue to work using their old name. +However, if you use `[database].ssl_keyfile` and/or `[database].ssl_certfile`, you MUST migrate to `[database].tls_certificate_key_file`. +This new option expects the key and certificate in the same file. Use something like the following to create that file from your old files: + +``` +cat path/to/ssl_keyfile path/to/ssl_certfile > path/to/tls_certificate_key_file +``` + +Other options that were renamed under `[database]` are (more details available in `st2.conf.sample`): + +* `ssl` -> `tls` +* `ssl_cert_reqs` -> `tls_allow_invalid_certificates` (opt type change: string -> boolean) +* `ssl_ca_certs` -> `tls_ca_file` +* `ssl_match_hostnames` -> `tls_allow_invalid_hostnames` (meaning is inverted: the new option is the opposite of the old) + +Fixed +~~~~~ +* Fixed #6021 and #5327 by adding max_page_size to api_opts and added limit and offset to list_values() methods of + both action_service and sensor_service +* Fix `packs.get` action. Assumed `master` is primary branch on all packs. 
#6225 +* Restore Pack integration testing (it was inadvertently skipped) and stop testing against `bionic` and `el7`. #6135 +* Fix Popen.pid typo in st2tests. #6184 +* Bump tooz package to `6.2.0` to fix TLS. #6220 (@jk464) +* Shells via `pywinrm` are initialized with the 65001 codepage to ensure raw string responses are UTF-8. #6034 (@stealthii) + +Changed +~~~~~~~ +* Removed code in all dist_utils.py that was sanitizing the `python_version` environment marker that limited packages in the requirements.txt only being installed on lower python versions. (by @skiedude) +* Bumped `jsonschema` 2.6.0 -> 3.2.0 now that python3.6 is not supported. #6118 +* Bumped many deps based on the lockfiles generated by pants+pex. #6181 #6227 #6200 #6252 #6268 (by @cognifloyd and @nzlosh) +* Switch to python3's standard lib unittest from unittest2, a backport of python3 unittest features for python2. #6187 (by @nzlosh) +* Drop Python 3.6 testing in CircleCI. #6080 + Contributed by (@philipphomberger Schwarz IT KG) +* Refactor `tools/launchdev.sh` to use `tmux` instead of `screen`. #6186 (by @nzlosh and @cognifloyd) +* Updated package build container environment to use py3.8 and mongo4.4 #6129 +* Fix misc DeprecationWarnings to prepare for python 3.10 support. #6188 (by @nzlosh) +* Update st2client deps: editor and prompt-toolkit. #6189 (by @nzlosh) +* Updated dependency oslo.config to prepare for python 3.10 support. #6193 (by @nzlosh) + +* Updated unit tests to use redis for coordination instead of the NoOp driver. This will hopefully make CI more stable. #6245 + Contributed by @FileMagic, @guzzijones, and @cognifloyd + +* Renamed `[database].ssl*` options to support pymongo 4, which we have to update to support newer MongoDB servers. + Please see the note above about migrating to the newer options, especially if you use `[database].ssl_keyfile` + and/or `[database].ssl_certfile`, as those options are ignored in StackStorm 3.9.0. 
#6250 + Contributed by @cognifloyd + +* Update mongoengine to 0.29 and pymongo to 4.6.3. The pymongo bump (from 3.x to 4.x) is a major update. #6252 + Contributed by @cognifloyd + +* Update CI from testing with mongo 4.4 to testing with MongoDB 7.0. #6246 + Contributed by @guzzijones + +* Relaxed `dnspython` pinning for compatibility with python 3.10 and greater. #6265 + Contributed by @nzlosh + +* Switched tests from `nosetest` to `pytest`. `st2-run-pack-tests` also uses pytest. + So, all pack tests must be runnable by `pytest`, which may require migration. #6291 + Contributed by @nzlosh, @FileMagic, @guzzijones, and @cognifloyd. + +* Migrated github actions from image ubunutu 20.04 with python 3.8.10 to image ubuntu 22.04 with python 3.8.12. #6301 + Contributed by @nzlosh + +Added +~~~~~ +* Continue introducing `pants `_ to improve DX (Developer Experience) + working on StackStorm, improve our security posture, and improve CI reliability thanks in part + to pants' use of PEX lockfiles. This is not a user-facing addition. + #6118 #6141 #6133 #6120 #6181 #6183 #6200 #6237 #6229 #6240 #6241 #6244 #6251 #6253 + #6254 #6258 #6259 #6260 #6269 #6275 #6279 #6278 #6282 #6283 #6273 #6287 + Contributed by @cognifloyd +* Build of ST2 EL9 packages #6153 + Contributed by @amanda11 +* Ensure `.pth` files in the st2 virtualenv get loaded by pack virtualenvs. #6183 + Contributed by @cognifloyd +* Allow `st2-rule-tester` to run without a mongo connection if user is testing against local `rule`/`trigger-instance` files. #6208 + Contributed by @jk464 + +* Added a `get_result` method to the `ExecutionResourceManager` Class for st2client + Contributed by @skiedude + +* Added new env var for tests: `ST2TESTS_SYSTEM_USER`. When set, this will override `system_user.user` in st2 conf + so that you can run tests on systems that do not have the `stanley` user. 
When running tests locally, use the
+  following to set system user to the current user: `export ST2TESTS_SYSTEM_USER=$(id -un)` #6242
+  Contributed by @cognifloyd
+
+* Added experimental support for setting conf vars via environment variables. All settings in `st2.conf` can be
+  overridden via environment vars in the format: `ST2___