diff --git a/.github/workflows/build_with_conda.yml b/.github/workflows/build_with_conda.yml index 935f2fd3ab8..33e9f46878e 100644 --- a/.github/workflows/build_with_conda.yml +++ b/.github/workflows/build_with_conda.yml @@ -13,1242 +13,86 @@ on: push: branches: - master - # For Pull-Requests, this runs the CI on merge commit - # of HEAD with the target branch instead on HEAD, allowing - # testing against potential new states which might have - # been introduced in the target branch last commits. - # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request + pull_request: workflow_dispatch: inputs: - run_on_arm_mac: - description: 'Run on arm macos' - type: boolean - required: false - default: false - run_cpp_tests: - description: 'Run C++ tests' - type: boolean - required: true - default: true - persistent_storage: - description: "Run against what persistent storage type? (no is LMDB/default)" - type: choice - options: - - 'no' - - 'AWS_S3' - - 'GCPXML' - - 'AZURE' - default: 'no' - debug_enabled: - type: boolean - description: 'Run the build with debugging enabled' - required: false - default: false - run_enable_logging: - description: 'Enabled debug logging' - type: boolean - required: false - default: false - run_commandline: - description: 'Run custom commandline before tests, Like: export ARCTICDB_STORAGE_AZURE=1; ....' 
- type: string - required: false - default: "" - run_custom_pytest_command: - description: '*Run custom pytest command, instead of standard(Note: curdir is project root), or pass additional arguments to default command' - type: string - required: false - default: "" + disable_cpp_tests: {required: false, type: boolean, default: false, description: Disable C++ tests} + persistent_storage: {required: false, type: choice, default: 'no', description: Persistent storage type, options: ['no', 'AWS_S3', 'GCPXML', 'AZURE']} + debug_enabled: {required: false, type: boolean, default: false, description: Run the build with debugging enabled} + run_enable_logging: {required: false, type: boolean, default: false, description: Enable debug logging} + run_commandline: {required: false, type: string, default: '', description: Run custom commandline before tests} + run_custom_pytest_command: {required: false, type: string, default: '', description: Run custom pytest command or additional arguments} jobs: - - compile_linux_64: - name: Compile (linux_64) - if: | - always() && - !cancelled() - runs-on: ubuntu-22.04 - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: 
conda-env-linux_64 - post-cleanup: 'none' - - - name: Build ArcticDB with conda (ARCTICDB_USING_CONDA=1) - run: | - # Protocol buffers compilation require not using build isolation. - # We should always retry due to unstable nature of connections and environments - python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e . - env: - ARCTICDB_USING_CONDA: 1 - ARCTICDB_BUILD_CPP_TESTS: 1 - - - name: Show sccache stats (linux_64) - run: ${SCCACHE_PATH} --show-stats || sccache --show-stats - - - name: Archive build artifacts - uses: actions/upload-artifact@v4 - if: always() - with: - name: build-linux_64 - retention-days: 7 - path: | - cpp/out/linux-conda-release-build/ - python/arcticdb_ext* - python/**/*.so - python/**/*.pyd - - compile_linux_aarch64: - name: Compile (linux_aarch64) - if: | - always() && - !cancelled() - runs-on: ubuntu-22.04-arm - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-linux_aarch64 - post-cleanup: 'none' - - - name: Build ArcticDB with conda (ARCTICDB_USING_CONDA=1) - run: | - # Protocol buffers compilation require not using build isolation. 
- # We should always retry due to unstable nature of connections and environments - python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e . - env: - ARCTICDB_USING_CONDA: 1 - ARCTICDB_BUILD_CPP_TESTS: 1 - - - name: Show sccache stats (linux_aarch64) - run: ${SCCACHE_PATH} --show-stats || sccache --show-stats - - - name: Archive build artifacts - uses: actions/upload-artifact@v4 - if: always() - with: - name: build-linux_aarch64 - retention-days: 7 - path: | - cpp/out/linux-conda-release-build/ - python/arcticdb_ext* - python/**/*.so - python/**/*.pyd - - compile_osx_arm64: - name: Compile (osx_arm64) - if: | - always() && - !cancelled() - runs-on: macos-14 - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-osx_arm64 - post-cleanup: 'none' - - - name: Build ArcticDB with conda (ARCTICDB_USING_CONDA=1) - run: | - # Protocol buffers compilation require not using build isolation. - # We should always retry due to unstable nature of connections and environments - python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e . 
- env: - ARCTICDB_USING_CONDA: 1 - ARCTICDB_BUILD_CPP_TESTS: 1 - - - name: Show sccache stats (osx_arm64) - run: ${SCCACHE_PATH} --show-stats || sccache --show-stats - - - name: Archive build artifacts - uses: actions/upload-artifact@v4 - if: always() - with: - name: build-osx_arm64 - retention-days: 7 - path: | - cpp/out/macos-conda-release-build/ - python/arcticdb_ext* - python/**/*.so - python/**/*.pyd - - cpp_tests_linux_64: - name: C++ Tests (linux_64) - if: | - always() && - !cancelled() && - (inputs.run_cpp_tests == true || github.event_name != 'workflow_dispatch') - needs: [compile_linux_64] - runs-on: ubuntu-22.04 - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Free Disk Space (Ubuntu) - uses: jlumbroso/free-disk-space@v1.3.1 - with: - tool-cache: false - large-packages: false # Time-consuming but doesn't save that much space (4GB) - docker-images: false # We're using docker images we don't want to clear - - - name: Download build artifacts - uses: actions/download-artifact@v8 - with: - name: build-linux_64 - path: . 
- - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-linux_64 - post-cleanup: 'none' - - - name: Configure C++ Tests (linux_64) - run: | - cd cpp - # Only reconfigure if TEST is not already ON in the CMake cache - # This avoids unnecessary reconfiguration when the compile job already built with TEST=ON - if [ -f out/linux-conda-release-build/CMakeCache.txt ] && grep -q "TEST:BOOL=ON" out/linux-conda-release-build/CMakeCache.txt; then - echo "CMake cache already has TEST=ON, skipping reconfiguration" - else - cmake --preset linux-conda-release -DTEST=ON - fi - env: - ARCTICDB_USING_CONDA: 1 - - - name: Build C++ Tests (linux_64) - run: | - cd cpp - cmake --build --preset linux-conda-release --target arcticdb_rapidcheck_tests -j ${{ steps.cpu-cores.outputs.count }} - cmake --build --preset linux-conda-release --target test_unit_arcticdb -j ${{ steps.cpu-cores.outputs.count }} - env: - ARCTICDB_USING_CONDA: 1 - - - name: Show sccache stats after C++ tests build (linux_64) - run: ${SCCACHE_PATH} --show-stats || sccache --show-stats - - - name: Run C++ Tests (linux_64) - run: | - cd cpp/out/linux-conda-release-build/ - ctest --output-on-failure - env: - ARCTICDB_USING_CONDA: 1 - - cpp_tests_linux_aarch64: - name: C++ Tests (linux_aarch64) - if: | - always() && - !cancelled() && - (inputs.run_cpp_tests == true || github.event_name != 'workflow_dispatch') - needs: [compile_linux_aarch64] - runs-on: ubuntu-22.04-arm - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive 
submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Free Disk Space (Ubuntu) - uses: jlumbroso/free-disk-space@v1.3.1 - with: - tool-cache: false - large-packages: false # Time-consuming but doesn't save that much space (4GB) - docker-images: false # We're using docker images we don't want to clear - - - name: Download build artifacts - uses: actions/download-artifact@v8 - with: - name: build-linux_aarch64 - path: . - - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-linux_aarch64 - post-cleanup: 'none' - - - name: Configure C++ Tests (linux_aarch64) - run: | - cd cpp - # Only reconfigure if TEST is not already ON in the CMake cache - # This avoids unnecessary reconfiguration when the compile job already built with TEST=ON - if [ -f out/linux-conda-release-build/CMakeCache.txt ] && grep -q "TEST:BOOL=ON" out/linux-conda-release-build/CMakeCache.txt; then - echo "CMake cache already has TEST=ON, skipping reconfiguration" - else - cmake --preset linux-conda-release -DTEST=ON - fi - env: - ARCTICDB_USING_CONDA: 1 - - - name: Build C++ Tests (linux_aarch64) - run: | - cd cpp - cmake --build --preset linux-conda-release --target arcticdb_rapidcheck_tests -j ${{ steps.cpu-cores.outputs.count }} - cmake --build --preset linux-conda-release --target test_unit_arcticdb -j ${{ steps.cpu-cores.outputs.count }} - env: - ARCTICDB_USING_CONDA: 1 - - - name: Show sccache stats after C++ tests build (linux_aarch64) - run: ${SCCACHE_PATH} 
--show-stats || sccache --show-stats - - - name: Run C++ Tests (linux_aarch64) - run: | - cd cpp/out/linux-conda-release-build/ - ctest --output-on-failure - env: - ARCTICDB_USING_CONDA: 1 - - cpp_tests_osx_arm64: - name: C++ Tests (osx_arm64) - if: | - always() && - !cancelled() && - (inputs.run_cpp_tests == true || github.event_name != 'workflow_dispatch') - needs: [compile_osx_arm64] - runs-on: macos-14 - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Download build artifacts - uses: actions/download-artifact@v8 - with: - name: build-osx_arm64 - path: . 
- - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-osx_arm64 - post-cleanup: 'none' - - - name: Configure C++ Tests (osx_arm64) - run: | - cd cpp - # Only reconfigure if TEST is not already ON in the CMake cache - # This avoids unnecessary reconfiguration when the compile job already built with TEST=ON - if [ -f out/macos-conda-release-build/CMakeCache.txt ] && grep -q "TEST:BOOL=ON" out/macos-conda-release-build/CMakeCache.txt; then - echo "CMake cache already has TEST=ON, skipping reconfiguration" - else - cmake --preset macos-conda-release -DTEST=ON - fi - env: - ARCTICDB_USING_CONDA: 1 - - - name: Build C++ Tests (osx_arm64) - run: | - cd cpp - cmake --build --preset macos-conda-release --target arcticdb_rapidcheck_tests -j ${{ steps.cpu-cores.outputs.count }} - cmake --build --preset macos-conda-release --target test_unit_arcticdb -j ${{ steps.cpu-cores.outputs.count }} - env: - ARCTICDB_USING_CONDA: 1 - - - name: Show sccache stats after C++ tests build (osx_arm64) - run: ${SCCACHE_PATH} --show-stats || sccache --show-stats - - - name: Run C++ Tests (osx_arm64) - run: | - cd cpp/out/macos-conda-release-build/ - ctest --output-on-failure - env: - ARCTICDB_USING_CONDA: 1 - - python_tests_linux_64: - name: Python Tests (linux_64) - ${{matrix.type}} - if: | - always() && - !cancelled() - needs: [compile_linux_64] - strategy: - fail-fast: false - matrix: - type: [unit, integration, hypothesis, stress, compat, enduser] - runs-on: ubuntu-22.04 - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - services: - mongodb: - image: mongo:4.4 - defaults: - run: - shell: bash -l {0} - steps: - - name: 
Free Disk Space (Ubuntu) - uses: jlumbroso/free-disk-space@v1.3.1 - with: - tool-cache: false - large-packages: false # Time-consuming but doesn't save that much space (4GB) - docker-images: false # We're using docker images we don't want to clear - - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Download build artifacts - uses: actions/download-artifact@v8 - with: - name: build-linux_64 - path: . - - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-linux_64 - post-cleanup: 'none' - - - name: Install ArcticDB from artifacts - run: | - # Protocol buffers compilation require not using build isolation. - # We should always retry due to unstable nature of connections and environments - # This reuses the build artifacts from the compile_linux_64 step and make ArcticDB available for testing. - # Skip CMake configuration/build if artifacts are already present to speed up installation - if [ -d "cpp/out/linux-conda-release-build" ] && (ls python/arcticdb_ext*.so python/arcticdb_ext*.pyd 2>/dev/null | head -1 | grep -q .); then - echo "Build artifacts found, skipping CMake build" - export ARCTIC_CMAKE_PRESET=skip - fi - python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e . 
- env: - ARCTICDB_USING_CONDA: 1 - - # Note: mongo tests are skipped in the macos workflow - - name: Install MongoDB - uses: ./.github/actions/install_mongodb - - - name: Install npm # Linux github runner image does not come with npm - uses: actions/setup-node@v6.1.0 - with: - node-version: '24' - - - name: Install Azurite - uses: nick-fields/retry@v3 - with: - # We should always retry due to unstable nature of connections and environments - timeout_minutes: 10 - max_attempts: 3 - command: npm install -g azurite - - - name: Check no arcticdb file depend on tests package - run: | - build_tooling/checks.sh - - - name: Set persistent storage variables - # Should be executed for all persistent storages but not for LMDB - if: ${{ inputs.persistent_storage != 'no' }} - uses: ./.github/actions/set_persistent_storage_env_vars - with: - aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}" - aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}" - gcp_access_key: "${{ secrets.GCP_S3_ACCESS_KEY }}" - gcp_secret_key: "${{ secrets.GCP_S3_SECRET_KEY }}" - azure_container: "githubblob" # DEFAULT BUCKET FOR AZURE - azure_connection_string: "${{ secrets.AZURE_CONNECTION_STRING }}" - persistent_storage: ${{ inputs.persistent_storage || 'no' }} - - - name: Set ArcticDB Debug Logging - if: ${{ inputs.run_enable_logging }} - uses: ./.github/actions/enable_logging - - - name: Setup tmate session - uses: mxschmitt/action-tmate@v3 - if: ${{ inputs.debug_enabled }} - - - name: Install pytest-repeat - run: | - python -m pip --retries 3 --timeout 180 install pytest-repeat - - - name: Test with pytest - run: | - # find ssl directory where cacerts are (for Azure) - openssl version -d - # list file descriptors and other limits of the runner - ulimit -a - echo "Run commandline: $COMMANDLINE" - eval "$COMMANDLINE" - export ARCTICDB_WARN_ON_WRITING_EMPTY_DATAFRAME=0 - if [[ "$(echo "$ARCTICDB_PYTEST_ARGS" | xargs)" == pytest* ]]; then - python -m pip install pytest-repeat setuptools wheel - python 
setup.py protoc --build-lib python - echo "Run custom pytest command: $ARCTICDB_PYTEST_ARGS" - eval "$ARCTICDB_PYTEST_ARGS" - else - cd python - python -m pytest --timeout=3600 -v --tb=line -n logical --dist worksteal tests/${{matrix.type}} $ARCTICDB_PYTEST_ARGS - fi - env: - ARCTICDB_USING_CONDA: 1 - COMMANDLINE: ${{ inputs.run_commandline }} - # Use the Mongo created in the service container above to test against - CI_MONGO_HOST: mongodb - HYPOTHESIS_PROFILE: ci_linux - ARCTICDB_PYTEST_ARGS: ${{ inputs.run_custom_pytest_command }} - STORAGE_TYPE: ${{ inputs.persistent_storage == 'no' && 'LMDB' || inputs.persistent_storage }} - NODE_OPTIONS: --openssl-legacy-provider - - python_tests_osx_arm64: - name: Python Tests (osx_arm64) - ${{matrix.type}} - if: | - always() && - !cancelled() - needs: [compile_osx_arm64] - strategy: - fail-fast: false - matrix: - type: [unit, integration, hypothesis, stress, compat, enduser] - runs-on: macos-14 - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Download build artifacts - uses: actions/download-artifact@v8 - with: - name: build-osx_arm64 - path: . 
- - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-osx_arm64 - post-cleanup: 'none' - - - name: Install ArcticDB from artifacts - run: | - # Protocol buffers compilation require not using build isolation. - # We should always retry due to unstable nature of connections and environments - # This reuses the build artifacts from the compile_osx_arm64 step and make ArcticDB available for testing. - # Skip CMake configuration/build if artifacts are already present to speed up installation - if [ -d "cpp/out/macos-conda-release-build" ] && (ls python/arcticdb_ext*.so python/arcticdb_ext*.pyd 2>/dev/null | head -1 | grep -q .); then - echo "Build artifacts found, skipping CMake build" - export ARCTIC_CMAKE_PRESET=skip - fi - python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e . 
- env: - ARCTICDB_USING_CONDA: 1 - - - name: Install npm - uses: actions/setup-node@v6.1.0 - with: - node-version: '24' - - - name: Install Azurite - uses: nick-fields/retry@v3 - with: - # We should always retry due to unstable nature of connections and environments - timeout_minutes: 10 - max_attempts: 3 - command: npm install -g azurite - - - name: Check no arcticdb file depend on tests package - run: | - build_tooling/checks.sh - - - name: Set persistent storage variables - # Should be executed for all persistent storages but not for LMDB - if: ${{ inputs.persistent_storage != 'no' }} - uses: ./.github/actions/set_persistent_storage_env_vars - with: - aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}" - aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}" - gcp_access_key: "${{ secrets.GCP_S3_ACCESS_KEY }}" - gcp_secret_key: "${{ secrets.GCP_S3_SECRET_KEY }}" - azure_container: "githubblob" # DEFAULT BUCKET FOR AZURE - azure_connection_string: "${{ secrets.AZURE_CONNECTION_STRING }}" - persistent_storage: ${{ inputs.persistent_storage || 'no' }} - - - name: Set ArcticDB Debug Logging - if: ${{ inputs.run_enable_logging }} - uses: ./.github/actions/enable_logging - - - name: Setup tmate session - uses: mxschmitt/action-tmate@v3 - if: ${{ inputs.debug_enabled }} - - - name: Install pytest-repeat - run: | - python -m pip --retries 3 --timeout 180 install pytest-repeat - - - name: Test with pytest - run: | - # find ssl directory where cacerts are (for Azure) - openssl version -d - # list file descriptors and other limits of the runner - ulimit -a - echo "Run commandline: $COMMANDLINE" - eval "$COMMANDLINE" - export ARCTICDB_WARN_ON_WRITING_EMPTY_DATAFRAME=0 - if [[ "$(echo "$ARCTICDB_PYTEST_ARGS" | xargs)" == pytest* ]]; then - python -m pip install pytest-repeat setuptools wheel - python setup.py protoc --build-lib python - echo "Run custom pytest command: $ARCTICDB_PYTEST_ARGS" - eval "$ARCTICDB_PYTEST_ARGS" - else - cd python - python -m pytest --timeout=3600 -v 
--tb=line -n logical --dist worksteal tests/${{matrix.type}} $ARCTICDB_PYTEST_ARGS - fi - env: - ARCTICDB_USING_CONDA: 1 - COMMANDLINE: ${{ inputs.run_commandline }} - HYPOTHESIS_PROFILE: ci_macos - ARCTICDB_PYTEST_ARGS: ${{ inputs.run_custom_pytest_command }} - STORAGE_TYPE: ${{ inputs.persistent_storage == 'no' && 'LMDB' || inputs.persistent_storage }} - NODE_OPTIONS: --openssl-legacy-provider - - python_tests_linux_aarch64: - name: Python Tests (linux_aarch64) - ${{matrix.type}} - if: | - always() && - !cancelled() - needs: [compile_linux_aarch64] - strategy: - fail-fast: false - matrix: - type: [unit, integration, hypothesis, stress, compat, enduser] - runs-on: ubuntu-22.04-arm - env: - ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - services: - mongodb: - image: mongo:4.4 - defaults: - run: - shell: bash -l {0} - steps: - - uses: actions/checkout@v6.0.1 - # Do not use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy report that pollutes the summary page - - - name: Download build artifacts - uses: actions/download-artifact@v8 - with: - name: build-linux_aarch64 - path: . - - - name: Get number of CPU cores - uses: SimenB/github-actions-cpu-cores@v2.0.0 - id: cpu-cores - - - name: Install Conda environment from environment-dev.yml - uses: mamba-org/setup-micromamba@v2.0.6 - with: - environment-file: environment-dev.yml - environment-name: arcticdb - init-shell: >- - bash - cache-environment: true - cache-environment-key: conda-env-linux_aarch64 - post-cleanup: 'none' - - - name: Install ArcticDB from artifacts - run: | - # Protocol buffers compilation require not using build isolation. 
- # We should always retry due to unstable nature of connections and environments - # This reuses the build artifacts from the compile_linux_aarch64 step and make ArcticDB available for testing. - # Skip CMake configuration/build if artifacts are already present to speed up installation - if [ -d "cpp/out/linux-conda-release-build" ] && (ls python/arcticdb_ext*.so python/arcticdb_ext*.pyd 2>/dev/null | head -1 | grep -q .); then - echo "Build artifacts found, skipping CMake build" - export ARCTIC_CMAKE_PRESET=skip - fi - python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e . - env: - ARCTICDB_USING_CONDA: 1 - - # Note: mongo tests are skipped in the macos workflow - - name: Install MongoDB - run: | - curl --retry 5 --retry-delay 5 --retry-connrefused -LO https://fastdl.mongodb.org/linux/mongodb-linux-aarch64-ubuntu2204-7.0.0.tgz - tar -xzf mongodb-linux-aarch64-ubuntu2204-7.0.0.tgz - cp mongodb-linux-aarch64-ubuntu2204-7.0.0/bin/* /usr/local/bin/ - mongod --version - rm -rf mongodb-linux-aarch64-ubuntu2204-7.0.0.tgz mongodb-linux-aarch64-ubuntu2204-7.0.0 - - - name: Install npm # Linux github runner image does not come with npm - uses: actions/setup-node@v6.1.0 - with: - node-version: '24' - - - name: Install Azurite - uses: nick-fields/retry@v3 - with: - # We should always retry due to unstable nature of connections and environments - timeout_minutes: 10 - max_attempts: 3 - command: npm install -g azurite - - - name: Check no arcticdb file depend on tests package - run: | - build_tooling/checks.sh - - - name: Set persistent storage variables - # Should be executed for all persistent storages but not for LMDB - if: ${{ inputs.persistent_storage != 'no' }} - uses: ./.github/actions/set_persistent_storage_env_vars - with: - aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}" - aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}" - gcp_access_key: "${{ secrets.GCP_S3_ACCESS_KEY }}" - gcp_secret_key: "${{ secrets.GCP_S3_SECRET_KEY }}" - 
azure_container: "githubblob" # DEFAULT BUCKET FOR AZURE - azure_connection_string: "${{ secrets.AZURE_CONNECTION_STRING }}" - persistent_storage: ${{ inputs.persistent_storage || 'no' }} - - - name: Set ArcticDB Debug Logging - if: ${{ inputs.run_enable_logging }} - uses: ./.github/actions/enable_logging - - - name: Setup tmate session - uses: mxschmitt/action-tmate@v3 - if: ${{ inputs.debug_enabled }} - - - name: Install pytest-repeat - run: | - python -m pip --retries 3 --timeout 180 install pytest-repeat - - - name: Test with pytest - run: | - # find ssl directory where cacerts are (for Azure) - openssl version -d - # list file descriptors and other limits of the runner - ulimit -a - echo "Run commandline: $COMMANDLINE" - eval "$COMMANDLINE" - export ARCTICDB_WARN_ON_WRITING_EMPTY_DATAFRAME=0 - if [[ "$(echo "$ARCTICDB_PYTEST_ARGS" | xargs)" == pytest* ]]; then - python -m pip install pytest-repeat setuptools wheel - python setup.py protoc --build-lib python - echo "Run custom pytest command: $ARCTICDB_PYTEST_ARGS" - eval "$ARCTICDB_PYTEST_ARGS" - else - cd python - python -m pytest --timeout=3600 -v --tb=line -n logical --dist worksteal tests/${{matrix.type}} $ARCTICDB_PYTEST_ARGS - fi - env: - ARCTICDB_USING_CONDA: 1 - COMMANDLINE: ${{ inputs.run_commandline }} - ARCTICDB_PYTEST_ARGS: ${{ inputs.run_custom_pytest_command }} - STORAGE_TYPE: ${{ inputs.persistent_storage == 'no' && 'LMDB' || inputs.persistent_storage }} - - compile_win_64: - name: Compile (win_64) - if: | - always() && - !cancelled() - runs-on: windows-latest - env: - ACTIONS_ALLOW_UNSECURE_COMMANDS: true - SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} - SCCACHE_GHA_ENABLED: "true" - steps: - - uses: actions/checkout@v6.0.1 - # DONT use recursive submodules checkout to simulate conda feedstock build - # with: - # submodules: recursive - - - name: Configure sccache - uses: mozilla-actions/sccache-action@v0.0.9 - with: - version: v0.12.0 - disable_annotations: 'true' # supress noisy 
report that pollutes the summary page
-
-      - name: Get number of CPU cores
-        uses: SimenB/github-actions-cpu-cores@v2.0.0
-        id: cpu-cores
-
-      - name: Install Conda environment from environment-dev.yml
-        uses: mamba-org/setup-micromamba@v2.0.6
-        with:
-          environment-file: environment-dev.yml
-          init-shell: bash cmd.exe
-          cache-environment: true
-          cache-environment-key: conda-env-win_64
-          post-cleanup: 'none'
-
-      - name: Build ArcticDB with conda (ARCTICDB_USING_CONDA=1)
-        shell: cmd /C call {0}
-        run: |
-          REM Some `CMAKE_*` variables (in particular CMAKE_GENERATOR_{PLATFORM,TOOLSET}) are set by mamba / micromamba / conda
-          REM when the environment is activated.
-          REM See: https://github.com/conda-forge/vc-feedstock/blob/c6bb71096319ff21ac8b75f7d91183be914c3d6b/recipe/activate.bat#L87-L131
-          REM The values which are chosen prevent Ninja to be used as a generator with MSVC.
-          REM We override those values so that we can.
-          set CMAKE_GENERATOR_PLATFORM=
-          set CMAKE_GENERATOR_TOOLSET=
-          REM Protocol buffers compilation require not using build isolation.
-          REM We should always retry due to unstable nature of connections and environments
-          python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e .
-        env:
-          ARCTICDB_USING_CONDA: 1
-          ARCTICDB_BUILD_CPP_TESTS: 1
-          ARCTIC_CMAKE_PRESET: windows-cl-conda-release
-
-      - name: Show sccache stats (win_64)
-        shell: cmd /C call {0}
-        run: |
-          if defined SCCACHE_PATH (
-            %SCCACHE_PATH% --show-stats
-          ) else (
-            sccache --show-stats
-          )
-
-      - name: Archive build artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: build-win_64
-          retention-days: 7
-          path: |
-            cpp/out/windows-cl-conda-release-build/
-            python/arcticdb_ext*
-            python/**/*.so
-            python/**/*.pyd
-
-  cpp_tests_win_64:
-    name: C++ Tests (win_64)
-    if: |
-      always() &&
-      !cancelled() &&
-      (inputs.run_cpp_tests == true || github.event_name != 'workflow_dispatch')
-    needs: [compile_win_64]
-    runs-on: windows-latest
-    env:
-      ACTIONS_ALLOW_UNSECURE_COMMANDS: true
-      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}}
-      SCCACHE_GHA_ENABLED: "true"
-    steps:
-      - uses: actions/checkout@v6.0.1
-      # DONT use recursive submodules checkout to simulate conda feedstock build
-      # with:
-      #   submodules: recursive
-
-      - name: Download build artifacts
-        uses: actions/download-artifact@v8
-        with:
-          name: build-win_64
-          path: .
-
-      - name: Configure sccache
-        uses: mozilla-actions/sccache-action@v0.0.9
-        with:
-          version: v0.12.0
-          disable_annotations: 'true' # supress noisy report that pollutes the summary page
-
-      - name: Install Conda environment from environment-dev.yml
-        uses: mamba-org/setup-micromamba@v2.0.6
-        with:
-          environment-file: environment-dev.yml
-          init-shell: bash cmd.exe
-          cache-environment: true
-          cache-environment-key: conda-env-win_64
-          post-cleanup: 'none'
-
-      - name: Configure C++ Tests (win_64)
-        shell: cmd /C call {0}
-        # Rapidcheck tests are currently disabled on Windows due to a linking issue we need to investigate.
-        if: false # ${{ inputs.run_cpp_tests == true || github.event_name != 'workflow_dispatch' }}
-        run: |
-          REM Some `CMAKE_*` variables (in particular CMAKE_GENERATOR_{PLATFORM,TOOLSET}) are set by mamba / micromamba / conda
-          REM when the environment is activated.
-          REM See: https://github.com/conda-forge/vc-feedstock/blob/c6bb71096319ff21ac8b75f7d91183be914c3d6b/recipe/activate.bat#L87-L131
-          REM The values which are chosen prevent Ninja to be used as a generator with MSVC.
-          REM We override those values so that we can.
-          set CMAKE_GENERATOR_PLATFORM=
-          set CMAKE_GENERATOR_TOOLSET=
-          cd cpp
-          cmake --preset windows-cl-conda-release -DTEST=ON
-        env:
-          ARCTICDB_USING_CONDA: 1
-          ARCTICDB_BUILD_CPP_TESTS: 1
-          ARCTIC_CMAKE_PRESET: windows-cl-conda-release
-
-      - name: Build C++ Tests (win_64)
-        shell: cmd /C call {0}
-        # Rapidcheck tests are currently disabled on Windows due to a linking issue we need to investigate.
-        if: false # ${{ inputs.run_cpp_tests == true || github.event_name != 'workflow_dispatch' }}
-        run: |
-          REM Some `CMAKE_*` variables (in particular CMAKE_GENERATOR_{PLATFORM,TOOLSET}) are set by mamba / micromamba / conda
-          REM when the environment is activated.
-          REM See: https://github.com/conda-forge/vc-feedstock/blob/c6bb71096319ff21ac8b75f7d91183be914c3d6b/recipe/activate.bat#L87-L131
-          REM The values which are chosen prevent Ninja to be used as a generator with MSVC.
-          REM We override those values so that we can.
-          set CMAKE_GENERATOR_PLATFORM=
-          set CMAKE_GENERATOR_TOOLSET=
-          cd cpp
-          cmake --build --preset windows-cl-conda-release --target arcticdb_rapidcheck_tests -j ${{ steps.cpu-cores.outputs.count }}
-          cmake --build --preset windows-cl-conda-release --target test_unit_arcticdb -j ${{ steps.cpu-cores.outputs.count }}
-        env:
-          ARCTICDB_USING_CONDA: 1
-
-      - name: Show sccache stats after C++ tests build (win_64)
-        shell: cmd /C call {0}
-        run: |
-          if defined SCCACHE_PATH (
-            %SCCACHE_PATH% --show-stats
-          ) else (
-            sccache --show-stats
-          )
-
-      - name: Run C++ Tests (win_64)
-        shell: cmd /C call {0}
-        # Rapidcheck tests are currently disabled on Windows due to a linking issue we need to investigate.
-        if: false # ${{ inputs.run_cpp_tests == true || github.event_name != 'workflow_dispatch' }}
-        run: |
-          cd cpp/out/windows-cl-conda-release-build/
-          ctest --output-on-failure
-        env:
-          CTEST_OUTPUT_ON_FAILURE: 1
-          ARCTICDB_USING_CONDA: 1
-          ARCTICDB_BUILD_CPP_TESTS: 1
-          ARCTIC_CMAKE_PRESET: windows-cl-conda-release
-
-  python_tests_win_64:
-    name: Python Tests (win_64) - ${{matrix.type}}
-    if: |
-      always() &&
-      !cancelled()
-    needs: [compile_win_64]
-    strategy:
-      fail-fast: false
-      matrix:
-        type: [unit, integration, hypothesis, stress, compat, enduser]
-    runs-on: windows-latest
-    env:
-      ACTIONS_ALLOW_UNSECURE_COMMANDS: true
-      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}}
-      SCCACHE_GHA_ENABLED: "true"
-    steps:
-      - uses: actions/checkout@v6.0.1
-      # DONT use recursive submodules checkout to simulate conda feedstock build
-      # with:
-      #   submodules: recursive
-
-      - name: Download build artifacts
-        uses: actions/download-artifact@v8
-        with:
-          name: build-win_64
-          path: .
-
-      - name: Configure sccache
-        uses: mozilla-actions/sccache-action@v0.0.9
-        with:
-          version: v0.12.0
-          disable_annotations: 'true' # supress noisy report that pollutes the summary page
-
-      - name: Install Conda environment from environment-dev.yml
-        uses: mamba-org/setup-micromamba@v2.0.6
-        with:
-          environment-file: environment-dev.yml
-          init-shell: bash cmd.exe
-          cache-environment: true
-          cache-environment-key: conda-env-win_64
-          post-cleanup: 'none'
-
-      - name: Install ArcticDB from artifacts
-        shell: cmd /C call {0}
-        run: |
-          REM Protocol buffers compilation require not using build isolation.
-          REM We should always retry due to unstable nature of connections and environments
-          REM This reuses the build artifacts from the compile_win_64 step and make ArcticDB available for testing.
-          REM Skip CMake configuration/build if artifacts are already present to speed up installation
-          set ARCTIC_CMAKE_PRESET=
-          if exist "cpp\out\windows-cl-conda-release-build" (
-            dir /b python\arcticdb_ext*.pyd >nul 2>&1
-            if not errorlevel 1 (
-              echo Build artifacts found, skipping CMake build
-              set ARCTIC_CMAKE_PRESET=skip
-            )
-          )
-          python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e .
-        env:
-          ARCTICDB_USING_CONDA: 1
-
-      - name: Install npm
-        uses: actions/setup-node@v6.1.0
-        with:
-          node-version: '24'
-
-      - name: Install azurite
-        shell: bash -elo pipefail {0}
-        run: |
-          npm install -g azurite
-          # Ensure npm global bin is in PATH for subsequent steps
-          # On Windows, npm global installs go to %APPDATA%\npm which may not be in PATH
-          npm_config_prefix=$(npm config get prefix)
-          echo "npm prefix: $npm_config_prefix"
-          # Add npm global bin to PATH (works for both Unix and Windows paths in bash)
-          npm_bin_dir="$npm_config_prefix"
-          if [[ "$RUNNER_OS" == "Windows" ]]; then
-            # Convert Windows path to Unix-style for bash (C:\Users\... -> /c/Users/...)
-            npm_bin_dir=$(echo "$npm_bin_dir" | sed 's|^\([A-Z]\):|/\1|' | tr '[:upper:]' '[:lower:]' | sed 's|\\|/|g')
-          fi
-          npm_bin_dir="${npm_bin_dir}/bin"
-          echo "$npm_bin_dir" >> $GITHUB_PATH
-          export PATH="$npm_bin_dir:$PATH"
-          # Verify azurite is accessible
-          echo "PATH includes: $npm_bin_dir"
-          which azurite && echo "Azurite found: $(which azurite)" || echo "Warning: azurite not found in PATH"
-
-      - name: Check no arcticdb file depend on tests package
-        shell: bash -elo pipefail {0}
-        run: |
-          build_tooling/checks.sh
-
-      - name: Set persistent storage variables
-        # Should be executed for all persistent storages but not for LMDB
-        if: ${{ inputs.persistent_storage != 'no' }}
-        uses: ./.github/actions/set_persistent_storage_env_vars
-        with:
-          aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}"
-          aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}"
-          gcp_access_key: "${{ secrets.GCP_S3_ACCESS_KEY }}"
-          gcp_secret_key: "${{ secrets.GCP_S3_SECRET_KEY }}"
-          azure_container: "githubblob" # DEFAULT BUCKET FOR AZURE
-          azure_connection_string: "${{ secrets.AZURE_CONNECTION_STRING }}"
-          persistent_storage: ${{ inputs.persistent_storage || 'no' }}
-
-      - name: Set ArcticDB Debug Logging
-        if: ${{ inputs.run_enable_logging }}
-        uses: ./.github/actions/enable_logging
-
-      - name: Setup tmate session
-        uses: mxschmitt/action-tmate@v3
-        if: ${{ inputs.debug_enabled }}
-
-      - name: Install pytest-repeat
-        shell: bash -elo pipefail {0}
-        run: |
-          python -m pip --retries 3 --timeout 180 install pytest-repeat
-
-      - name: Test with pytest
-        shell: bash -elo pipefail {0}
-        run: |
-          echo "Run commandline: $COMMANDLINE"
-          eval "$COMMANDLINE"
-          export ARCTICDB_RAND_SEED=$RANDOM
-          export ARCTICDB_WARN_ON_WRITING_EMPTY_DATAFRAME=0
-          if [[ "$(echo "$ARCTICDB_PYTEST_ARGS" | xargs)" == *pytest* ]]; then
-            command="python -m $ARCTICDB_PYTEST_ARGS"
-            echo "Run custom pytest command: $command"
-            eval "$command"
-          else
-            cd python
-            # Skip LMDB tests on Windows because those tests fill the disk entirely and makes the test suite fail.
-            python -m pytest --timeout=3600 -v --tb=line -n logical --dist worksteal -m "not lmdb" tests/${{matrix.type}} $ARCTICDB_PYTEST_ARGS
-          fi
-        env:
-          ARCTICDB_USING_CONDA: 1
-          COMMANDLINE: ${{ inputs.run_commandline }}
-          ARCTICDB_PYTEST_ARGS: ${{ inputs.run_custom_pytest_command }}
-          NODE_OPTIONS: --openssl-legacy-provider
+  linux_64:
+    uses: ./.github/workflows/compile_and_test_with_conda_unix.yml
+    secrets: inherit
+    with:
+      os: ubuntu-22.04
+      platform: linux_64
+      preset: linux-conda-release
+      hypothesis_profile: ci_linux
+      run_cpp_tests: ${{inputs.disable_cpp_tests != true}}
+      persistent_storage: ${{inputs.persistent_storage || 'no'}}
+      debug_enabled: ${{inputs.debug_enabled == true}}
+      run_enable_logging: ${{inputs.run_enable_logging == true}}
+      run_commandline: ${{format('{0}', inputs.run_commandline)}}
+      run_custom_pytest_command: ${{format('{0}', inputs.run_custom_pytest_command)}}
+
+  linux_aarch64:
+    uses: ./.github/workflows/compile_and_test_with_conda_unix.yml
+    secrets: inherit
+    with:
+      os: ubuntu-22.04-arm
+      platform: linux_aarch64
+      preset: linux-conda-release
+      run_cpp_tests: ${{inputs.disable_cpp_tests != true}}
+      persistent_storage: ${{inputs.persistent_storage || 'no'}}
+      debug_enabled: ${{inputs.debug_enabled == true}}
+      run_enable_logging: ${{inputs.run_enable_logging == true}}
+      run_commandline: ${{format('{0}', inputs.run_commandline)}}
+      run_custom_pytest_command: ${{format('{0}', inputs.run_custom_pytest_command)}}
+
+  osx_arm64:
+    uses: ./.github/workflows/compile_and_test_with_conda_unix.yml
+    secrets: inherit
+    with:
+      os: macos-14
+      platform: osx_arm64
+      preset: macos-conda-release
+      hypothesis_profile: ci_macos
+      install_mongodb: false # MongoDB is not available for macOS arm64
+      free_disk_space: false # Do not clean on macOS
+      run_cpp_tests: ${{inputs.disable_cpp_tests != true}}
+      persistent_storage: ${{inputs.persistent_storage || 'no'}}
+      debug_enabled: ${{inputs.debug_enabled == true}}
+      run_enable_logging: ${{inputs.run_enable_logging == true}}
+      run_commandline: ${{format('{0}', inputs.run_commandline)}}
+      run_custom_pytest_command: ${{format('{0}', inputs.run_custom_pytest_command)}}
+
+  win_64:
+    uses: ./.github/workflows/compile_and_test_with_conda_win.yml
+    secrets: inherit
+    with:
+      os: windows-latest
+      platform: win_64
+      preset: windows-cl-conda-release
+      run_cpp_tests: ${{inputs.disable_cpp_tests != true}}
+      persistent_storage: ${{inputs.persistent_storage || 'no'}}
+      debug_enabled: ${{inputs.debug_enabled == true}}
+      run_enable_logging: ${{inputs.run_enable_logging == true}}
+      run_commandline: ${{format('{0}', inputs.run_commandline)}}
+      run_custom_pytest_command: ${{format('{0}', inputs.run_custom_pytest_command)}}
 
   can_merge_conda:
     name: All conda jobs completed
-    needs: [cpp_tests_linux_64, cpp_tests_linux_aarch64, cpp_tests_osx_arm64, cpp_tests_win_64, python_tests_linux_64, python_tests_linux_aarch64, python_tests_osx_arm64, python_tests_win_64]
+    needs: [linux_64, linux_aarch64, osx_arm64, win_64]
     if: |
       always() &&
       !failure() &&
       !cancelled()
     runs-on: ubuntu-22.04
     steps:
-      - run: echo All conda jobs completed!
+      - run: echo All conda jobs completed!
\ No newline at end of file
diff --git a/.github/workflows/compile_and_test_with_conda_unix.yml b/.github/workflows/compile_and_test_with_conda_unix.yml
new file mode 100644
index 00000000000..64e213368d1
--- /dev/null
+++ b/.github/workflows/compile_and_test_with_conda_unix.yml
@@ -0,0 +1,318 @@
+name: Conda Build and Test (Unix)
+
+permissions:
+  contents: read
+
+on:
+  workflow_call:
+    inputs:
+      os: {required: true, type: string, description: GitHub os to execute on}
+      platform: {required: true, type: string, description: Platform identifier e.g. linux_64 or osx_arm64}
+      preset: {required: true, type: string, description: CMake preset name e.g. linux-conda-release}
+
+      free_disk_space: {required: false, type: boolean, default: true, description: Free disk space before build. Linux only}
+      install_mongodb: {required: false, type: boolean, default: true, description: Install MongoDB for tests. Linux only}
+      run_cpp_tests: {required: false, type: boolean, default: true, description: Whether to run C++ tests}
+      hypothesis_profile: {required: false, type: string, default: '', description: Hypothesis profile e.g. ci_linux or ci_macos}
+
+      persistent_storage: {required: false, type: string, default: 'no', description: Persistent storage type}
+      debug_enabled: {required: false, type: boolean, default: false, description: Enable tmate debug session}
+      run_enable_logging: {required: false, type: boolean, default: false, description: Enable ArcticDB debug logging}
+      run_commandline: {required: false, type: string, default: '', description: Custom commandline to run before tests}
+      run_custom_pytest_command: {required: false, type: string, default: '', description: Custom pytest command or additional arguments}
+
+jobs:
+  # ==================== JOB 1: COMPILE ====================
+  compile:
+    name: Compile (${{inputs.platform}})
+    if: |
+      always() &&
+      !cancelled()
+    runs-on: ${{inputs.os}}
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
+      ACTIONS_ALLOW_UNSECURE_COMMANDS: true
+      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}}
+      SCCACHE_GHA_ENABLED: "true"
+    defaults:
+      run:
+        shell: bash -l {0}
+    steps:
+      - uses: actions/checkout@v6.0.1
+      # Do not use recursive submodules checkout to simulate conda feedstock build
+      # with:
+      #   submodules: recursive
+
+      - name: Configure sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+        with:
+          version: v0.12.0
+          disable_annotations: 'true'
+
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v2.0.0
+        id: cpu-cores
+
+      - name: Install Conda environment from environment-dev.yml
+        uses: mamba-org/setup-micromamba@v2.0.6
+        with:
+          environment-file: environment-dev.yml
+          environment-name: arcticdb
+          init-shell: bash
+          cache-environment: true
+          cache-environment-key: conda-env-${{inputs.platform}}
+          post-cleanup: 'none'
+
+      - name: Build ArcticDB with conda
+        run: |
+          python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e .
+        env:
+          ARCTICDB_USING_CONDA: 1
+          ARCTICDB_BUILD_CPP_TESTS: 1
+
+      - name: Show sccache stats
+        run: ${SCCACHE_PATH} --show-stats || sccache --show-stats
+
+      - name: Archive build artifacts
+        uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: build-${{inputs.platform}}
+          retention-days: 7
+          path: |
+            cpp/out/${{inputs.preset}}-build/
+            python/arcticdb_ext*
+            python/**/*.so
+
+  # ==================== JOB 2: C++ TESTS ====================
+  cpp_tests:
+    name: C++ Tests (${{inputs.platform}})
+    needs: [compile]
+    if: |
+      always() &&
+      !cancelled() &&
+      inputs.run_cpp_tests
+    runs-on: ${{inputs.os}}
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
+      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}}
+      SCCACHE_GHA_ENABLED: "true"
+    defaults:
+      run:
+        shell: bash -l {0}
+    steps:
+      - uses: actions/checkout@v6.0.1
+      # Do not use recursive submodules checkout to simulate conda feedstock build
+      # with:
+      #   submodules: recursive
+
+      - name: Configure sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+        with:
+          version: v0.12.0
+          disable_annotations: 'true'
+
+      - name: Free Disk Space
+        if: inputs.free_disk_space
+        uses: jlumbroso/free-disk-space@v1.3.1
+        with:
+          tool-cache: false
+          large-packages: false
+          docker-images: false
+
+      - name: Download build artifacts
+        uses: actions/download-artifact@v8
+        with:
+          name: build-${{inputs.platform}}
+          path: .
+
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v2.0.0
+        id: cpu-cores
+
+      - name: Install Conda environment from environment-dev.yml
+        uses: mamba-org/setup-micromamba@v2.0.6
+        with:
+          environment-file: environment-dev.yml
+          environment-name: arcticdb
+          init-shell: bash
+          cache-environment: true
+          cache-environment-key: conda-env-${{inputs.platform}}
+          post-cleanup: 'none'
+
+      - name: Configure C++ Tests
+        run: |
+          cd cpp
+          if [ -f out/${{inputs.preset}}-build/CMakeCache.txt ] && grep -q "TEST:BOOL=ON" out/${{inputs.preset}}-build/CMakeCache.txt; then
+            echo "CMake cache already has TEST=ON, skipping reconfiguration"
+          else
+            cmake --preset ${{inputs.preset}} -DTEST=ON
+          fi
+        env:
+          ARCTICDB_USING_CONDA: 1
+
+      - name: Build C++ Tests
+        run: |
+          cd cpp
+          cmake --build --preset ${{inputs.preset}} --target arcticdb_rapidcheck_tests -j ${{ steps.cpu-cores.outputs.count }}
+          cmake --build --preset ${{inputs.preset}} --target test_unit_arcticdb -j ${{ steps.cpu-cores.outputs.count }}
+        env:
+          ARCTICDB_USING_CONDA: 1
+
+      - name: Show sccache stats
+        run: ${SCCACHE_PATH} --show-stats || sccache --show-stats
+
+      - name: Run C++ Tests
+        run: |
+          cd cpp/out/${{inputs.preset}}-build/
+          ctest --output-on-failure
+        env:
+          ARCTICDB_USING_CONDA: 1
+
+  # ==================== JOB 3: PYTHON TESTS ====================
+  python_tests:
+    name: Python Tests (${{inputs.platform}}) - ${{matrix.type}}
+    needs: [compile]
+    if: |
+      always() &&
+      !cancelled()
+    runs-on: ${{inputs.os}}
+    strategy:
+      fail-fast: false
+      matrix:
+        type: [unit, integration, hypothesis, stress, compat, enduser]
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
+      ACTIONS_ALLOW_UNSECURE_COMMANDS: true
+      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}}
+      SCCACHE_GHA_ENABLED: "true"
+    defaults:
+      run:
+        shell: bash -l {0}
+    steps:
+      - name: Free Disk Space
+        if: inputs.free_disk_space
+        uses: jlumbroso/free-disk-space@v1.3.1
+        with:
+          tool-cache: false
+          large-packages: false
+          docker-images: false
+
+      - uses: actions/checkout@v6.0.1
+      # Do not use recursive submodules checkout to simulate conda feedstock build
+      # with:
+      #   submodules: recursive
+
+      - name: Configure sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+        with:
+          version: v0.12.0
+          disable_annotations: 'true'
+
+      - name: Download build artifacts
+        uses: actions/download-artifact@v8
+        with:
+          name: build-${{inputs.platform}}
+          path: .
+
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v2.0.0
+        id: cpu-cores
+
+      - name: Install Conda environment from environment-dev.yml
+        uses: mamba-org/setup-micromamba@v2.0.6
+        with:
+          environment-file: environment-dev.yml
+          environment-name: arcticdb
+          init-shell: bash
+          cache-environment: true
+          cache-environment-key: conda-env-${{inputs.platform}}
+          post-cleanup: 'none'
+
+      - name: Install ArcticDB from artifacts
+        run: |
+          if [ -d "cpp/out/${{inputs.preset}}-build" ] && (ls python/arcticdb_ext*.so 2>/dev/null | head -1 | grep -q .); then
+            echo "Build artifacts found, skipping CMake build"
+            export ARCTIC_CMAKE_PRESET=skip
+          fi
+          python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e .
+        env:
+          ARCTICDB_USING_CONDA: 1
+
+      - name: Install MongoDB
+        if: inputs.install_mongodb && !contains(inputs.platform, 'aarch64')
+        uses: ./.github/actions/install_mongodb
+
+      - name: Install MongoDB (ARM manual)
+        if: inputs.install_mongodb && contains(inputs.platform, 'aarch64')
+        run: |
+          curl --retry 5 --retry-delay 5 --retry-connrefused -LO https://fastdl.mongodb.org/linux/mongodb-linux-aarch64-ubuntu2204-7.0.0.tgz
+          tar -xzf mongodb-linux-aarch64-ubuntu2204-7.0.0.tgz
+          cp mongodb-linux-aarch64-ubuntu2204-7.0.0/bin/* /usr/local/bin/
+          mongod --version
+          rm -rf mongodb-linux-aarch64-ubuntu2204-7.0.0.tgz mongodb-linux-aarch64-ubuntu2204-7.0.0
+
+      - name: Install npm
+        uses: actions/setup-node@v6.1.0
+        with:
+          node-version: '24'
+
+      - name: Install Azurite
+        uses: nick-fields/retry@v3
+        with:
+          timeout_minutes: 10
+          max_attempts: 3
+          command: npm install -g azurite
+
+      - name: Check no arcticdb file depend on tests package
+        run: build_tooling/checks.sh
+
+      - name: Set persistent storage variables
+        if: inputs.persistent_storage != 'no'
+        uses: ./.github/actions/set_persistent_storage_env_vars
+        with:
+          aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}"
+          aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}"
+          gcp_access_key: "${{ secrets.GCP_S3_ACCESS_KEY }}"
+          gcp_secret_key: "${{ secrets.GCP_S3_SECRET_KEY }}"
+          azure_container: "githubblob"
+          azure_connection_string: "${{ secrets.AZURE_CONNECTION_STRING }}"
+          persistent_storage: ${{inputs.persistent_storage}}
+
+      - name: Set ArcticDB Debug Logging
+        if: inputs.run_enable_logging
+        uses: ./.github/actions/enable_logging
+
+      - name: Setup tmate session
+        if: inputs.debug_enabled
+        uses: mxschmitt/action-tmate@v3
+
+      - name: Install pytest-repeat
+        run: python -m pip --retries 3 --timeout 180 install pytest-repeat
+
+      - name: Test with pytest
+        run: |
+          openssl version -d || true
+          ulimit -a || true
+          echo "Run commandline: $COMMANDLINE"
+          eval "$COMMANDLINE"
+          export ARCTICDB_WARN_ON_WRITING_EMPTY_DATAFRAME=0
+          if [[ "$(echo "$ARCTICDB_PYTEST_ARGS" | xargs)" == pytest* ]]; then
+            python -m pip install pytest-repeat setuptools wheel
+            python setup.py protoc --build-lib python
+            echo "Run custom pytest command: $ARCTICDB_PYTEST_ARGS"
+            eval "$ARCTICDB_PYTEST_ARGS"
+          else
+            python setup.py protoc --build-lib python
+            cd python
+            python -m pytest --timeout=3600 -v --tb=line -n logical --dist worksteal \
+              tests/${{matrix.type}} $ARCTICDB_PYTEST_ARGS
+          fi
+        env:
+          ARCTICDB_USING_CONDA: 1
+          COMMANDLINE: ${{inputs.run_commandline}}
+          CI_MONGO_HOST: ${{inputs.install_mongodb && 'mongodb' || ''}}
+          HYPOTHESIS_PROFILE: ${{inputs.hypothesis_profile || 'dev'}}
+          ARCTICDB_PYTEST_ARGS: ${{inputs.run_custom_pytest_command}}
+          STORAGE_TYPE: ${{inputs.persistent_storage == 'no' && 'LMDB' || inputs.persistent_storage}}
+          NODE_OPTIONS: --openssl-legacy-provider
\ No newline at end of file
diff --git a/.github/workflows/compile_and_test_with_conda_win.yml b/.github/workflows/compile_and_test_with_conda_win.yml
new file mode 100644
index 00000000000..1711ce46fd7
--- /dev/null
+++ b/.github/workflows/compile_and_test_with_conda_win.yml
@@ -0,0 +1,236 @@
+name: Conda Build and Test (Windows)
+
+permissions:
+  contents: read
+
+on:
+  workflow_call:
+    inputs:
+      os: {required: true, type: string, description: GitHub os to execute on}
+      platform: {required: true, type: string, description: Platform identifier e.g. win_64}
+      preset: {required: true, type: string, description: CMake preset name e.g. windows-cl-conda-release}
+
+      run_cpp_tests: {required: false, type: boolean, default: true, description: Whether to run C++ tests}
+      persistent_storage: {required: false, type: string, default: 'no', description: Persistent storage type}
+      debug_enabled: {required: false, type: boolean, default: false, description: Enable tmate debug session}
+      run_enable_logging: {required: false, type: boolean, default: false, description: Enable ArcticDB debug logging}
+      run_commandline: {required: false, type: string, default: '', description: Custom commandline to run before tests}
+      run_custom_pytest_command: {required: false, type: string, default: '', description: Custom pytest command or additional arguments}
+
+jobs:
+  # ==================== JOB 1: COMPILE ====================
+  compile:
+    name: Compile (${{inputs.platform}})
+    runs-on: ${{inputs.os}}
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
+      ACTIONS_ALLOW_UNSECURE_COMMANDS: true
+      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}}
+      SCCACHE_GHA_ENABLED: "true"
+    defaults:
+      run:
+        shell: bash -l {0}
+    steps:
+      - uses: actions/checkout@v6.0.1
+      # Do not use recursive submodules checkout to simulate conda feedstock build
+      # with:
+      #   submodules: recursive
+
+      - name: Configure sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+        with:
+          version: v0.12.0
+          disable_annotations: 'true'
+
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v2.0.0
+        id: cpu-cores
+
+      - name: Install Conda environment from environment-dev.yml
+        uses: mamba-org/setup-micromamba@v2.0.6
+        with:
+          environment-file: environment-dev.yml
+          environment-name: arcticdb
+          init-shell: bash cmd.exe
+          cache-environment: true
+          cache-environment-key: conda-env-${{inputs.platform}}
+          post-cleanup: 'none'
+
+      - name: Build ArcticDB with conda
+        shell: cmd /C call {0}
+
+        # Clear CMAKE_GENERATOR_PLATFORM and CMAKE_GENERATOR_TOOLSET to prevent them from overriding the CMake preset configuration.
+        # GitHub runner images or Conda activation scripts may set these, which can conflict
+        # with the generator, architecture, and toolset already defined in the preset.
+        run: |
+          set CMAKE_GENERATOR_PLATFORM=
+          set CMAKE_GENERATOR_TOOLSET=
+          python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e .
+        env:
+          ARCTICDB_USING_CONDA: 1
+          ARCTICDB_BUILD_CPP_TESTS: 1
+          ARCTIC_CMAKE_PRESET: ${{inputs.preset}}
+
+      - name: Show sccache stats
+        shell: cmd /C call {0}
+        run: |
+          if defined SCCACHE_PATH (
+            %SCCACHE_PATH% --show-stats
+          ) else (
+            sccache --show-stats
+          )
+
+      - name: Archive build artifacts
+        uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: build-${{inputs.platform}}
+          retention-days: 7
+          path: |
+            cpp/out/${{inputs.preset}}-build/
+            python/arcticdb_ext*
+            python/**/*.pyd
+
+  # ==================== JOB 2: PYTHON TESTS ====================
+  # Note: C++ tests (rapidcheck) are currently disabled on Windows due to a linking issue.
+  python_tests:
+    name: Python Tests (${{inputs.platform}}) - ${{matrix.type}}
+    needs: [compile]
+    if: |
+      always() &&
+      !cancelled() &&
+      !failure()
+    runs-on: ${{inputs.os}}
+    strategy:
+      fail-fast: false
+      matrix:
+        type: [unit, integration, hypothesis, stress, compat, enduser]
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
+      ACTIONS_ALLOW_UNSECURE_COMMANDS: true
+      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}}
+      SCCACHE_GHA_ENABLED: "true"
+    defaults:
+      run:
+        shell: bash -l {0}
+    steps:
+      - uses: actions/checkout@v6.0.1
+      # Do not use recursive submodules checkout to simulate conda feedstock build
+      # with:
+      #   submodules: recursive
+
+      - name: Configure sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+        with:
+          version: v0.12.0
+          disable_annotations: 'true'
+
+      - name: Download build artifacts
+        uses: actions/download-artifact@v8
+        with:
+          name: build-${{inputs.platform}}
+          path: .
+
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v2.0.0
+        id: cpu-cores
+
+      - name: Install Conda environment from environment-dev.yml
+        uses: mamba-org/setup-micromamba@v2.0.6
+        with:
+          environment-file: environment-dev.yml
+          environment-name: arcticdb
+          init-shell: bash cmd.exe
+          cache-environment: true
+          cache-environment-key: conda-env-${{inputs.platform}}
+          post-cleanup: 'none'
+
+      - name: Install ArcticDB from artifacts
+        shell: cmd /C call {0}
+        run: |
+          set ARCTIC_CMAKE_PRESET=
+          if exist "cpp\out\${{inputs.preset}}-build" (
+            dir /b python\arcticdb_ext*.pyd >nul 2>&1
+            if not errorlevel 1 (
+              echo Build artifacts found, skipping CMake build
+              set ARCTIC_CMAKE_PRESET=skip
+            )
+          )
+          python -m pip install --no-build-isolation --no-deps --retries 3 --timeout 400 -v -e .
+        env:
+          ARCTICDB_USING_CONDA: 1
+
+      - name: Install npm
+        uses: actions/setup-node@v6.1.0
+        with:
+          node-version: '24'
+
+      - name: Install Azurite
+        shell: bash -elo pipefail {0}
+        run: |
+          npm install -g azurite
+          npm_config_prefix=$(npm config get prefix)
+          npm_bin_dir="$npm_config_prefix"
+          if [[ "$RUNNER_OS" == "Windows" ]]; then
+            npm_bin_dir=$(echo "$npm_bin_dir" | sed 's|\\|/|g')
+            if [[ "$npm_bin_dir" =~ ^([A-Za-z]):/(.*) ]]; then
+              drive_letter="${BASH_REMATCH[1]}"
+              rest="${BASH_REMATCH[2]}"
+              npm_bin_dir="/${drive_letter,,}/${rest}"
+            fi
+          fi
+          npm_bin_dir="${npm_bin_dir}/bin"
+          echo "$npm_bin_dir" >> $GITHUB_PATH
+          export PATH="$npm_bin_dir:$PATH"
+          which azurite && echo "Azurite found: $(which azurite)" || echo "Warning: azurite not found in PATH"
+
+      - name: Check no arcticdb file depend on tests package
+        shell: bash -elo pipefail {0}
+        run: build_tooling/checks.sh
+
+      - name: Set persistent storage variables
+        if: inputs.persistent_storage != 'no'
+        uses: ./.github/actions/set_persistent_storage_env_vars
+        with:
+          aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}"
+          aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}"
+          gcp_access_key: "${{ secrets.GCP_S3_ACCESS_KEY }}"
+          gcp_secret_key: "${{ secrets.GCP_S3_SECRET_KEY }}"
+          azure_container: "githubblob"
+          azure_connection_string: "${{ secrets.AZURE_CONNECTION_STRING }}"
+          persistent_storage: ${{inputs.persistent_storage}}
+
+      - name: Set ArcticDB Debug Logging
+        if: inputs.run_enable_logging
+        uses: ./.github/actions/enable_logging
+
+      - name: Setup tmate session
+        if: inputs.debug_enabled
+        uses: mxschmitt/action-tmate@v3
+
+      - name: Install pytest-repeat
+        shell: bash -elo pipefail {0}
+        run: python -m pip --retries 3 --timeout 180 install pytest-repeat
+
+      - name: Test with pytest
+        shell: bash -elo pipefail {0}
+        run: |
+          echo "Run commandline: $COMMANDLINE"
+          eval "$COMMANDLINE"
+          export ARCTICDB_RAND_SEED=$RANDOM
+          export ARCTICDB_WARN_ON_WRITING_EMPTY_DATAFRAME=0
+          if [[ "$(echo "$ARCTICDB_PYTEST_ARGS" | xargs)" == *pytest* ]]; then
+            command="python -m $ARCTICDB_PYTEST_ARGS"
+            echo "Run custom pytest command: $command"
+            eval "$command"
+          else
+            cd python
+            python -m pytest --timeout=3600 -v --tb=line -n logical --dist worksteal \
+              -m "not lmdb" \
+              tests/${{matrix.type}} $ARCTICDB_PYTEST_ARGS
+          fi
+        env:
+          ARCTICDB_USING_CONDA: 1
+          COMMANDLINE: ${{inputs.run_commandline}}
+          ARCTICDB_PYTEST_ARGS: ${{inputs.run_custom_pytest_command}}
+          NODE_OPTIONS: --openssl-legacy-provider
\ No newline at end of file
diff --git a/python/test.py b/python/test.py
new file mode 100644
index 00000000000..db82420fd8c
--- /dev/null
+++ b/python/test.py
@@ -0,0 +1 @@
+import arcticdb
\ No newline at end of file