Added input spark options and schema for reading from the storage and example of uniqueness check for composite key #701
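
The change referenced in the title adds Spark reader options and an explicit input schema for reading from storage, together with an example uniqueness check over a composite key. For context, here is a minimal, hypothetical PySpark sketch of both ideas (the format, path, options, and column names are illustrative assumptions, not code from the PR):

# Hypothetical sketch, not the PR's actual code: read input with explicit
# Spark options and schema, then flag rows whose composite key is not unique.
from pyspark.sql import SparkSession
import pyspark.sql.functions as F
import pyspark.sql.types as T

spark = SparkSession.builder.getOrCreate()

# Explicit schema for the input; these column names are illustrative only.
schema = T.StructType([
    T.StructField("order_id", T.StringType(), nullable=False),
    T.StructField("line_no", T.IntegerType(), nullable=False),
    T.StructField("amount", T.DoubleType(), nullable=True),
])

# Reader options passed through to the storage format; path and options are assumptions.
df = (
    spark.read.format("csv")
    .options(header="true", delimiter=",")
    .schema(schema)
    .load("/path/to/input")
)

# Uniqueness check over the composite key (order_id, line_no):
# count rows per key and surface any key that occurs more than once.
key_cols = ["order_id", "line_no"]
duplicates = (
    df.groupBy(*key_cols)
    .agg(F.count("*").alias("cnt"))
    .filter(F.col("cnt") > 1)
)
assert duplicates.count() == 0, "composite key is not unique"

Grouping on the full key tuple, rather than checking each column separately, is what makes the check composite: a key is unique only if the combined (order_id, line_no) pair occurs exactly once.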

Workflow file for this run

name: acceptance

on:
  pull_request:
    types: [ opened, synchronize, ready_for_review ]
  merge_group:
    types: [ checks_requested ]
  push:
    branches:
      - main
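
# id-token: write enables the OIDC-based Codecov upload at the end of the
# integration job; pull-requests: write presumably lets the workflow comment on PRs.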
permissions:
  id-token: write
  contents: read
  pull-requests: write
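
# Cancel any in-progress run for the same workflow and ref when a newer one starts.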
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  integration:
    # Only run this job for PRs from branches on the main repository, not from forks.
    # Workflows triggered by PRs from forks don't have access to the tool environment.
    # PRs from forks are to be tested by the reviewer(s) / maintainer(s) before merging.
    if: github.event_name == 'pull_request' && !github.event.pull_request.draft && !github.event.pull_request.head.repo.fork
    environment: tool
    runs-on: larger
    steps:
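      # Full clone (fetch-depth: 0), presumably so tooling that inspects git history and tags works.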
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Install Python
        uses: actions/setup-python@v5
        with:
          cache: 'pip'
          cache-dependency-path: '**/pyproject.toml'
          python-version: '3.10'
      - name: Install hatch
        run: pip install hatch==1.9.4
      - name: Run unit tests and generate test coverage report
        run: make test
      # Integration tests are run from within the tests/integration folder.
      # We need to make sure .coveragerc is there so that code coverage is generated for the right modules.
      - name: Prepare code coverage configuration for integration tests
        run: cp .coveragerc tests/integration
      # Run tests from `tests/integration` as defined in .codegen.json
      # and generate code coverage for the modules defined in .coveragerc.
      - name: Run integration tests and generate test coverage report
        uses: databrickslabs/sandbox/acceptance@acceptance/v0.4.3
        with:
          vault_uri: ${{ secrets.VAULT_URI }}
          timeout: 2h
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }}
          ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }}
      # Collect all coverage reports: coverage.xml from the integration tests and coverage-unit.xml from the unit tests.
      - name: Publish test coverage
        uses: codecov/codecov-action@v5
        with:
          use_oidc: true

  serverless_integration:
    # Only run this job for PRs from branches on the main repository, not from forks.
    # Workflows triggered by PRs from forks don't have access to the tool environment.
    # PRs from forks are to be tested by the reviewer(s) / maintainer(s) before merging.
    if: github.event_name == 'pull_request' && !github.event.pull_request.draft && !github.event.pull_request.head.repo.fork
    environment: tool
    runs-on: larger
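    # DATABRICKS_SERVERLESS_COMPUTE_ID=auto points the integration tests at serverless compute,
    # matching the "Run integration tests on serverless cluster" step below.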
    env:
      DATABRICKS_SERVERLESS_COMPUTE_ID: auto
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Install Python
        uses: actions/setup-python@v5
        with:
          cache: 'pip'
          cache-dependency-path: '**/pyproject.toml'
          python-version: '3.10'
      - name: Install hatch
        run: pip install hatch==1.9.4
      - name: Run integration tests on serverless cluster
        uses: databrickslabs/sandbox/acceptance@acceptance/v0.4.3
        with:
          vault_uri: ${{ secrets.VAULT_URI }}
          timeout: 2h
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }}
          ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }}
          DATABRICKS_SERVERLESS_COMPUTE_ID: ${{ env.DATABRICKS_SERVERLESS_COMPUTE_ID }}