diff --git a/.eslintrc.js b/.eslintrc.js
index 65f96764527b2..18f6abda6d7e8 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -89,6 +89,14 @@ module.exports = {
message:
"Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.",
},
+ {
+ name: 'qs',
+ message: 'Please use query-string instead of qs',
+ },
+ {
+ name: 'moment',
+ message: 'Please import moment-timezone instead of moment',
+ },
],
},
],
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 4683234338a78..586fdd67e6c8d 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -241,7 +241,7 @@ yarn.lock @getsentry/owners-js-de
/tests/snuba/search/test_backend.py @getsentry/visibility
-/src/sentry/search/events/ @getsentry/visibility @getsentry/issues
+/src/sentry/search/events/ @getsentry/visibility
/src/sentry/utils/performance_issues/ @getsentry/performance
@@ -323,10 +323,10 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
## End of Profiling
-## Configurations
-/src/sentry/remote_config/ @getsentry/replay-backend
-/tests/sentry/remote_config/ @getsentry/replay-backend
-## End of Configurations
+## Flags
+/src/sentry/flags/ @getsentry/replay-backend
+/tests/sentry/flags/ @getsentry/replay-backend
+## End of Flags
## Replays
@@ -519,6 +519,8 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
/src/sentry/grouping/ @getsentry/issues
/src/sentry/mediators/ @getsentry/issues
/src/sentry/ratelimits/ @getsentry/issues
+/src/sentry/search/events/builder/issue_platform.py @getsentry/issues
+/src/sentry/search/events/builder/errors.py @getsentry/issues
/src/sentry/search/snuba/ @getsentry/issues
/src/sentry/seer/similarity/ @getsentry/issues
/src/sentry/tasks/auto_ongoing_issues.py @getsentry/issues
diff --git a/.github/actions/test-setup-sentry-devservices/action.yml b/.github/actions/test-setup-sentry-devservices/action.yml
new file mode 100644
index 0000000000000..0995881e85bba
--- /dev/null
+++ b/.github/actions/test-setup-sentry-devservices/action.yml
@@ -0,0 +1,119 @@
+# NOTE: Do not rely on `make` commands here as this action is used across different repos
+# where the Makefile will not be available
+name: 'Sentry Setup'
+description: 'Sets up a Sentry test environment'
+inputs:
+ workdir:
+ description: 'Directory where the sentry source is located'
+ required: false
+ default: '.'
+
+outputs:
+ yarn-cache-dir:
+ description: 'Path to yarn cache'
+ value: ${{ steps.config.outputs.yarn-cache-dir }}
+ matrix-instance-number:
+ description: 'The matrix instance number (starting at 1)'
+ value: ${{ steps.config.outputs.matrix-instance-number }}
+ matrix-instance-total:
+ description: 'Reexport of MATRIX_INSTANCE_TOTAL.'
+ value: ${{ steps.config.outputs.matrix-instance-total }}
+
+runs:
+ using: 'composite'
+ steps:
+ - name: Setup default environment variables
+ # the default for "bash" is:
+ # bash --noprofile --norc -eo pipefail {0}
+ shell: bash --noprofile --norc -eo pipefail -ux {0}
+ env:
+ MATRIX_INSTANCE: ${{ matrix.instance }}
+ # XXX: We should be using something like len(strategy.matrix.instance) (not possible atm)
+ # If you have other things like python-version: [foo, bar, baz] then the sharding logic
+ # isn't right because job-total will be 3x larger and you'd never run 2/3 of the tests.
+ # MATRIX_INSTANCE_TOTAL: ${{ strategy.job-total }}
+ run: |
+ echo "PIP_DISABLE_PIP_VERSION_CHECK=on" >> $GITHUB_ENV
+ echo "PIP_INDEX_URL=https://pypi.devinfra.sentry.io/simple" >> $GITHUB_ENV
+ echo "SENTRY_SKIP_BACKEND_VALIDATION=1" >> $GITHUB_ENV
+
+ ### node configuration ###
+ echo "NODE_ENV=development" >> $GITHUB_ENV
+
+ ### pytest configuration ###
+ echo "PY_COLORS=1" >> "$GITHUB_ENV"
+ echo "PYTEST_ADDOPTS=--reruns=5 --durations=10 --fail-slow=60s" >> $GITHUB_ENV
+ echo "COVERAGE_CORE=sysmon" >> "$GITHUB_ENV"
+
+ ### pytest-sentry configuration ###
+ if [ "$GITHUB_REPOSITORY" = "getsentry/sentry" ]; then
+ echo "PYTEST_SENTRY_DSN=https://6fd5cfea2d4d46b182ad214ac7810508@sentry.io/2423079" >> $GITHUB_ENV
+ echo "PYTEST_SENTRY_TRACES_SAMPLE_RATE=0" >> $GITHUB_ENV
+
+          # This records failures on master to sentry in order to detect flaky tests, as it's
+ # expected that people have failing tests on their PRs
+ if [ "$GITHUB_REF" = "refs/heads/master" ]; then
+ echo "PYTEST_SENTRY_ALWAYS_REPORT=1" >> $GITHUB_ENV
+ fi
+ fi
+
+ # Configure a different release version, otherwise it defaults to the
+ # commit sha which will conflict with our actual prod releases. This is a
+ # confusing experience because it looks like these are "empty" releases
+ # because no commits are attached and associates the release with our
+ # javascript + sentry projects.
+ echo "SENTRY_RELEASE=ci@$GITHUB_SHA" >> $GITHUB_ENV
+
+ # this handles pytest test sharding
+ if [ "$MATRIX_INSTANCE" ]; then
+ if ! [ "${MATRIX_INSTANCE_TOTAL:-}" ]; then
+ echo "MATRIX_INSTANCE_TOTAL is required."
+ exit 1
+ fi
+ echo "TEST_GROUP=$MATRIX_INSTANCE" >> $GITHUB_ENV
+ echo "TOTAL_TEST_GROUPS=$MATRIX_INSTANCE_TOTAL" >> $GITHUB_ENV
+ fi
+
+    - uses: getsentry/action-setup-venv@a133e6fd5fa6abd3f590a1c106abda344f5df69f # v2.1.0
+      with:
+        python-version: ${{ inputs.python-version }} # NOTE(review): 'python-version' is not declared in this action's inputs, so it resolves to '' — declare the input (with a default) or remove this line
+        cache-dependency-path: ${{ inputs.workdir }}/requirements-dev-frozen.txt
+        install-cmd: cd ${{ inputs.workdir }} && python3 -m tools.hack_pip && pip install -r requirements-dev-frozen.txt
+
+ - name: Set up outputs
+ id: config
+ env:
+ MATRIX_INSTANCE: ${{ matrix.instance }}
+ shell: bash --noprofile --norc -eo pipefail -ux {0}
+ run: |
+ echo "yarn-cache-dir=$(yarn cache dir)" >> "$GITHUB_OUTPUT"
+ echo "matrix-instance-number=$(($MATRIX_INSTANCE+1))" >> "$GITHUB_OUTPUT"
+ echo "matrix-instance-total=$((${MATRIX_INSTANCE_TOTAL:-}))" >> "$GITHUB_OUTPUT"
+
+ - name: Install python dependencies
+ shell: bash --noprofile --norc -eo pipefail -ux {0}
+ env:
+ # This is necessary when other repositories (e.g. relay) want to take advantage of this workflow
+ # without needing to fork it. The path needed is the one where setup.py is located
+ WORKDIR: ${{ inputs.workdir }}
+ run: |
+ cd "$WORKDIR"
+ # We need to install editable otherwise things like check migration will fail.
+ python3 -m tools.fast_editable --path .
+
+ - name: Start devservices
+ shell: bash --noprofile --norc -eo pipefail -ux {0}
+ env:
+ WORKDIR: ${{ inputs.workdir }}
+ ENABLE_AUTORUN_MIGRATION_SEARCH_ISSUES: '1'
+ run: |
+ sentry init
+
+ # have tests listen on the docker gateway ip so loopback can occur
+ echo "DJANGO_LIVE_TEST_SERVER_ADDRESS=$(docker network inspect bridge --format='{{(index .IPAM.Config 0).Gateway}}')" >> "$GITHUB_ENV"
+
+ docker ps -a
+
+ # This is necessary when other repositories (e.g. relay) want to take advantage of this workflow
+ # without needing to fork it. The path needed is the one where tools are located
+ cd "$WORKDIR"
diff --git a/.github/workflows/test_docker_compose_acceptance.yml b/.github/workflows/test_docker_compose_acceptance.yml
new file mode 100644
index 0000000000000..df15c17b9e273
--- /dev/null
+++ b/.github/workflows/test_docker_compose_acceptance.yml
@@ -0,0 +1,144 @@
+# Also note that this name *MUST* match the filename because GHA
+# only provides the workflow name (https://docs.github.com/en/free-pro-team@latest/actions/reference/environment-variables#default-environment-variables)
+# and GH APIs only support querying by workflow *FILENAME* (https://developer.github.com/v3/actions/workflows/#get-a-workflow)
+name: test-docker-compose-acceptance
+on:
+ schedule:
+ - cron: '30,0 * * * *'
+
+# Cancel in-progress runs. With only a schedule trigger, github.head_ref is empty, so the group falls back to run_id.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+# hack for https://github.com/actions/cache/issues/810#issuecomment-1222550359
+env:
+ SEGMENT_DOWNLOAD_TIMEOUT_MINS: 3
+ NODE_OPTIONS: '--max-old-space-size=4096'
+
+jobs:
+ docker-compose-acceptance:
+ name: docker-compose-acceptance
+ runs-on: ubuntu-22.04
+ timeout-minutes: 30
+ permissions:
+ contents: read
+ id-token: write
+ strategy:
+ # This helps not having to run multiple jobs because one fails, thus, reducing resource usage
+ # and reducing the risk that one of many runs would turn red again (read: intermittent tests)
+ fail-fast: false
+ matrix:
+ # XXX: When updating this, make sure you also update MATRIX_INSTANCE_TOTAL.
+ instance: [0, 1, 2, 3, 4]
+ pg-version: ['14']
+ env:
+ # XXX: MATRIX_INSTANCE_TOTAL must be hardcoded to the length of strategy.matrix.instance.
+ MATRIX_INSTANCE_TOTAL: 5
+ TEST_GROUP_STRATEGY: roundrobin
+
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ name: Checkout sentry
+
+ - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
+ id: setup-node
+ with:
+ node-version-file: '.volta.json'
+
+ - name: Step configurations
+ id: config
+ run: |
+ echo "webpack-path=.webpack_cache" >> "$GITHUB_OUTPUT"
+ echo "WEBPACK_CACHE_PATH=.webpack_cache" >> "$GITHUB_ENV"
+
+ - name: webpack cache
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
+ with:
+ path: ${{ steps.config.outputs.webpack-path }}
+ key: ${{ runner.os }}-v2-webpack-cache-${{ hashFiles('webpack.config.ts') }}
+
+ - name: node_modules cache
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
+ id: nodemodulescache
+ with:
+ path: node_modules
+ key: ${{ runner.os }}-node-modules-${{ hashFiles('yarn.lock', 'api-docs/yarn.lock', '.volta.json') }}
+
+ - name: Install Javascript Dependencies
+ if: steps.nodemodulescache.outputs.cache-hit != 'true'
+ run: yarn install --frozen-lockfile
+
+ - name: webpack
+ env:
+ # this is fine to not have for forks, it shouldn't fail
+ SENTRY_WEBPACK_WEBHOOK_SECRET: ${{ secrets.SENTRY_WEBPACK_WEBHOOK_SECRET }}
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+ # should set value either as `true` or `false`
+ CODECOV_ENABLE_BA: true
+ GH_COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
+ run: |
+ yarn build-acceptance
+
+ - name: Build chartcuterie configuration module
+ run: |
+ make build-chartcuterie-config
+
+ - name: Setup sentry env
+ uses: ./.github/actions/test-setup-sentry-devservices
+ id: setup
+
+ - name: copy chartcuterie config to devservices chartcuterie directory
+ run: |
+ ls config/chartcuterie
+ cp -r config/chartcuterie devservices
+
+ - name: Bring up devservices
+ run: |
+ docker network create sentry
+ docker compose -f devservices/docker-compose-testing.yml up -d redis postgres snuba clickhouse chartcuterie
+
+ - name: Run acceptance tests (#${{ steps.setup.outputs.matrix-instance-number }} of ${{ steps.setup.outputs.matrix-instance-total }})
+ run: make run-acceptance
+
+ - name: Collect test data
+ uses: ./.github/actions/collect-test-data
+ if: ${{ !cancelled() }}
+ with:
+ artifact_path: .artifacts/pytest.acceptance.json
+ gcs_bucket: ${{ secrets.COLLECT_TEST_DATA_GCS_BUCKET }}
+ gcp_project_id: ${{ secrets.COLLECT_TEST_DATA_GCP_PROJECT_ID }}
+ workload_identity_provider: ${{ secrets.SENTRY_GCP_DEV_WORKLOAD_IDENTITY_POOL }}
+ service_account_email: ${{ secrets.COLLECT_TEST_DATA_SERVICE_ACCOUNT_EMAIL }}
+ matrix_instance_number: ${{ steps.setup.outputs.matrix-instance-number }}
+
+ # This job runs when FE or BE changes happen, however, we only upload coverage data for
+ # BE changes since it conflicts with codecov's carry forward functionality
+ # Upload coverage data even if running the tests step fails since
+ # it reduces large coverage fluctuations
+ - name: Handle artifacts
+ uses: ./.github/actions/artifacts
+ if: ${{ always() }}
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ commit_sha: ${{ github.event.pull_request.head.sha }}
+
+ - name: Inspect failure
+ if: failure()
+ run: |
+ docker compose -f devservices/docker-compose-testing.yml ps
+ docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+
+ docker-compose-acceptance-required-checks:
+ # this is a required check so we need this job to always run and report a status.
+ if: always()
+ name: Docker Compose Acceptance
+ needs: [docker-compose-acceptance]
+ runs-on: ubuntu-22.04
+ timeout-minutes: 3
+ steps:
+ - name: Check for failures
+ if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')
+ run: |
+ echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test_docker_compose_backend.yml b/.github/workflows/test_docker_compose_backend.yml
new file mode 100644
index 0000000000000..25491b3566ab7
--- /dev/null
+++ b/.github/workflows/test_docker_compose_backend.yml
@@ -0,0 +1,295 @@
+name: test-docker-compose-backend
+
+on:
+ schedule:
+ - cron: '30,0 * * * *'
+
+# Cancel in-progress runs. With only a schedule trigger, github.head_ref is empty, so the group falls back to run_id.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+# hack for https://github.com/actions/cache/issues/810#issuecomment-1222550359
+env:
+ SEGMENT_DOWNLOAD_TIMEOUT_MINS: 3
+
+jobs:
+ docker-compose-api-docs:
+ name: api docs test
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+ - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
+ id: setup-node
+ with:
+ node-version-file: '.volta.json'
+
+ - name: Setup sentry python env
+ uses: ./.github/actions/test-setup-sentry-devservices
+ id: setup
+
+ - name: Bring up devservices
+ run: |
+ docker network create sentry
+ docker compose -f devservices/docker-compose-testing.yml up -d redis postgres snuba clickhouse
+
+ - name: Run API docs tests
+ # install ts-node for ts build scripts to execute properly without potentially installing
+ # conflicting deps when running scripts locally
+ # see: https://github.com/getsentry/sentry/pull/32328/files
+ run: |
+ yarn add ts-node && make test-api-docs
+
+ - name: Inspect failure
+ if: failure()
+ run: |
+ docker compose -f devservices/docker-compose-testing.yml ps
+ docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+
+ docker-compose-backend-test:
+ name: backend test
+ runs-on: ubuntu-22.04
+ timeout-minutes: 60
+ permissions:
+ contents: read
+ id-token: write
+ strategy:
+ # This helps not having to run multiple jobs because one fails, thus, reducing resource usage
+ # and reducing the risk that one of many runs would turn red again (read: intermittent tests)
+ fail-fast: false
+ matrix:
+ # XXX: When updating this, make sure you also update MATRIX_INSTANCE_TOTAL.
+ instance: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+ pg-version: ['14']
+
+ env:
+ # XXX: `MATRIX_INSTANCE_TOTAL` must be hardcoded to the length of `strategy.matrix.instance`.
+ # If this increases, make sure to also increase `flags.backend.after_n_builds` in `codecov.yml`.
+ MATRIX_INSTANCE_TOTAL: 11
+
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+ - name: Setup sentry env
+ uses: ./.github/actions/test-setup-sentry-devservices
+
+ - name: Bring up devservices
+ run: |
+ docker network create sentry
+ echo "BIGTABLE_EMULATOR_HOST=127.0.0.1:8086" >> $GITHUB_ENV
+ docker compose -f devservices/docker-compose-testing.yml up -d
+
+ - name: Run backend test (${{ steps.setup.outputs.matrix-instance-number }} of ${{ steps.setup.outputs.matrix-instance-total }})
+ run: |
+ make test-python-ci
+
+ - name: Collect test data
+ uses: ./.github/actions/collect-test-data
+ if: ${{ !cancelled() }}
+ with:
+ artifact_path: .artifacts/pytest.json
+ gcs_bucket: ${{ secrets.COLLECT_TEST_DATA_GCS_BUCKET }}
+ gcp_project_id: ${{ secrets.COLLECT_TEST_DATA_GCP_PROJECT_ID }}
+ workload_identity_provider: ${{ secrets.SENTRY_GCP_DEV_WORKLOAD_IDENTITY_POOL }}
+ service_account_email: ${{ secrets.COLLECT_TEST_DATA_SERVICE_ACCOUNT_EMAIL }}
+ matrix_instance_number: ${{ steps.setup.outputs.matrix-instance-number }}
+
+ # Upload coverage data even if running the tests step fails since
+ # it reduces large coverage fluctuations
+ - name: Handle artifacts
+ if: ${{ always() }}
+ uses: ./.github/actions/artifacts
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ commit_sha: ${{ github.event.pull_request.head.sha }}
+
+ - name: Inspect failure
+ if: failure()
+ run: |
+ docker compose -f devservices/docker-compose-testing.yml ps
+ docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+
+ docker-compose-backend-migration-tests:
+ name: backend migration tests
+ runs-on: ubuntu-22.04
+ timeout-minutes: 30
+ strategy:
+ matrix:
+ pg-version: ['14']
+
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+ - name: Setup sentry env
+ uses: ./.github/actions/test-setup-sentry-devservices
+ id: setup
+
+ - name: Bring up devservices
+ run: |
+ docker network create sentry
+ docker compose -f devservices/docker-compose-testing.yml up -d redis postgres snuba clickhouse
+
+ - name: run tests
+ run: |
+ PYTEST_ADDOPTS="$PYTEST_ADDOPTS -m migrations --migrations --reruns 0" make test-python-ci
+
+ # Upload coverage data even if running the tests step fails since
+ # it reduces large coverage fluctuations
+ - name: Handle artifacts
+ if: ${{ always() }}
+ uses: ./.github/actions/artifacts
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ commit_sha: ${{ github.event.pull_request.head.sha }}
+
+ - name: Inspect failure
+ if: failure()
+ run: |
+ docker compose -f devservices/docker-compose-testing.yml ps
+ docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+
+ docker-compose-cli:
+ name: cli test
+ runs-on: ubuntu-22.04
+ timeout-minutes: 10
+ strategy:
+ matrix:
+ pg-version: ['14']
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+ - name: Setup sentry env
+ uses: ./.github/actions/test-setup-sentry-devservices
+ id: setup
+
+ - name: Bring up devservices
+ run: |
+ docker network create sentry
+ docker compose -f devservices/docker-compose-testing.yml up -d redis postgres
+
+ - name: Run test
+ run: |
+ make test-cli
+
+ # Upload coverage data even if running the tests step fails since
+ # it reduces large coverage fluctuations
+ - name: Handle artifacts
+ if: ${{ always() }}
+ uses: ./.github/actions/artifacts
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ commit_sha: ${{ github.event.pull_request.head.sha }}
+
+ - name: Inspect failure
+ if: failure()
+ run: |
+ docker compose -f devservices/docker-compose-testing.yml ps
+ docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+
+ docker-compose-migration:
+ name: check migration
+ runs-on: ubuntu-22.04
+ strategy:
+ matrix:
+ pg-version: ['14']
+
+ steps:
+ - name: Checkout sentry
+ uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+ - name: Setup sentry env
+ uses: ./.github/actions/test-setup-sentry-devservices
+ id: setup
+
+ - name: Bring up devservices
+ run: |
+ docker network create sentry
+ docker compose -f devservices/docker-compose-testing.yml up -d redis postgres
+
+ - name: Migration & lockfile checks
+ env:
+ SENTRY_LOG_LEVEL: ERROR
+ PGPASSWORD: postgres
+ run: |
+ ./.github/workflows/scripts/migration-check.sh
+
+ - name: Inspect failure
+ if: failure()
+ run: |
+ docker compose -f devservices/docker-compose-testing.yml ps
+ docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+
+ docker-compose-monolith-dbs:
+ name: monolith-dbs test
+ runs-on: ubuntu-22.04
+ timeout-minutes: 20
+ permissions:
+ contents: read
+ id-token: write
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+ - name: Setup sentry env
+ uses: ./.github/actions/test-setup-sentry-devservices
+ id: setup
+
+ - name: Bring up devservices
+ run: |
+ docker network create sentry
+ docker compose -f devservices/docker-compose-testing.yml up -d redis postgres
+
+ - name: Run test
+ run: |
+ make test-monolith-dbs
+
+ - name: Collect test data
+ uses: ./.github/actions/collect-test-data
+ if: ${{ !cancelled() }}
+ with:
+ artifact_path: .artifacts/pytest.monolith-dbs.json
+ gcs_bucket: ${{ secrets.COLLECT_TEST_DATA_GCS_BUCKET }}
+ gcp_project_id: ${{ secrets.COLLECT_TEST_DATA_GCP_PROJECT_ID }}
+ workload_identity_provider: ${{ secrets.SENTRY_GCP_DEV_WORKLOAD_IDENTITY_POOL }}
+ service_account_email: ${{ secrets.COLLECT_TEST_DATA_SERVICE_ACCOUNT_EMAIL }}
+
+ # Upload coverage data even if running the tests step fails since
+ # it reduces large coverage fluctuations
+ - name: Handle artifacts
+ if: ${{ always() }}
+ uses: ./.github/actions/artifacts
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ commit_sha: ${{ github.event.pull_request.head.sha }}
+
+ - name: Inspect failure
+ if: failure()
+ run: |
+ docker compose -f devservices/docker-compose-testing.yml ps
+ docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+
+ # This check runs once all dependent jobs have passed
+  # It symbolizes that all required Backend checks have successfully passed (or skipped)
+ # This step is the only required backend check
+ docker-compose-backend-required-check:
+ needs:
+ [
+ docker-compose-api-docs,
+ docker-compose-backend-test,
+ docker-compose-backend-migration-tests,
+ docker-compose-cli,
+ docker-compose-migration,
+ docker-compose-monolith-dbs,
+ ]
+ name: Docker Compose Backend
+ # This is necessary since a failed/skipped dependent job would cause this job to be skipped
+ if: always()
+ runs-on: ubuntu-22.04
+ steps:
+ # If any jobs we depend on fail, we will fail since this is a required check
+ # NOTE: A timeout is considered a failure
+ - name: Check for failures
+ if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')
+ run: |
+ echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6f3f7701ec3ae..6ab5dca06423e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -65,7 +65,7 @@ repos:
additional_dependencies: [packaging==21.3]
- id: requirements-overrides
name: use pinned archives (see comment in file)
- stages: [commit]
+ stages: [pre-commit]
language: pygrep
entry: |
(?x)
@@ -157,14 +157,14 @@ repos:
- id: python-check-blanket-type-ignore
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.24.1
+ rev: 0.29.3
hooks:
- id: check-github-actions
- id: check-github-workflows
args: [--verbose]
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.3.0
+ rev: v5.0.0
hooks:
- id: check-case-conflict
- id: check-executables-have-shebangs
@@ -182,7 +182,7 @@ repos:
args: [--pytest-test-first]
- repo: https://github.com/shellcheck-py/shellcheck-py
- rev: v0.9.0.6
+ rev: v0.10.0.1
hooks:
- id: shellcheck
types: [file]
diff --git a/CHANGES b/CHANGES
index 9dd28fdf199e2..1fae1885ccc85 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,3 +1,35 @@
+24.10.0
+-------
+
+### Various fixes & improvements
+
+- feat(releases): Split release commit from shared component (#78538) by @scttcper
+- chore(alerts): Add info log when sending subscription update data to Seer (#79049) by @ceorourke
+- feat(anomaly detection): add preview chart to new alert form (#78238) by @natemoo-re
+- feat(issue-stream): Use stacked primary secondary counts designs (#79070) by @MichaelSun48
+- feat(alerts): Add new feature flag to enable EAP alerts (#78985) by @edwardgou-sentry
+- feat(discover): Update EAP dataset and entity key for discover builders (#78967) by @edwardgou-sentry
+- fix: add info to post process TypeErrors for debugging (#79099) by @mjq
+- ref(ingest): annotate transaction consumer with spans (#79101) by @mjq
+- ref(rr6): Replace many useRouter's with useNavigate's (#78804) by @evanpurkhiser
+- chore(feedback): Analytics for error rendering feedback item (#78978) by @c298lee
+- feat(explore): Linking to spans in traceview from all tables (#78984) by @Abdkhan14
+- fix(eap): Count takes arg (#79066) by @Zylphrex
+- feat(quick-start): Add analytics code to the backend to track quick start completion (#79089) by @priscilawebdev
+- feat(quick-start): Add new feature flag for the new updates (#79094) by @priscilawebdev
+- fix(dashboard): OnDemand widget creation also includes transaction type (#79059) by @narsaynorath
+- fix(dashboard): OnDemand extraction for Transaction widgets (#79055) by @narsaynorath
+- feat(quick-start): Add analytics code to the frontend to track quick start completion (#79092) by @priscilawebdev
+- ref(quick-start): Update 'project to set up' logic to default to the first project (#78460) by @priscilawebdev
+- ref(onboarding): Add pnpm to express js install step (#79093) by @priscilawebdev
+- feat(dynamic-sampling): add feature flag (#79084) by @constantinius
+- fix(loader): Catch errors in `sentryOnLoad` separately (#78993) by @mydea
+- ref(feedback): 401 for unauth'd POSTs to projectUserReports (#79069) by @aliu39
+- fix(issue-stream): reduce font size of events and user counts (#79028) by @MichaelSun48
+- fix(issue-stream): Fix bug where replay divider was shown despite no replays (#79068) by @MichaelSun48
+
+_Plus 1020 more_
+
24.9.0
------
diff --git a/api-docs/openapi.json b/api-docs/openapi.json
index 0fe59a3fda185..da16c3a2bf31d 100644
--- a/api-docs/openapi.json
+++ b/api-docs/openapi.json
@@ -132,31 +132,19 @@
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/": {
"$ref": "paths/events/project-event-details.json"
},
- "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/": {
- "$ref": "paths/events/project-events.json"
- },
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/issues/": {
"$ref": "paths/events/project-issues.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/tags/{key}/values/": {
+ "/api/0/issues/{issue_id}/tags/{key}/values/": {
"$ref": "paths/events/tag-values.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/tags/{key}/": {
+ "/api/0/issues/{issue_id}/tags/{key}/": {
"$ref": "paths/events/tag-details.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/hashes/": {
+ "/api/0/issues/{issue_id}/hashes/": {
"$ref": "paths/events/issue-hashes.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/oldest/": {
- "$ref": "paths/events/oldest-event.json"
- },
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/latest/": {
- "$ref": "paths/events/latest-event.json"
- },
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/": {
- "$ref": "paths/events/issue-events.json"
- },
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/": {
+ "/api/0/issues/{issue_id}/": {
"$ref": "paths/events/issue-details.json"
},
"/api/0/organizations/{organization_id_or_slug}/releases/": {
diff --git a/api-docs/paths/events/issue-details.json b/api-docs/paths/events/issue-details.json
index 77d2be8803084..ac00d75f3c5fc 100644
--- a/api-docs/paths/events/issue-details.json
+++ b/api-docs/paths/events/issue-details.json
@@ -212,7 +212,7 @@
"properties": {
"status": {
"type": "string",
- "description": "The new status for the issues. Valid values are `\"resolved\"`, `\"reprocessing\"`, `\"unresolved\"`, and `\"ignored\"`."
+ "description": "The new status for the issues. Valid values are `\"resolved\"`, `\"resolvedInNextRelease\"`, `\"unresolved\"`, and `\"ignored\"`."
},
"statusDetails": {
"type": "object",
diff --git a/api-docs/paths/events/issue-events.json b/api-docs/paths/events/issue-events.json
deleted file mode 100644
index 3e498fe26c7e9..0000000000000
--- a/api-docs/paths/events/issue-events.json
+++ /dev/null
@@ -1,105 +0,0 @@
-{
- "get": {
- "tags": ["Events"],
- "description": "This endpoint lists an issue's events.",
- "operationId": "List an Issue's Events",
- "parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issues belongs to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
- {
- "name": "issue_id",
- "in": "path",
- "description": "The ID of the issue to retrieve.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
- {
- "name": "full",
- "in": "query",
- "description": "If this is set to true then the event payload will include the full event body, including the stacktrace. \nSet to true to enable.",
- "schema": {
- "type": "boolean"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "Success",
- "content": {
- "application/json": {
- "schema": {
- "type": "array",
- "items": {
- "$ref": "../../components/schemas/event.json#/Event"
- }
- },
- "example": [
- {
- "eventID": "9fac2ceed9344f2bbfdd1fdacb0ed9b1",
- "tags": [
- {
- "key": "browser",
- "value": "Chrome 60.0"
- },
- {
- "key": "device",
- "value": "Other"
- },
- {
- "key": "environment",
- "value": "production"
- },
- {
- "value": "fatal",
- "key": "level"
- },
- {
- "key": "os",
- "value": "Mac OS X 10.12.6"
- },
- {
- "value": "CPython 2.7.16",
- "key": "runtime"
- },
- {
- "key": "release",
- "value": "17642328ead24b51867165985996d04b29310337"
- },
- {
- "key": "server_name",
- "value": "web1.example.com"
- }
- ],
- "dateCreated": "2020-09-11T17:46:36Z",
- "user": null,
- "message": "",
- "title": "This is an example Python exception",
- "id": "dfb1a2d057194e76a4186cc8a5271553",
- "platform": "python",
- "event.type": "error",
- "groupID": "1889724436"
- }
- ]
- }
- }
- },
- "403": {
- "description": "Forbidden"
- }
- },
- "security": [
- {
- "auth_token": ["event:read"]
- }
- ]
- }
-}
diff --git a/api-docs/paths/events/issue-hashes.json b/api-docs/paths/events/issue-hashes.json
index 6b3ba88548b6d..77acd0241f8cb 100644
--- a/api-docs/paths/events/issue-hashes.json
+++ b/api-docs/paths/events/issue-hashes.json
@@ -4,15 +4,6 @@
"description": "This endpoint lists an issue's hashes, which are the generated checksums used to aggregate individual events.",
"operationId": "List an Issue's Hashes",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/events/latest-event.json b/api-docs/paths/events/latest-event.json
deleted file mode 100644
index 42f6adba6b139..0000000000000
--- a/api-docs/paths/events/latest-event.json
+++ /dev/null
@@ -1,547 +0,0 @@
-{
- "get": {
- "tags": ["Events"],
- "description": "Retrieves the details of the latest event for an issue.",
- "operationId": "Retrieve the Latest Event for an Issue",
- "parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
- {
- "name": "issue_id",
- "in": "path",
- "description": "The ID of the issue.",
- "required": true,
- "schema": {
- "type": "string"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "Success",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "../../components/schemas/event.json#/EventDetailed"
- },
- "example": {
- "eventID": "9999aaaaca8b46d797c23c6077c6ff01",
- "dist": null,
- "userReport": null,
- "previousEventID": null,
- "message": "",
- "title": "This is an example Python exception",
- "id": "9999aaafcc8b46d797c23c6077c6ff01",
- "size": 107762,
- "errors": [
- {
- "data": {
- "column": 8,
- "source": "https://s1.sentry-cdn.com/_static/bloopbloop/sentry/dist/app.js.map",
- "row": 15
- },
- "message": "Invalid location in sourcemap",
- "type": "js_invalid_sourcemap_location"
- }
- ],
- "platform": "javascript",
- "nextEventID": "99f9e199e9a74a14bfef6196ad741619",
- "type": "error",
- "metadata": {
- "type": "ForbiddenError",
- "value": "GET /organizations/hellboy-meowmeow/users/ 403"
- },
- "tags": [
- {
- "value": "Chrome 83.0.4103",
- "key": "browser",
- "_meta": null
- },
- {
- "value": "Chrome",
- "key": "browser.name",
- "_meta": null
- },
- {
- "value": "prod",
- "key": "environment",
- "_meta": null
- },
- {
- "value": "yes",
- "key": "handled",
- "_meta": null
- },
- {
- "value": "error",
- "key": "level",
- "_meta": null
- },
- {
- "value": "generic",
- "key": "mechanism",
- "_meta": null
- }
- ],
- "dateCreated": "2020-06-17T22:26:56.098086Z",
- "dateReceived": "2020-06-17T22:26:56.428721Z",
- "user": {
- "username": null,
- "name": "Hell Boy",
- "ip_address": "192.168.1.1",
- "email": "hell@boy.cat",
- "data": {
- "isStaff": false
- },
- "id": "550747"
- },
- "entries": [
- {
- "type": "exception",
- "data": {
- "values": [
- {
- "stacktrace": {
- "frames": [
- {
- "function": "ignoreOnError",
- "errors": null,
- "colNo": 23,
- "vars": null,
- "package": null,
- "absPath": "webpack:////usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers.js",
- "inApp": false,
- "lineNo": 71,
- "module": "usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers",
- "filename": "/usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [66, " }"],
- [
- 67,
- " // Attempt to invoke user-land function"
- ],
- [
- 68,
- " // NOTE: If you are a Sentry user, and you are seeing this stack frame, it"
- ],
- [
- 69,
- " // means the sentry.javascript SDK caught an error invoking your application code. This"
- ],
- [
- 70,
- " // is expected behavior and NOT indicative of a bug with sentry.javascript."
- ],
- [
- 71,
- " return fn.apply(this, wrappedArguments);"
- ],
- [
- 72,
- " // tslint:enable:no-unsafe-any"
- ],
- [73, " }"],
- [74, " catch (ex) {"],
- [75, " ignoreNextOnError();"],
- [76, " withScope(function (scope) {"]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- },
- {
- "function": "apply",
- "errors": null,
- "colNo": 24,
- "vars": null,
- "package": null,
- "absPath": "webpack:////usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods.js",
- "inApp": false,
- "lineNo": 74,
- "module": "usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods",
- "filename": "/usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [69, " */"],
- [
- 70,
- " triggerAsync: function triggerAsync() {"
- ],
- [71, " var args = arguments,"],
- [72, " me = this;"],
- [73, " _.nextTick(function () {"],
- [74, " me.trigger.apply(me, args);"],
- [75, " });"],
- [76, " },"],
- [77, ""],
- [78, " /**"],
- [
- 79,
- " * Wraps the trigger mechanism with a deferral function."
- ]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- }
- ],
- "framesOmitted": null,
- "registers": null,
- "hasSystemFrames": true
- },
- "module": null,
- "rawStacktrace": {
- "frames": [
- {
- "function": "a",
- "errors": null,
- "colNo": 88800,
- "vars": null,
- "package": null,
- "absPath": "https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "inApp": false,
- "lineNo": 81,
- "module": null,
- "filename": "/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [76, "/*!"],
- [77, " Copyright (c) 2018 Jed Watson."],
- [
- 78,
- " Licensed under the MIT License (MIT), see"
- ],
- [
- 79,
- " http://jedwatson.github.io/react-select"
- ],
- [80, "*/"],
- [
- 81,
- "{snip} e,t)}));return e.handleEvent?e.handleEvent.apply(this,s):e.apply(this,s)}catch(e){throw c(),Object(o.m)((function(n){n.addEventProcessor((fu {snip}"
- ],
- [82, "/*!"],
- [83, " * JavaScript Cookie v2.2.1"],
- [
- 84,
- " * https://github.com/js-cookie/js-cookie"
- ],
- [85, " *"],
- [
- 86,
- " * Copyright 2006, 2015 Klaus Hartl & Fagner Brack"
- ]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- },
- {
- "function": null,
- "errors": null,
- "colNo": 149484,
- "vars": null,
- "package": null,
- "absPath": "https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "inApp": false,
- "lineNo": 119,
- "module": null,
- "filename": "/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [114, "/* @license"],
- [115, "Papa Parse"],
- [116, "v5.2.0"],
- [117, "https://github.com/mholt/PapaParse"],
- [118, "License: MIT"],
- [
- 119,
- "{snip} (){var e=arguments,t=this;r.nextTick((function(){t.trigger.apply(t,e)}))},deferWith:function(e){var t=this.trigger,n=this,r=function(){t.app {snip}"
- ],
- [120, "/**!"],
- [
- 121,
- " * @fileOverview Kickass library to create and place poppers near their reference elements."
- ],
- [122, " * @version 1.16.1"],
- [123, " * @license"],
- [
- 124,
- " * Copyright (c) 2016 Federico Zivolo and contributors"
- ]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- }
- ],
- "framesOmitted": null,
- "registers": null,
- "hasSystemFrames": true
- },
- "mechanism": {
- "type": "generic",
- "handled": true
- },
- "threadId": null,
- "value": "GET /organizations/hellboy-meowmeow/users/ 403",
- "type": "ForbiddenError"
- }
- ],
- "excOmitted": null,
- "hasSystemFrames": true
- }
- },
- {
- "type": "breadcrumbs",
- "data": {
- "values": [
- {
- "category": "tracing",
- "level": "debug",
- "event_id": null,
- "timestamp": "2020-06-17T22:26:55.266586Z",
- "data": null,
- "message": "[Tracing] pushActivity: idleTransactionStarted#1",
- "type": "debug"
- },
- {
- "category": "xhr",
- "level": "info",
- "event_id": null,
- "timestamp": "2020-06-17T22:26:55.619446Z",
- "data": {
- "url": "/api/0/internal/health/",
- "status_code": 200,
- "method": "GET"
- },
- "message": null,
- "type": "http"
- },
- {
- "category": "sentry.transaction",
- "level": "info",
- "event_id": null,
- "timestamp": "2020-06-17T22:26:55.945016Z",
- "data": null,
- "message": "7787a027f3fb46c985aaa2287b3f4d09",
- "type": "default"
- }
- ]
- }
- },
- {
- "type": "request",
- "data": {
- "fragment": null,
- "cookies": [],
- "inferredContentType": null,
- "env": null,
- "headers": [
- [
- "User-Agent",
- "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36"
- ]
- ],
- "url": "https://sentry.io/organizations/hellboy-meowmeow/issues/",
- "query": [["project", "5236886"]],
- "data": null,
- "method": null
- }
- }
- ],
- "packages": {},
- "sdk": {
- "version": "5.17.0",
- "name": "sentry.javascript.browser"
- },
- "_meta": {
- "user": null,
- "context": null,
- "entries": {},
- "contexts": null,
- "message": null,
- "packages": null,
- "tags": {},
- "sdk": null
- },
- "contexts": {
- "ForbiddenError": {
- "status": 403,
- "statusText": "Forbidden",
- "responseJSON": {
- "detail": "You do not have permission to perform this action."
- },
- "type": "default"
- },
- "browser": {
- "version": "83.0.4103",
- "type": "browser",
- "name": "Chrome"
- },
- "os": {
- "version": "10",
- "type": "os",
- "name": "Windows"
- },
- "trace": {
- "span_id": "83db1ad17e67dfe7",
- "type": "trace",
- "trace_id": "da6caabcd90e45fdb81f6655824a5f88",
- "op": "navigation"
- },
- "organization": {
- "type": "default",
- "id": "323938",
- "slug": "hellboy-meowmeow"
- }
- },
- "fingerprints": ["fbe908cc63d63ea9763fd84cb6bad177"],
- "context": {
- "resp": {
- "status": 403,
- "responseJSON": {
- "detail": "You do not have permission to perform this action."
- },
- "name": "ForbiddenError",
- "statusText": "Forbidden",
- "message": "GET /organizations/hellboy-meowmeow/users/ 403",
- "stack": "Error\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480441\n at u (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:51006)\n at Generator._invoke (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:50794)\n at Generator.A.forEach.e. [as next] (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:51429)\n at n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68684)\n at s (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68895)\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68954\n at new Promise ()\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68835\n at v (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480924)\n at m (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480152)\n at t.fetchMemberList (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:902983)\n at t.componentDidMount (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:900527)\n at t.componentDidMount (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:15597)\n at Pc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:101023)\n at t.unstable_runWithPriority (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:3462)\n at Ko (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45529)\n at Rc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:97371)\n at Oc 
(https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:87690)\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45820\n at t.unstable_runWithPriority (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:3462)\n at Ko (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45529)\n at Zo (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45765)\n at Jo (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45700)\n at gc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:84256)\n at Object.enqueueSetState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:50481)\n at t.M.setState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:173:1439)\n at t.onUpdate (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:543076)\n at a.n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149090)\n at a.emit (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:6550)\n at p.trigger (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149379)\n at p.onInitializeUrlState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:541711)\n at a.n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149090)\n at a.emit (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:6550)\n at Function.trigger (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149379)\n at 
https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149484\n at a (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:81:88800)"
- }
- },
- "release": {
- "dateReleased": "2020-06-17T19:21:02.186004Z",
- "newGroups": 4,
- "commitCount": 11,
- "url": "https://freight.getsentry.net/deploys/getsentry/production/8868/",
- "data": {},
- "lastDeploy": {
- "name": "b65bc521378269d3eaefdc964f8ef56621414943 to prod",
- "url": null,
- "environment": "prod",
- "dateStarted": null,
- "dateFinished": "2020-06-17T19:20:55.641748Z",
- "id": "6883490"
- },
- "deployCount": 1,
- "dateCreated": "2020-06-17T18:45:31.042157Z",
- "lastEvent": "2020-07-08T21:21:21Z",
- "version": "b65bc521378269d3eaefdc964f8ef56621414943",
- "firstEvent": "2020-06-17T22:25:14Z",
- "lastCommit": {
- "repository": {
- "status": "active",
- "integrationId": "2933",
- "externalSlug": "getsentry/getsentry",
- "name": "getsentry/getsentry",
- "provider": {
- "id": "integrations:github",
- "name": "GitHub"
- },
- "url": "https://github.com/getsentry/getsentry",
- "id": "2",
- "dateCreated": "2016-10-10T21:36:45.373994Z"
- },
- "releases": [
- {
- "dateReleased": "2020-06-23T13:26:18.427090Z",
- "url": "https://freight.getsentry.net/deploys/getsentry/staging/2077/",
- "dateCreated": "2020-06-23T13:22:50.420265Z",
- "version": "f3783e5fe710758724f14267439fd46cc2bf5918",
- "shortVersion": "f3783e5fe710758724f14267439fd46cc2bf5918",
- "ref": "perf/source-maps-test"
- },
- {
- "dateReleased": "2020-06-17T19:21:02.186004Z",
- "url": "https://freight.getsentry.net/deploys/getsentry/production/8868/",
- "dateCreated": "2020-06-17T18:45:31.042157Z",
- "version": "b65bc521378269d3eaefdc964f8ef56621414943",
- "shortVersion": "b65bc521378269d3eaefdc964f8ef56621414943",
- "ref": "master"
- }
- ],
- "dateCreated": "2020-06-17T18:43:37Z",
- "message": "feat(billing): Get a lot of money",
- "id": "b65bc521378269d3eaefdc964f8ef56621414943"
- },
- "shortVersion": "b65bc521378269d3eaefdc964f8ef56621414943",
- "authors": [
- {
- "username": "a37a1b4520ce46cea147ae2885a4e7e7",
- "lastLogin": "2020-09-14T22:34:55.550640Z",
- "isSuperuser": false,
- "isManaged": false,
- "experiments": {},
- "lastActive": "2020-09-15T22:13:20.503880Z",
- "isStaff": false,
- "id": "655784",
- "isActive": true,
- "has2fa": false,
- "name": "hell.boy@sentry.io",
- "avatarUrl": "https://secure.gravatar.com/avatar/eaa22e25b3a984659420831a77e4874e?s=32&d=mm",
- "dateJoined": "2020-04-20T16:21:25.365772Z",
- "emails": [
- {
- "is_verified": false,
- "id": "784574",
- "email": "hellboy@gmail.com"
- },
- {
- "is_verified": true,
- "id": "749185",
- "email": "hell.boy@sentry.io"
- }
- ],
- "avatar": {
- "avatarUuid": null,
- "avatarType": "letter_avatar"
- },
- "hasPasswordAuth": false,
- "email": "hell.boy@sentry.io"
- }
- ],
- "owner": null,
- "ref": "master",
- "projects": [
- {
- "name": "Sentry CSP",
- "slug": "sentry-csp"
- },
- {
- "name": "Backend",
- "slug": "sentry"
- },
- {
- "name": "Frontend",
- "slug": "javascript"
- }
- ]
- },
- "groupID": "1341191803"
- }
- }
- }
- },
- "403": {
- "description": "Forbidden"
- }
- },
- "security": [
- {
- "auth_token": ["event:read"]
- }
- ]
- }
-}
diff --git a/api-docs/paths/events/oldest-event.json b/api-docs/paths/events/oldest-event.json
deleted file mode 100644
index bcc625688907e..0000000000000
--- a/api-docs/paths/events/oldest-event.json
+++ /dev/null
@@ -1,547 +0,0 @@
-{
- "get": {
- "tags": ["Events"],
- "description": "Retrieves the details of the oldest event for an issue.",
- "operationId": "Retrieve the Oldest Event for an Issue",
- "parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
- {
- "name": "issue_id",
- "in": "path",
- "description": "The ID of the issue.",
- "required": true,
- "schema": {
- "type": "string"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "Success",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "../../components/schemas/event.json#/EventDetailed"
- },
- "example": {
- "eventID": "9999aaaaca8b46d797c23c6077c6ff01",
- "dist": null,
- "userReport": null,
- "previousEventID": null,
- "message": "",
- "title": "This is an example Python exception",
- "id": "9999aaafcc8b46d797c23c6077c6ff01",
- "size": 107762,
- "errors": [
- {
- "data": {
- "column": 8,
- "source": "https://s1.sentry-cdn.com/_static/bloopbloop/sentry/dist/app.js.map",
- "row": 15
- },
- "message": "Invalid location in sourcemap",
- "type": "js_invalid_sourcemap_location"
- }
- ],
- "platform": "javascript",
- "nextEventID": "99f9e199e9a74a14bfef6196ad741619",
- "type": "error",
- "metadata": {
- "type": "ForbiddenError",
- "value": "GET /organizations/hellboy-meowmeow/users/ 403"
- },
- "tags": [
- {
- "value": "Chrome 83.0.4103",
- "key": "browser",
- "_meta": null
- },
- {
- "value": "Chrome",
- "key": "browser.name",
- "_meta": null
- },
- {
- "value": "prod",
- "key": "environment",
- "_meta": null
- },
- {
- "value": "yes",
- "key": "handled",
- "_meta": null
- },
- {
- "value": "error",
- "key": "level",
- "_meta": null
- },
- {
- "value": "generic",
- "key": "mechanism",
- "_meta": null
- }
- ],
- "dateCreated": "2020-06-17T22:26:56.098086Z",
- "dateReceived": "2020-06-17T22:26:56.428721Z",
- "user": {
- "username": null,
- "name": "Hell Boy",
- "ip_address": "192.168.1.1",
- "email": "hell@boy.cat",
- "data": {
- "isStaff": false
- },
- "id": "550747"
- },
- "entries": [
- {
- "type": "exception",
- "data": {
- "values": [
- {
- "stacktrace": {
- "frames": [
- {
- "function": "ignoreOnError",
- "errors": null,
- "colNo": 23,
- "vars": null,
- "package": null,
- "absPath": "webpack:////usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers.js",
- "inApp": false,
- "lineNo": 71,
- "module": "usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers",
- "filename": "/usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [66, " }"],
- [
- 67,
- " // Attempt to invoke user-land function"
- ],
- [
- 68,
- " // NOTE: If you are a Sentry user, and you are seeing this stack frame, it"
- ],
- [
- 69,
- " // means the sentry.javascript SDK caught an error invoking your application code. This"
- ],
- [
- 70,
- " // is expected behavior and NOT indicative of a bug with sentry.javascript."
- ],
- [
- 71,
- " return fn.apply(this, wrappedArguments);"
- ],
- [
- 72,
- " // tslint:enable:no-unsafe-any"
- ],
- [73, " }"],
- [74, " catch (ex) {"],
- [75, " ignoreNextOnError();"],
- [76, " withScope(function (scope) {"]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- },
- {
- "function": "apply",
- "errors": null,
- "colNo": 24,
- "vars": null,
- "package": null,
- "absPath": "webpack:////usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods.js",
- "inApp": false,
- "lineNo": 74,
- "module": "usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods",
- "filename": "/usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [69, " */"],
- [
- 70,
- " triggerAsync: function triggerAsync() {"
- ],
- [71, " var args = arguments,"],
- [72, " me = this;"],
- [73, " _.nextTick(function () {"],
- [74, " me.trigger.apply(me, args);"],
- [75, " });"],
- [76, " },"],
- [77, ""],
- [78, " /**"],
- [
- 79,
- " * Wraps the trigger mechanism with a deferral function."
- ]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- }
- ],
- "framesOmitted": null,
- "registers": null,
- "hasSystemFrames": true
- },
- "module": null,
- "rawStacktrace": {
- "frames": [
- {
- "function": "a",
- "errors": null,
- "colNo": 88800,
- "vars": null,
- "package": null,
- "absPath": "https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "inApp": false,
- "lineNo": 81,
- "module": null,
- "filename": "/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [76, "/*!"],
- [77, " Copyright (c) 2018 Jed Watson."],
- [
- 78,
- " Licensed under the MIT License (MIT), see"
- ],
- [
- 79,
- " http://jedwatson.github.io/react-select"
- ],
- [80, "*/"],
- [
- 81,
- "{snip} e,t)}));return e.handleEvent?e.handleEvent.apply(this,s):e.apply(this,s)}catch(e){throw c(),Object(o.m)((function(n){n.addEventProcessor((fu {snip}"
- ],
- [82, "/*!"],
- [83, " * JavaScript Cookie v2.2.1"],
- [
- 84,
- " * https://github.com/js-cookie/js-cookie"
- ],
- [85, " *"],
- [
- 86,
- " * Copyright 2006, 2015 Klaus Hartl & Fagner Brack"
- ]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- },
- {
- "function": null,
- "errors": null,
- "colNo": 149484,
- "vars": null,
- "package": null,
- "absPath": "https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "inApp": false,
- "lineNo": 119,
- "module": null,
- "filename": "/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
- "platform": null,
- "instructionAddr": null,
- "context": [
- [114, "/* @license"],
- [115, "Papa Parse"],
- [116, "v5.2.0"],
- [117, "https://github.com/mholt/PapaParse"],
- [118, "License: MIT"],
- [
- 119,
- "{snip} (){var e=arguments,t=this;r.nextTick((function(){t.trigger.apply(t,e)}))},deferWith:function(e){var t=this.trigger,n=this,r=function(){t.app {snip}"
- ],
- [120, "/**!"],
- [
- 121,
- " * @fileOverview Kickass library to create and place poppers near their reference elements."
- ],
- [122, " * @version 1.16.1"],
- [123, " * @license"],
- [
- 124,
- " * Copyright (c) 2016 Federico Zivolo and contributors"
- ]
- ],
- "symbolAddr": null,
- "trust": null,
- "symbol": null
- }
- ],
- "framesOmitted": null,
- "registers": null,
- "hasSystemFrames": true
- },
- "mechanism": {
- "type": "generic",
- "handled": true
- },
- "threadId": null,
- "value": "GET /organizations/hellboy-meowmeow/users/ 403",
- "type": "ForbiddenError"
- }
- ],
- "excOmitted": null,
- "hasSystemFrames": true
- }
- },
- {
- "type": "breadcrumbs",
- "data": {
- "values": [
- {
- "category": "tracing",
- "level": "debug",
- "event_id": null,
- "timestamp": "2020-06-17T22:26:55.266586Z",
- "data": null,
- "message": "[Tracing] pushActivity: idleTransactionStarted#1",
- "type": "debug"
- },
- {
- "category": "xhr",
- "level": "info",
- "event_id": null,
- "timestamp": "2020-06-17T22:26:55.619446Z",
- "data": {
- "url": "/api/0/internal/health/",
- "status_code": 200,
- "method": "GET"
- },
- "message": null,
- "type": "http"
- },
- {
- "category": "sentry.transaction",
- "level": "info",
- "event_id": null,
- "timestamp": "2020-06-17T22:26:55.945016Z",
- "data": null,
- "message": "7787a027f3fb46c985aaa2287b3f4d09",
- "type": "default"
- }
- ]
- }
- },
- {
- "type": "request",
- "data": {
- "fragment": null,
- "cookies": [],
- "inferredContentType": null,
- "env": null,
- "headers": [
- [
- "User-Agent",
- "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36"
- ]
- ],
- "url": "https://sentry.io/organizations/hellboy-meowmeow/issues/",
- "query": [["project", "5236886"]],
- "data": null,
- "method": null
- }
- }
- ],
- "packages": {},
- "sdk": {
- "version": "5.17.0",
- "name": "sentry.javascript.browser"
- },
- "_meta": {
- "user": null,
- "context": null,
- "entries": {},
- "contexts": null,
- "message": null,
- "packages": null,
- "tags": {},
- "sdk": null
- },
- "contexts": {
- "ForbiddenError": {
- "status": 403,
- "statusText": "Forbidden",
- "responseJSON": {
- "detail": "You do not have permission to perform this action."
- },
- "type": "default"
- },
- "browser": {
- "version": "83.0.4103",
- "type": "browser",
- "name": "Chrome"
- },
- "os": {
- "version": "10",
- "type": "os",
- "name": "Windows"
- },
- "trace": {
- "span_id": "83db1ad17e67dfe7",
- "type": "trace",
- "trace_id": "da6caabcd90e45fdb81f6655824a5f88",
- "op": "navigation"
- },
- "organization": {
- "type": "default",
- "id": "323938",
- "slug": "hellboy-meowmeow"
- }
- },
- "fingerprints": ["fbe908cc63d63ea9763fd84cb6bad177"],
- "context": {
- "resp": {
- "status": 403,
- "responseJSON": {
- "detail": "You do not have permission to perform this action."
- },
- "name": "ForbiddenError",
- "statusText": "Forbidden",
- "message": "GET /organizations/hellboy-meowmeow/users/ 403",
- "stack": "Error\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480441\n at u (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:51006)\n at Generator._invoke (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:50794)\n at Generator.A.forEach.e. [as next] (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:51429)\n at n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68684)\n at s (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68895)\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68954\n at new Promise ()\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68835\n at v (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480924)\n at m (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480152)\n at t.fetchMemberList (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:902983)\n at t.componentDidMount (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:900527)\n at t.componentDidMount (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:15597)\n at Pc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:101023)\n at t.unstable_runWithPriority (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:3462)\n at Ko (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45529)\n at Rc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:97371)\n at Oc 
(https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:87690)\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45820\n at t.unstable_runWithPriority (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:3462)\n at Ko (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45529)\n at Zo (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45765)\n at Jo (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45700)\n at gc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:84256)\n at Object.enqueueSetState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:50481)\n at t.M.setState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:173:1439)\n at t.onUpdate (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:543076)\n at a.n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149090)\n at a.emit (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:6550)\n at p.trigger (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149379)\n at p.onInitializeUrlState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:541711)\n at a.n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149090)\n at a.emit (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:6550)\n at Function.trigger (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149379)\n at 
https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149484\n at a (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:81:88800)"
- }
- },
- "release": {
- "dateReleased": "2020-06-17T19:21:02.186004Z",
- "newGroups": 4,
- "commitCount": 11,
- "url": "https://freight.getsentry.net/deploys/getsentry/production/8868/",
- "data": {},
- "lastDeploy": {
- "name": "b65bc521378269d3eaefdc964f8ef56621414943 to prod",
- "url": null,
- "environment": "prod",
- "dateStarted": null,
- "dateFinished": "2020-06-17T19:20:55.641748Z",
- "id": "6883490"
- },
- "deployCount": 1,
- "dateCreated": "2020-06-17T18:45:31.042157Z",
- "lastEvent": "2020-07-08T21:21:21Z",
- "version": "b65bc521378269d3eaefdc964f8ef56621414943",
- "firstEvent": "2020-06-17T22:25:14Z",
- "lastCommit": {
- "repository": {
- "status": "active",
- "integrationId": "2933",
- "externalSlug": "getsentry/getsentry",
- "name": "getsentry/getsentry",
- "provider": {
- "id": "integrations:github",
- "name": "GitHub"
- },
- "url": "https://github.com/getsentry/getsentry",
- "id": "2",
- "dateCreated": "2016-10-10T21:36:45.373994Z"
- },
- "releases": [
- {
- "dateReleased": "2020-06-23T13:26:18.427090Z",
- "url": "https://freight.getsentry.net/deploys/getsentry/staging/2077/",
- "dateCreated": "2020-06-23T13:22:50.420265Z",
- "version": "f3783e5fe710758724f14267439fd46cc2bf5918",
- "shortVersion": "f3783e5fe710758724f14267439fd46cc2bf5918",
- "ref": "perf/source-maps-test"
- },
- {
- "dateReleased": "2020-06-17T19:21:02.186004Z",
- "url": "https://freight.getsentry.net/deploys/getsentry/production/8868/",
- "dateCreated": "2020-06-17T18:45:31.042157Z",
- "version": "b65bc521378269d3eaefdc964f8ef56621414943",
- "shortVersion": "b65bc521378269d3eaefdc964f8ef56621414943",
- "ref": "master"
- }
- ],
- "dateCreated": "2020-06-17T18:43:37Z",
- "message": "feat(billing): Get a lot of money",
- "id": "b65bc521378269d3eaefdc964f8ef56621414943"
- },
- "shortVersion": "b65bc521378269d3eaefdc964f8ef56621414943",
- "authors": [
- {
- "username": "a37a1b4520ce46cea147ae2885a4e7e7",
- "lastLogin": "2020-09-14T22:34:55.550640Z",
- "isSuperuser": false,
- "isManaged": false,
- "experiments": {},
- "lastActive": "2020-09-15T22:13:20.503880Z",
- "isStaff": false,
- "id": "655784",
- "isActive": true,
- "has2fa": false,
- "name": "hell.boy@sentry.io",
- "avatarUrl": "https://secure.gravatar.com/avatar/eaa22e25b3a984659420831a77e4874e?s=32&d=mm",
- "dateJoined": "2020-04-20T16:21:25.365772Z",
- "emails": [
- {
- "is_verified": false,
- "id": "784574",
- "email": "hellboy@gmail.com"
- },
- {
- "is_verified": true,
- "id": "749185",
- "email": "hell.boy@sentry.io"
- }
- ],
- "avatar": {
- "avatarUuid": null,
- "avatarType": "letter_avatar"
- },
- "hasPasswordAuth": false,
- "email": "hell.boy@sentry.io"
- }
- ],
- "owner": null,
- "ref": "master",
- "projects": [
- {
- "name": "Sentry CSP",
- "slug": "sentry-csp"
- },
- {
- "name": "Backend",
- "slug": "sentry"
- },
- {
- "name": "Frontend",
- "slug": "javascript"
- }
- ]
- },
- "groupID": "1341191803"
- }
- }
- }
- },
- "403": {
- "description": "Forbidden"
- }
- },
- "security": [
- {
- "auth_token": ["event:read"]
- }
- ]
- }
-}
diff --git a/api-docs/paths/events/project-events.json b/api-docs/paths/events/project-events.json
deleted file mode 100644
index af3376c16af70..0000000000000
--- a/api-docs/paths/events/project-events.json
+++ /dev/null
@@ -1,108 +0,0 @@
-{
- "get": {
- "tags": ["Events"],
- "description": "Return a list of error events bound to a project.",
- "operationId": "List a Project's Error Events",
- "parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the events belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
- {
- "name": "project_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the project the events belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
- {
- "name": "full",
- "in": "query",
- "description": "If this is set to true then the event payload will include the full event body, including the stacktrace. \nSet to true to enable.",
- "schema": {
- "type": "boolean"
- }
- },
- {
- "$ref": "../../components/parameters/pagination-cursor.json#/PaginationCursor"
- }
- ],
- "responses": {
- "200": {
- "description": "Success",
- "content": {
- "application/json": {
- "schema": {
- "type": "array",
- "items": {
- "$ref": "../../components/schemas/event.json#/Event"
- }
- },
- "example": [
- {
- "eventID": "9fac2ceed9344f2bbfdd1fdacb0ed9b1",
- "tags": [
- {
- "key": "browser",
- "value": "Chrome 60.0"
- },
- {
- "key": "device",
- "value": "Other"
- },
- {
- "key": "environment",
- "value": "production"
- },
- {
- "value": "fatal",
- "key": "level"
- },
- {
- "key": "os",
- "value": "Mac OS X 10.12.6"
- },
- {
- "value": "CPython 2.7.16",
- "key": "runtime"
- },
- {
- "key": "release",
- "value": "17642328ead24b51867165985996d04b29310337"
- },
- {
- "key": "server_name",
- "value": "web1.example.com"
- }
- ],
- "dateCreated": "2020-09-11T17:46:36Z",
- "user": null,
- "message": "",
- "title": "This is an example Python exception",
- "id": "dfb1a2d057194e76a4186cc8a5271553",
- "platform": "python",
- "event.type": "error",
- "groupID": "1889724436"
- }
- ]
- }
- }
- },
- "403": {
- "description": "Forbidden"
- }
- },
- "security": [
- {
- "auth_token": ["project:read"]
- }
- ]
- }
-}
diff --git a/api-docs/paths/events/project-issues.json b/api-docs/paths/events/project-issues.json
index 25baa3d6a81fc..1fab060ca1d6e 100644
--- a/api-docs/paths/events/project-issues.json
+++ b/api-docs/paths/events/project-issues.json
@@ -1,7 +1,7 @@
{
"get": {
"tags": ["Events"],
- "description": "Return a list of issues (groups) bound to a project. All parameters are supplied as query string parameters. \n\n A default query of ``is:unresolved`` is applied. To return results with other statuses send an new query value (i.e. ``?query=`` for all results).\n\nThe ``statsPeriod`` parameter can be used to select the timeline stats which should be present. Possible values are: ``\"\"`` (disable),``\"24h\"`` (default), ``\"14d\"``",
+ "description": "Return a list of issues (groups) bound to a project. All parameters are supplied as query string parameters. \n\n A default query of ``is:unresolved`` is applied. To return results with other statuses send an new query value (i.e. ``?query=`` for all results).\n\nThe ``statsPeriod`` parameter can be used to select the timeline stats which should be present. Possible values are: ``\"\"`` (disable),``\"24h\"`` (default), ``\"14d\"``\n\nUser feedback items from the [User Feedback Widget](https://docs.sentry.io/product/user-feedback/#user-feedback-widget) are built off the issue platform, so to return a list of user feedback items for a specific project, filter for `issue.category:feedback`.",
"operationId": "List a Project's Issues",
"parameters": [
{
diff --git a/api-docs/paths/events/tag-details.json b/api-docs/paths/events/tag-details.json
index bd286ac3fc57c..d2b1cc93ba44e 100644
--- a/api-docs/paths/events/tag-details.json
+++ b/api-docs/paths/events/tag-details.json
@@ -4,15 +4,6 @@
"description": "Returns details for given tag key related to an issue.",
"operationId": "Retrieve Tag Details",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belongs to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/events/tag-values.json b/api-docs/paths/events/tag-values.json
index f25210f905342..323b3d33bc8f8 100644
--- a/api-docs/paths/events/tag-values.json
+++ b/api-docs/paths/events/tag-values.json
@@ -4,15 +4,6 @@
"description": "Returns details for given tag key related to an issue. \n\nWhen [paginated](/api/pagination) can return at most 1000 values.",
"operationId": "List a Tag's Values Related to an Issue",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belongs to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/projects/user-feedback.json b/api-docs/paths/projects/user-feedback.json
index 4f52258237a19..deeaa6bf8a23c 100644
--- a/api-docs/paths/projects/user-feedback.json
+++ b/api-docs/paths/projects/user-feedback.json
@@ -1,7 +1,7 @@
{
"get": {
"tags": ["Projects"],
- "description": "Return a list of user feedback items within this project.\n\n*This list does not include submissions from the [User Feedback Widget](https://docs.sentry.io/product/user-feedback/#user-feedback-widget). This is because it is based on an older format called User Reports - read more [here](https://develop.sentry.dev/application/feedback-architecture/#user-reports).*",
+ "description": "Return a list of user feedback items within this project.\n\n*This list does not include submissions from the [User Feedback Widget](https://docs.sentry.io/product/user-feedback/#user-feedback-widget). This is because it is based on an older format called User Reports - read more [here](https://develop.sentry.dev/application/feedback-architecture/#user-reports). To return a list of user feedback items from the widget, please use the [issue API](https://docs.sentry.io/api/events/list-a-projects-issues/)* with the filter `issue.category:feedback`.*",
"operationId": "List a Project's User Feedback",
"parameters": [
{
diff --git a/biome.json b/biome.json
index a28089f817376..90eb877cb2f90 100644
--- a/biome.json
+++ b/biome.json
@@ -39,9 +39,7 @@
"noRestrictedImports": {
"level": "warn",
"options": {
- "paths": {
- "react-router": "Do not import from react-router. While we transition to 6 there are shims to import from"
- }
+ "paths": {}
}
}
},
diff --git a/devenv/config.ini b/devenv/config.ini
index 4eb73da0f3d85..fe11192399824 100644
--- a/devenv/config.ini
+++ b/devenv/config.ini
@@ -1,3 +1,6 @@
+[devenv]
+minimum_version = 1.13.0
+
[venv.sentry]
python = 3.12.6
path = .venv
@@ -51,6 +54,18 @@ linux_arm64_sha256 = 6ecba675e90d154f22e20200fa5684f20ad1495b73c0462f1bd7da4e9d0
# used for autoupdate
version = v0.6.6
+[lima]
+darwin_x86_64 = https://github.com/lima-vm/lima/releases/download/v0.19.1/lima-0.19.1-Darwin-x86_64.tar.gz
+darwin_x86_64_sha256 = ac8827479f66ef1b288b31f164b22f6433faa14c44ce5bbebe09e6e913582479
+darwin_arm64 = https://github.com/lima-vm/lima/releases/download/v0.19.1/lima-0.19.1-Darwin-arm64.tar.gz
+darwin_arm64_sha256 = 0dfcf3a39782baf1c2ea43cf026f8df0321c671d914c105fbb78de507aa8bda4
+linux_x86_64 = https://github.com/lima-vm/lima/releases/download/v0.19.1/lima-0.19.1-Linux-x86_64.tar.gz
+linux_x86_64_sha256 = 7d18b1716aae14bf98d6ea93a703e8877b0c3142f7ba2e87401d47d5d0fe3ff1
+linux_arm64 = https://github.com/lima-vm/lima/releases/download/v0.19.1/lima-0.19.1-Linux-aarch64.tar.gz
+linux_arm64_sha256 = c55e57ddbefd9988d0f3676bb873bcc6e0f7b3c3d47a1f07599ee151c5198d96
+# used for autoupdate
+version = 0.19.1
+
# kept here only for compatibility with older `devenv`
[python]
version = 3.12.6
diff --git a/devenv/sync.py b/devenv/sync.py
index 8b3c18dd663c5..b822d2c864e06 100644
--- a/devenv/sync.py
+++ b/devenv/sync.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import importlib
import os
import shlex
import subprocess
@@ -70,7 +71,33 @@ def run_procs(
return all_good
+# Temporary, see https://github.com/getsentry/sentry/pull/78881
+def check_minimum_version(minimum_version: str):
+ version = importlib.metadata.version("sentry-devenv")
+
+ parsed_version = tuple(map(int, version.split(".")))
+ parsed_minimum_version = tuple(map(int, minimum_version.split(".")))
+
+ if parsed_version < parsed_minimum_version:
+ raise SystemExit(
+ f"""
+Hi! To reduce potential breakage we've defined a minimum
+devenv version ({minimum_version}) to run sync.
+
+Please run the following to update your global devenv to the minimum:
+
+{constants.root}/venv/bin/pip install -U 'sentry-devenv=={minimum_version}'
+
+Then, use it to run sync this one time.
+
+{constants.root}/bin/devenv sync
+"""
+ )
+
+
def main(context: dict[str, str]) -> int:
+ check_minimum_version("1.13.0")
+
repo = context["repo"]
reporoot = context["reporoot"]
repo_config = config.get_config(f"{reporoot}/devenv/config.ini")
@@ -82,20 +109,15 @@ def main(context: dict[str, str]) -> int:
# repo-local devenv needs to update itself first with a successful sync
# so it'll take 2 syncs to get onto devenv-managed node, it is what it is
- try:
- from devenv.lib import node
+ from devenv.lib import node
- node.install(
- repo_config["node"]["version"],
- repo_config["node"][constants.SYSTEM_MACHINE],
- repo_config["node"][f"{constants.SYSTEM_MACHINE}_sha256"],
- reporoot,
- )
- node.install_yarn(repo_config["node"]["yarn_version"], reporoot)
- except ImportError:
- from devenv.lib import volta
-
- volta.install(reporoot)
+ node.install(
+ repo_config["node"]["version"],
+ repo_config["node"][constants.SYSTEM_MACHINE],
+ repo_config["node"][f"{constants.SYSTEM_MACHINE}_sha256"],
+ reporoot,
+ )
+ node.install_yarn(repo_config["node"]["yarn_version"], reporoot)
# no more imports from devenv past this point! if the venv is recreated
# then we won't have access to devenv libs until it gets reinstalled
@@ -108,27 +130,18 @@ def main(context: dict[str, str]) -> int:
venv.ensure(venv_dir, python_version, url, sha256)
if constants.DARWIN:
- try:
- colima.install(
- repo_config["colima"]["version"],
- repo_config["colima"][constants.SYSTEM_MACHINE],
- repo_config["colima"][f"{constants.SYSTEM_MACHINE}_sha256"],
- reporoot,
- )
- except TypeError:
- # this is needed for devenv <=1.4.0,>1.2.3 to finish syncing and therefore update itself
- colima.install(
- repo_config["colima"]["version"],
- repo_config["colima"][constants.SYSTEM_MACHINE],
- repo_config["colima"][f"{constants.SYSTEM_MACHINE}_sha256"],
- )
-
- # TODO: move limactl version into per-repo config
- try:
- limactl.install(reporoot)
- except TypeError:
- # this is needed for devenv <=1.4.0,>1.2.3 to finish syncing and therefore update itself
- limactl.install()
+ colima.install(
+ repo_config["colima"]["version"],
+ repo_config["colima"][constants.SYSTEM_MACHINE],
+ repo_config["colima"][f"{constants.SYSTEM_MACHINE}_sha256"],
+ reporoot,
+ )
+ limactl.install(
+ repo_config["lima"]["version"],
+ repo_config["lima"][constants.SYSTEM_MACHINE],
+ repo_config["lima"][f"{constants.SYSTEM_MACHINE}_sha256"],
+ reporoot,
+ )
if not run_procs(
repo,
diff --git a/devservices/clickhouse/config.xml b/devservices/clickhouse/config.xml
new file mode 100644
index 0000000000000..327d60661b29d
--- /dev/null
+++ b/devservices/clickhouse/config.xml
@@ -0,0 +1,6 @@
+
+ 0.3
+
+ 1
+
+
diff --git a/devservices/docker-compose-testing.yml b/devservices/docker-compose-testing.yml
new file mode 100644
index 0000000000000..aa0ddafe656bb
--- /dev/null
+++ b/devservices/docker-compose-testing.yml
@@ -0,0 +1,282 @@
+x-restart-policy: &restart_policy
+ restart: unless-stopped
+x-depends_on-healthy: &depends_on-healthy
+ condition: service_healthy
+x-depends_on-default: &depends_on-default
+ condition: service_started
+x-healthcheck-defaults: &healthcheck_defaults
+ interval: 30s
+ timeout: 1m30s
+ retries: 10
+ start_period: 10s
+services:
+ redis:
+ <<: *restart_policy
+ container_name: sentry_redis
+ image: ghcr.io/getsentry/image-mirror-library-redis:5.0-alpine
+ healthcheck:
+ <<: *healthcheck_defaults
+ test: redis-cli ping
+ command:
+ [
+ 'redis-server',
+ '--appendonly',
+ 'yes',
+ '--save',
+ '60',
+ '20',
+ '--auto-aof-rewrite-percentage',
+ '100',
+ '--auto-aof-rewrite-min-size',
+ '64mb',
+ ]
+ volumes:
+ - 'sentry-redis:/data'
+ ports:
+ - '6379:6379'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ postgres:
+ <<: *restart_policy
+ container_name: sentry_postgres
+ # Using the same postgres version as Sentry dev for consistency purposes
+ image: 'ghcr.io/getsentry/image-mirror-library-postgres:14-alpine'
+ healthcheck:
+ <<: *healthcheck_defaults
+ # Using default user "postgres" from sentry/sentry.conf.example.py or value of POSTGRES_USER if provided
+ test: ['CMD-SHELL', 'pg_isready -U ${POSTGRES_USER:-postgres}']
+ 'command':
+ [
+ 'postgres',
+ '-c',
+ 'wal_level=logical',
+ '-c',
+ 'max_replication_slots=1',
+ '-c',
+ 'max_wal_senders=1',
+ ]
+ environment:
+ POSTGRES_HOST_AUTH_METHOD: 'trust'
+ POSTGRES_DB: 'sentry'
+ volumes:
+ - 'sentry-postgres:/var/lib/postgresql/data'
+ ports:
+ - '5432:5432'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ kafka:
+ <<: *restart_policy
+ image: 'ghcr.io/getsentry/image-mirror-confluentinc-cp-kafka:7.5.0'
+ container_name: sentry_kafka
+ environment:
+ # https://docs.confluent.io/platform/current/installation/docker/config-reference.html#cp-kakfa-example
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@127.0.0.1:29093'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_NODE_ID: '1'
+ CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
+ KAFKA_LISTENERS: 'PLAINTEXT://0.0.0.0:29092,INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9092,CONTROLLER://0.0.0.0:29093'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://127.0.0.1:29092,INTERNAL://kafka:9093,EXTERNAL://127.0.0.1:9092'
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'PLAINTEXT:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,CONTROLLER:PLAINTEXT'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: '1'
+ KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS: '1'
+ KAFKA_LOG_RETENTION_HOURS: '24'
+ KAFKA_MESSAGE_MAX_BYTES: '50000000' #50MB or bust
+ KAFKA_MAX_REQUEST_SIZE: '50000000' #50MB on requests apparently too
+ volumes:
+ - 'sentry-kafka:/var/lib/kafka/data'
+ - 'sentry-kafka-log:/var/lib/kafka/log'
+ healthcheck:
+ <<: *healthcheck_defaults
+ test: ['CMD-SHELL', 'nc -z localhost 9092']
+ interval: 10s
+ timeout: 10s
+ retries: 30
+ ports:
+ - '9092:9092'
+ - '9093:9093'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ clickhouse:
+ <<: *restart_policy
+ container_name: sentry_clickhouse
+ image: 'ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.3.19.33.altinitystable'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ volumes:
+ - 'sentry-clickhouse:/var/lib/clickhouse'
+ - 'sentry-clickhouse-log:/var/log/clickhouse-server'
+ - type: bind
+ read_only: true
+ source: ./clickhouse/config.xml
+ target: /etc/clickhouse-server/config.d/sentry.xml
+ healthcheck:
+ test: [
+ 'CMD-SHELL',
+ # Manually override any http_proxy envvar that might be set, because
+ # this wget does not support no_proxy. See:
+ # https://github.com/getsentry/self-hosted/issues/1537
+ "http_proxy='' wget -nv -t1 --spider 'http://localhost:8123/' || exit 1",
+ ]
+ interval: 10s
+ timeout: 10s
+ retries: 30
+ ports:
+ - '8123:8123'
+ - '9000:9000'
+ - '9009:9009'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ symbolicator:
+ <<: *restart_policy
+ container_name: sentry_symbolicator
+ image: 'us-central1-docker.pkg.dev/sentryio/symbolicator/image:nightly'
+ volumes:
+ - 'sentry-symbolicator:/data'
+ - type: bind
+ read_only: true
+ source: ./symbolicator
+ target: /etc/symbolicator
+ command: run -c /etc/symbolicator/config.yml
+ ports:
+ - '3021:3021'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ vroom:
+ <<: *restart_policy
+ container_name: sentry_vroom
+ image: 'us-central1-docker.pkg.dev/sentryio/vroom/vroom:latest'
+ environment:
+ SENTRY_KAFKA_BROKERS_PROFILING: 'sentry_kafka:9092'
+ SENTRY_KAFKA_BROKERS_OCCURRENCES: 'sentry_kafka:9092'
+ SENTRY_BUCKET_PROFILES: file://localhost//var/lib/sentry-profiles
+ SENTRY_SNUBA_HOST: 'http://snuba-api:1218'
+ volumes:
+ - sentry-vroom:/var/lib/sentry-profiles
+ depends_on:
+ kafka:
+ <<: *depends_on-healthy
+ ports:
+ - '8085:8085'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ snuba:
+ <<: *restart_policy
+ container_name: sentry_snuba
+ image: ghcr.io/getsentry/snuba:latest
+ ports:
+ - '1218:1218'
+ - '1219:1219'
+ networks:
+ - sentry
+ command: ['devserver']
+ environment:
+ PYTHONUNBUFFERED: '1'
+ SNUBA_SETTINGS: docker
+ DEBUG: '1'
+ CLICKHOUSE_HOST: 'clickhouse'
+ CLICKHOUSE_PORT: '9000'
+ CLICKHOUSE_HTTP_PORT: '8123'
+ DEFAULT_BROKERS: 'kafka:9093'
+ REDIS_HOST: 'redis'
+ REDIS_PORT: '6379'
+ REDIS_DB: '1'
+ ENABLE_SENTRY_METRICS_DEV: '${ENABLE_SENTRY_METRICS_DEV:-}'
+ ENABLE_PROFILES_CONSUMER: '${ENABLE_PROFILES_CONSUMER:-}'
+ ENABLE_SPANS_CONSUMER: '${ENABLE_SPANS_CONSUMER:-}'
+ ENABLE_ISSUE_OCCURRENCE_CONSUMER: '${ENABLE_ISSUE_OCCURRENCE_CONSUMER:-}'
+ ENABLE_AUTORUN_MIGRATION_SEARCH_ISSUES: '1'
+ ENABLE_GROUP_ATTRIBUTES_CONSUMER: '${ENABLE_GROUP_ATTRIBUTES_CONSUMER:-}'
+ platform: linux/amd64
+ depends_on:
+ - kafka
+ - redis
+ - clickhouse
+ extra_hosts:
+ host.docker.internal: host-gateway
+ bigtable:
+ <<: *restart_policy
+ container_name: sentry_bigtable
+ image: 'us.gcr.io/sentryio/cbtemulator:23c02d92c7a1747068eb1fc57dddbad23907d614'
+ ports:
+ - '8086:8086'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ redis-cluster:
+ <<: *restart_policy
+ container_name: sentry_redis-cluster
+ image: ghcr.io/getsentry/docker-redis-cluster:7.0.10
+ ports:
+ - '7000:7000'
+ - '7001:7001'
+ - '7002:7002'
+ - '7003:7003'
+ - '7004:7004'
+ - '7005:7005'
+ networks:
+ - sentry
+ volumes:
+ - sentry-redis-cluster:/redis-data
+ environment:
+ - IP=0.0.0.0
+ chartcuterie:
+ <<: *restart_policy
+ container_name: sentry_chartcuterie
+ image: 'us-central1-docker.pkg.dev/sentryio/chartcuterie/image:latest'
+ environment:
+ CHARTCUTERIE_CONFIG: /etc/chartcuterie/config.js
+ CHARTCUTERIE_CONFIG_POLLING: true
+ volumes:
+ - ./chartcuterie:/etc/chartcuterie
+ ports:
+ - '7901:9090'
+ networks:
+ - sentry
+ extra_hosts:
+ host.docker.internal: host-gateway
+ healthcheck:
+ <<: *healthcheck_defaults
+ # Using default user "postgres" from sentry/sentry.conf.example.py or value of POSTGRES_USER if provided
+ test:
+ [
+ 'CMD-SHELL',
+ 'docker exec sentry_chartcuterie python3 -c "import urllib.request; urllib.request.urlopen(\"http://127.0.0.1:9090/api/chartcuterie/healthcheck/live\", timeout=5)"',
+ ]
+
+volumes:
+ # These store application data that should persist across restarts.
+ sentry-data:
+ sentry-postgres:
+ sentry-redis:
+ sentry-redis-cluster:
+ sentry-kafka:
+ sentry-clickhouse:
+ sentry-symbolicator:
+ # This volume stores profiles and should be persisted.
+ # Not being external will still persist data across restarts.
+ # It won't persist if someone does a docker compose down -v.
+ sentry-vroom:
+ sentry-kafka-log:
+ sentry-clickhouse-log:
+
+networks:
+ sentry:
+ name: sentry
+ external: true
diff --git a/devservices/symbolicator/config.yml b/devservices/symbolicator/config.yml
new file mode 100644
index 0000000000000..290d752a6dd04
--- /dev/null
+++ b/devservices/symbolicator/config.yml
@@ -0,0 +1,11 @@
+bind: '0.0.0.0:3021'
+logging:
+ level: 'debug'
+ format: 'pretty'
+ enable_backtraces: true
+
+# explicitly disable caches as it's not something we want in tests. in
+# development it may be less ideal. perhaps we should do the same thing as we
+# do with relay one day (one container per test/session), although that will be
+# slow
+cache_dir: null
diff --git a/fixtures/apidocs_test_case.py b/fixtures/apidocs_test_case.py
index 80aba059aaf56..11098075dfb6a 100644
--- a/fixtures/apidocs_test_case.py
+++ b/fixtures/apidocs_test_case.py
@@ -8,7 +8,7 @@
from openapi_core.validation.response.validators import V30ResponseDataValidator
from sentry.testutils.cases import APITestCase
-from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.testutils.helpers.datetime import before_now
from sentry.testutils.skips import requires_snuba
@@ -41,7 +41,7 @@ def create_event(self, name, **kwargs):
"event_id": (name * 32)[:32],
"fingerprint": ["1"],
"sdk": {"version": "5.17.0", "name": "sentry.javascript.browser"},
- "timestamp": iso_format(before_now(seconds=1)),
+ "timestamp": before_now(seconds=1).isoformat(),
"user": {"id": self.user.id, "email": self.user.email},
"release": name,
}
diff --git a/fixtures/backup/model_dependencies/detailed.json b/fixtures/backup/model_dependencies/detailed.json
index abe8107c5dfee..61f451c6b04dd 100644
--- a/fixtures/backup/model_dependencies/detailed.json
+++ b/fixtures/backup/model_dependencies/detailed.json
@@ -31,6 +31,24 @@
]
]
},
+ "flags.flagauditlogmodel": {
+ "dangling": false,
+ "foreign_keys": {
+ "organization_id": {
+ "kind": "HybridCloudForeignKey",
+ "model": "sentry.organization",
+ "nullable": false
+ }
+ },
+ "model": "flags.flagauditlogmodel",
+ "relocation_dependencies": [],
+ "relocation_scope": "Excluded",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "flags_audit_log",
+ "uniques": []
+ },
"hybridcloud.apikeyreplica": {
"dangling": false,
"foreign_keys": {
@@ -1389,6 +1407,28 @@
]
]
},
+ "sentry.dashboardpermissions": {
+ "dangling": false,
+ "foreign_keys": {
+ "dashboard": {
+ "kind": "DefaultOneToOneField",
+ "model": "sentry.dashboard",
+ "nullable": false
+ }
+ },
+ "model": "sentry.dashboardpermissions",
+ "relocation_dependencies": [],
+ "relocation_scope": "Organization",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "sentry_dashboardpermissions",
+ "uniques": [
+ [
+ "dashboard"
+ ]
+ ]
+ },
"sentry.dashboardproject": {
"dangling": false,
"foreign_keys": {
@@ -2334,6 +2374,11 @@
"kind": "DefaultOneToOneField",
"model": "sentry.grouphash",
"nullable": false
+ },
+ "seer_matched_grouphash": {
+ "kind": "FlexibleForeignKey",
+ "model": "sentry.grouphash",
+ "nullable": true
}
},
"model": "sentry.grouphashmetadata",
@@ -6166,6 +6211,11 @@
"uptime.projectuptimesubscription": {
"dangling": false,
"foreign_keys": {
+ "environment": {
+ "kind": "FlexibleForeignKey",
+ "model": "sentry.environment",
+ "nullable": true
+ },
"owner_team": {
"kind": "FlexibleForeignKey",
"model": "sentry.team",
@@ -6218,6 +6268,77 @@
]
]
},
+ "workflow_engine.action": {
+ "dangling": false,
+ "foreign_keys": {},
+ "model": "workflow_engine.action",
+ "relocation_dependencies": [],
+ "relocation_scope": "Excluded",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "workflow_engine_action",
+ "uniques": []
+ },
+ "workflow_engine.datacondition": {
+ "dangling": false,
+ "foreign_keys": {
+ "condition_group": {
+ "kind": "DefaultForeignKey",
+ "model": "workflow_engine.dataconditiongroup",
+ "nullable": false
+ }
+ },
+ "model": "workflow_engine.datacondition",
+ "relocation_dependencies": [],
+ "relocation_scope": "Organization",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "workflow_engine_datacondition",
+ "uniques": []
+ },
+ "workflow_engine.dataconditiongroup": {
+ "dangling": false,
+ "foreign_keys": {
+ "organization": {
+ "kind": "DefaultForeignKey",
+ "model": "sentry.organization",
+ "nullable": false
+ }
+ },
+ "model": "workflow_engine.dataconditiongroup",
+ "relocation_dependencies": [],
+ "relocation_scope": "Organization",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "workflow_engine_dataconditiongroup",
+ "uniques": []
+ },
+ "workflow_engine.dataconditiongroupaction": {
+ "dangling": false,
+ "foreign_keys": {
+ "action": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.action",
+ "nullable": false
+ },
+ "condition_group": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.dataconditiongroup",
+ "nullable": false
+ }
+ },
+ "model": "workflow_engine.dataconditiongroupaction",
+ "relocation_dependencies": [],
+ "relocation_scope": "Excluded",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "workflow_engine_dataconditiongroupaction",
+ "uniques": []
+ },
"workflow_engine.datasource": {
"dangling": false,
"foreign_keys": {
@@ -6281,6 +6402,11 @@
"kind": "HybridCloudForeignKey",
"model": "sentry.user",
"nullable": true
+ },
+ "workflow_condition_group": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.dataconditiongroup",
+ "nullable": true
}
},
"model": "workflow_engine.detector",
@@ -6294,9 +6420,53 @@
[
"name",
"organization"
+ ],
+ [
+ "workflow_condition_group"
]
]
},
+ "workflow_engine.detectorstate": {
+ "dangling": false,
+ "foreign_keys": {
+ "detector": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.detector",
+ "nullable": false
+ }
+ },
+ "model": "workflow_engine.detectorstate",
+ "relocation_dependencies": [],
+ "relocation_scope": "Organization",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "workflow_engine_detectorstate",
+ "uniques": []
+ },
+ "workflow_engine.detectorworkflow": {
+ "dangling": false,
+ "foreign_keys": {
+ "detector": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.detector",
+ "nullable": false
+ },
+ "workflow": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.workflow",
+ "nullable": false
+ }
+ },
+ "model": "workflow_engine.detectorworkflow",
+ "relocation_dependencies": [],
+ "relocation_scope": "Organization",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "workflow_engine_detectorworkflow",
+ "uniques": []
+ },
"workflow_engine.workflow": {
"dangling": false,
"foreign_keys": {
@@ -6304,6 +6474,11 @@
"kind": "FlexibleForeignKey",
"model": "sentry.organization",
"nullable": false
+ },
+ "when_condition_group": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.dataconditiongroup",
+ "nullable": true
}
},
"model": "workflow_engine.workflow",
@@ -6320,22 +6495,31 @@
]
]
},
- "workflow_engine.workflowaction": {
+ "workflow_engine.workflowdataconditiongroup": {
"dangling": false,
"foreign_keys": {
+ "condition_group": {
+ "kind": "FlexibleForeignKey",
+ "model": "workflow_engine.dataconditiongroup",
+ "nullable": false
+ },
"workflow": {
"kind": "FlexibleForeignKey",
"model": "workflow_engine.workflow",
"nullable": false
}
},
- "model": "workflow_engine.workflowaction",
+ "model": "workflow_engine.workflowdataconditiongroup",
"relocation_dependencies": [],
"relocation_scope": "Organization",
"silos": [
"Region"
],
- "table_name": "workflow_engine_workflowaction",
- "uniques": []
+ "table_name": "workflow_engine_workflowdataconditiongroup",
+ "uniques": [
+ [
+ "condition_group"
+ ]
+ ]
}
}
\ No newline at end of file
diff --git a/fixtures/backup/model_dependencies/flat.json b/fixtures/backup/model_dependencies/flat.json
index c64d01b2bc452..7b85fa11b8bbd 100644
--- a/fixtures/backup/model_dependencies/flat.json
+++ b/fixtures/backup/model_dependencies/flat.json
@@ -4,6 +4,9 @@
"sentry.organization",
"sentry.project"
],
+ "flags.flagauditlogmodel": [
+ "sentry.organization"
+ ],
"hybridcloud.apikeyreplica": [
"sentry.apikey",
"sentry.organization"
@@ -190,6 +193,9 @@
"sentry.organization",
"sentry.user"
],
+ "sentry.dashboardpermissions": [
+ "sentry.dashboard"
+ ],
"sentry.dashboardproject": [
"sentry.dashboard",
"sentry.project"
@@ -850,12 +856,24 @@
"sentry.user"
],
"uptime.projectuptimesubscription": [
+ "sentry.environment",
"sentry.project",
"sentry.team",
"sentry.user",
"uptime.uptimesubscription"
],
"uptime.uptimesubscription": [],
+ "workflow_engine.action": [],
+ "workflow_engine.datacondition": [
+ "workflow_engine.dataconditiongroup"
+ ],
+ "workflow_engine.dataconditiongroup": [
+ "sentry.organization"
+ ],
+ "workflow_engine.dataconditiongroupaction": [
+ "workflow_engine.action",
+ "workflow_engine.dataconditiongroup"
+ ],
"workflow_engine.datasource": [
"sentry.organization"
],
@@ -866,12 +884,22 @@
"workflow_engine.detector": [
"sentry.organization",
"sentry.team",
- "sentry.user"
+ "sentry.user",
+ "workflow_engine.dataconditiongroup"
+ ],
+ "workflow_engine.detectorstate": [
+ "workflow_engine.detector"
+ ],
+ "workflow_engine.detectorworkflow": [
+ "workflow_engine.detector",
+ "workflow_engine.workflow"
],
"workflow_engine.workflow": [
- "sentry.organization"
+ "sentry.organization",
+ "workflow_engine.dataconditiongroup"
],
- "workflow_engine.workflowaction": [
+ "workflow_engine.workflowdataconditiongroup": [
+ "workflow_engine.dataconditiongroup",
"workflow_engine.workflow"
]
}
\ No newline at end of file
diff --git a/fixtures/backup/model_dependencies/sorted.json b/fixtures/backup/model_dependencies/sorted.json
index 84116e0c856f5..a7d2fee2ab05d 100644
--- a/fixtures/backup/model_dependencies/sorted.json
+++ b/fixtures/backup/model_dependencies/sorted.json
@@ -48,11 +48,17 @@
"sentry.userroleuser",
"social_auth.usersocialauth",
"uptime.uptimesubscription",
+ "workflow_engine.action",
+ "workflow_engine.dataconditiongroup",
+ "workflow_engine.dataconditiongroupaction",
"workflow_engine.datasource",
"workflow_engine.detector",
+ "workflow_engine.detectorstate",
"workflow_engine.workflow",
- "workflow_engine.workflowaction",
+ "workflow_engine.workflowdataconditiongroup",
+ "workflow_engine.detectorworkflow",
"workflow_engine.datasourcedetector",
+ "workflow_engine.datacondition",
"sentry.savedsearch",
"sentry.relocation",
"sentry.recentsearch",
@@ -102,6 +108,7 @@
"hybridcloud.organizationslugreservationreplica",
"hybridcloud.externalactorreplica",
"hybridcloud.apikeyreplica",
+ "flags.flagauditlogmodel",
"feedback.feedback",
"uptime.projectuptimesubscription",
"sentry.useroption",
@@ -151,6 +158,7 @@
"sentry.debugidartifactbundle",
"sentry.dashboardwidget",
"sentry.dashboardproject",
+ "sentry.dashboardpermissions",
"sentry.customdynamicsamplingruleproject",
"sentry.commitfilechange",
"sentry.broadcastseen",
diff --git a/fixtures/backup/model_dependencies/truncate.json b/fixtures/backup/model_dependencies/truncate.json
index c236755389b59..714940ccaebb7 100644
--- a/fixtures/backup/model_dependencies/truncate.json
+++ b/fixtures/backup/model_dependencies/truncate.json
@@ -48,11 +48,17 @@
"sentry_userrole_users",
"social_auth_usersocialauth",
"uptime_uptimesubscription",
+ "workflow_engine_action",
+ "workflow_engine_dataconditiongroup",
+ "workflow_engine_dataconditiongroupaction",
"workflow_engine_datasource",
"workflow_engine_detector",
+ "workflow_engine_detectorstate",
"workflow_engine_workflow",
- "workflow_engine_workflowaction",
+ "workflow_engine_workflowdataconditiongroup",
+ "workflow_engine_detectorworkflow",
"workflow_engine_datasourcedetector",
+ "workflow_engine_datacondition",
"sentry_savedsearch",
"sentry_relocation",
"sentry_recentsearch",
@@ -102,6 +108,7 @@
"hybridcloud_organizationslugreservationreplica",
"hybridcloud_externalactorreplica",
"hybridcloud_apikeyreplica",
+ "flags_audit_log",
"feedback_feedback",
"uptime_projectuptimesubscription",
"sentry_useroption",
@@ -151,6 +158,7 @@
"sentry_debugidartifactbundle",
"sentry_dashboardwidget",
"sentry_dashboardproject",
+ "sentry_dashboardpermissions",
"sentry_customdynamicsamplingruleproject",
"sentry_commitfilechange",
"sentry_broadcastseen",
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/__init__.py b/fixtures/safe_migrations_apps/run_sql_app/__init__.py
similarity index 100%
rename from src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/__init__.py
rename to fixtures/safe_migrations_apps/run_sql_app/__init__.py
diff --git a/fixtures/safe_migrations_apps/run_sql_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/run_sql_app/migrations/0001_initial.py
new file mode 100644
index 0000000000000..1f566fa78ac91
--- /dev/null
+++ b/fixtures/safe_migrations_apps/run_sql_app/migrations/0001_initial.py
@@ -0,0 +1,30 @@
+# Generated by Django 3.1 on 2019-09-22 21:47
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+
+ initial = True
+
+ dependencies = []
+
+ operations = [
+ migrations.CreateModel(
+ name="TestTable",
+ fields=[
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("field", models.IntegerField(null=True)),
+ ],
+ ),
+ ]
diff --git a/fixtures/safe_migrations_apps/run_sql_app/migrations/0002_run_sql.py b/fixtures/safe_migrations_apps/run_sql_app/migrations/0002_run_sql.py
new file mode 100644
index 0000000000000..c8c76653c28cd
--- /dev/null
+++ b/fixtures/safe_migrations_apps/run_sql_app/migrations/0002_run_sql.py
@@ -0,0 +1,23 @@
+from django.db import migrations
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+
+ dependencies = [
+ ("run_sql_app", "0001_initial"),
+ ]
+
+ operations = [
+ migrations.SeparateDatabaseAndState(
+ database_operations=[
+ migrations.RunSQL(
+ """ALTER TABLE "run_sql_app_testtable" DROP COLUMN "field";""",
+ reverse_sql="""ALTER TABLE "run_sql_app_testtable" ADD COLUMN "field" int NULL;""",
+ hints={"tables": ["run_sql_app_testtable"]},
+ )
+ ],
+ state_operations=[migrations.RemoveField("testtable", "field")],
+ )
+ ]
diff --git a/fixtures/safe_migrations_apps/run_sql_app/migrations/0003_add_col.py b/fixtures/safe_migrations_apps/run_sql_app/migrations/0003_add_col.py
new file mode 100644
index 0000000000000..59d7c9343c3a3
--- /dev/null
+++ b/fixtures/safe_migrations_apps/run_sql_app/migrations/0003_add_col.py
@@ -0,0 +1,14 @@
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+
+ dependencies = [
+ ("run_sql_app", "0002_run_sql"),
+ ]
+
+ operations = [
+ migrations.AddField("testtable", "field", models.IntegerField(null=True)),
+ ]
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/stats/__init__.py b/fixtures/safe_migrations_apps/run_sql_app/migrations/__init__.py
similarity index 100%
rename from src/sentry/api/endpoints/integrations/sentry_apps/stats/__init__.py
rename to fixtures/safe_migrations_apps/run_sql_app/migrations/__init__.py
diff --git a/fixtures/safe_migrations_apps/run_sql_app/models.py b/fixtures/safe_migrations_apps/run_sql_app/models.py
new file mode 100644
index 0000000000000..fdd098a365453
--- /dev/null
+++ b/fixtures/safe_migrations_apps/run_sql_app/models.py
@@ -0,0 +1,5 @@
+from django.db import models
+
+
+class TestTable(models.Model):
+ field = models.IntegerField(default=0)
diff --git a/jest.config.ts b/jest.config.ts
index 2298e04af2ed4..1bec1edcf29f1 100644
--- a/jest.config.ts
+++ b/jest.config.ts
@@ -15,15 +15,8 @@ const {
GITHUB_PR_REF,
GITHUB_RUN_ID,
GITHUB_RUN_ATTEMPT,
- USING_YARN_TEST,
} = process.env;
-if (USING_YARN_TEST === undefined) {
- // eslint-disable-next-line no-console
- console.error('Do not run `jest` directly, use `yarn test` instead!');
- process.exit();
-}
-
const IS_MASTER_BRANCH = GITHUB_PR_REF === 'refs/heads/master';
const BALANCE_RESULTS_PATH = path.resolve(
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index 05d67e8cc4579..27f992dc08ccf 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -10,7 +10,7 @@ hybridcloud: 0016_add_control_cacheversion
nodestore: 0002_nodestore_no_dictfield
remote_subscriptions: 0003_drop_remote_subscription
replays: 0004_index_together
-sentry: 0765_add_org_to_api_auth
+sentry: 0777_add_related_name_to_dashboard_permissions
social_auth: 0002_default_auto_field
-uptime: 0013_uptime_subscription_new_unique
-workflow_engine: 0005_data_source_detector
+uptime: 0017_unique_on_timeout
+workflow_engine: 0009_detector_type
diff --git a/package.json b/package.json
index e8cb6ce69cfed..681187faf4dd0 100644
--- a/package.json
+++ b/package.json
@@ -54,17 +54,16 @@
"@react-types/shared": "^3.24.1",
"@rsdoctor/webpack-plugin": "0.4.4",
"@sentry-internal/global-search": "^1.0.0",
- "@sentry-internal/react-inspector": "6.0.1-4",
"@sentry-internal/rrweb": "2.26.0",
"@sentry-internal/rrweb-player": "2.26.0",
"@sentry-internal/rrweb-snapshot": "2.26.0",
- "@sentry/core": "^8.28.0",
- "@sentry/node": "^8.28.0",
- "@sentry/react": "^8.28.0",
+ "@sentry/core": "^8.35.0-beta.0",
+ "@sentry/node": "^8.35.0-beta.0",
+ "@sentry/react": "^8.35.0-beta.0",
"@sentry/release-parser": "^1.3.1",
"@sentry/status-page-list": "^0.3.0",
- "@sentry/types": "^8.28.0",
- "@sentry/utils": "^8.28.0",
+ "@sentry/types": "^8.35.0-beta.0",
+ "@sentry/utils": "^8.35.0-beta.0",
"@sentry/webpack-plugin": "^2.22.4",
"@spotlightjs/spotlight": "^2.0.0-alpha.1",
"@tanstack/react-query": "^5.56.2",
@@ -73,6 +72,7 @@
"@types/color": "^3.0.3",
"@types/diff": "5.2.1",
"@types/dompurify": "^3.0.5",
+ "@types/history": "^3.2.5",
"@types/invariant": "^2.2.35",
"@types/jest": "29.5.12",
"@types/js-beautify": "^1.14.3",
@@ -88,7 +88,6 @@
"@types/react-grid-layout": "^1.3.2",
"@types/react-lazyload": "3.2.3",
"@types/react-mentions": "4.1.13",
- "@types/react-router": "^3.0.28",
"@types/react-select": "4.0.18",
"@types/react-sparklines": "^1.7.2",
"@types/react-virtualized": "^9.21.22",
@@ -142,7 +141,7 @@
"papaparse": "^5.3.2",
"pegjs": "^0.10.0",
"pegjs-loader": "^0.5.8",
- "platformicons": "^6.0.1",
+ "platformicons": "^7.0.1",
"po-catalog-loader": "2.1.0",
"prettier": "3.3.2",
"prismjs": "^1.29.0",
@@ -157,8 +156,7 @@
"react-lazyload": "^3.2.1",
"react-mentions": "4.4.10",
"react-popper": "^2.3.0",
- "react-router": "3.2.6",
- "react-router-dom": "^6.23.0",
+ "react-router-dom": "^6.26.2",
"react-select": "4.3.1",
"react-sparklines": "1.7.0",
"react-virtualized": "^9.22.5",
@@ -169,8 +167,8 @@
"style-loader": "^3.3.4",
"terser-webpack-plugin": "^5.3.10",
"ts-node": "^10.9.2",
- "tslib": "^2.6.3",
- "typescript": "^5.5.2",
+ "tslib": "^2.7.0",
+ "typescript": "^5.6.3",
"u2f-api": "1.0.10",
"url-loader": "^4.1.1",
"webpack": "5.94.0",
@@ -183,7 +181,7 @@
"@codecov/webpack-plugin": "^1.2.0",
"@pmmmwh/react-refresh-webpack-plugin": "0.5.15",
"@sentry/jest-environment": "6.0.0",
- "@sentry/profiling-node": "^8.28.0",
+ "@sentry/profiling-node": "^8.35.0-beta.0",
"@styled/typescript-styled-plugin": "^1.0.1",
"@testing-library/dom": "10.1.0",
"@testing-library/jest-dom": "6.4.5",
@@ -193,8 +191,8 @@
"babel-gettext-extractor": "^4.1.3",
"babel-jest": "29.7.0",
"benchmark": "^2.1.4",
- "eslint": "8.57.0",
- "eslint-config-sentry-app": "2.8.0",
+ "eslint": "8.57.1",
+ "eslint-config-sentry-app": "2.9.0",
"html-webpack-plugin": "^5.6.0",
"jest": "29.7.0",
"jest-canvas-mock": "^2.5.2",
@@ -247,6 +245,7 @@
"build-js-loader": "ts-node scripts/build-js-loader.ts",
"validate-api-examples": "yarn --cwd api-docs openapi-examples-validator ../tests/apidocs/openapi-derefed.json --no-additional-properties",
"mkcert-localhost": "mkcert -key-file config/localhost-key.pem -cert-file config/localhost.pem localhost 127.0.0.1 dev.getsentry.net *.dev.getsentry.net && mkcert -install",
+ "https-proxy": "caddy run --config - <<< '{\"apps\":{\"http\":{\"servers\":{\"srv0\":{\"listen\":[\":8003\"],\"routes\":[{\"handle\":[{\"handler\":\"reverse_proxy\",\"upstreams\":[{\"dial\":\"localhost:8000\"}]}]}],\"tls_connection_policies\":[{\"certificate_selection\":{\"any_tag\":[\"cert0\"]}}]}}},\"tls\":{\"certificates\":{\"load_files\":[{\"certificate\":\"./config/localhost.pem\",\"key\":\"./config/localhost-key.pem\",\"tags\":[\"cert0\"]}]}}}}'",
"extract-ios-device-names": "ts-node scripts/extract-ios-device-names.ts"
},
"browserslist": {
diff --git a/pyproject.toml b/pyproject.toml
index 078d2f3629a71..67dfbf9ece345 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -124,7 +124,6 @@ module = [
"sentry.api.bases.organizationmember",
"sentry.api.bases.project",
"sentry.api.bases.project_request_change",
- "sentry.api.bases.sentryapps",
"sentry.api.bases.team",
"sentry.api.endpoints.accept_organization_invite",
"sentry.api.endpoints.auth_config",
@@ -136,8 +135,6 @@ module = [
"sentry.api.endpoints.group_integration_details",
"sentry.api.endpoints.group_integrations",
"sentry.api.endpoints.index",
- "sentry.api.endpoints.integrations.sentry_apps.internal_app_token.index",
- "sentry.api.endpoints.integrations.sentry_apps.stats.details",
"sentry.api.endpoints.internal.mail",
"sentry.api.endpoints.organization_details",
"sentry.api.endpoints.organization_events",
@@ -194,14 +191,11 @@ module = [
"sentry.api.serializers.models.project",
"sentry.api.serializers.models.role",
"sentry.api.serializers.models.rule",
- "sentry.api.serializers.models.sentry_app",
"sentry.api.serializers.models.team",
"sentry.api.serializers.rest_framework.mentions",
"sentry.api.serializers.rest_framework.notification_action",
"sentry.api.serializers.rest_framework.rule",
- "sentry.api.serializers.rest_framework.sentry_app_request",
"sentry.api.serializers.snuba",
- "sentry.api.validators.email",
"sentry.auth.helper",
"sentry.auth.provider",
"sentry.auth.system",
@@ -210,8 +204,6 @@ module = [
"sentry.db.router",
"sentry.discover.endpoints.discover_key_transactions",
"sentry.eventstore.models",
- "sentry.features.handler",
- "sentry.features.manager",
"sentry.grouping.strategies.legacy",
"sentry.identity.bitbucket.provider",
"sentry.identity.github_enterprise.provider",
@@ -254,7 +246,6 @@ module = [
"sentry.integrations.jira_server.client",
"sentry.integrations.jira_server.integration",
"sentry.integrations.metric_alerts",
- "sentry.integrations.mixins.notifications",
"sentry.integrations.msteams.actions.form",
"sentry.integrations.msteams.client",
"sentry.integrations.msteams.integration",
@@ -299,7 +290,6 @@ module = [
"sentry.notifications.notifications.activity.base",
"sentry.notifications.notifications.activity.release",
"sentry.notifications.notifications.integration_nudge",
- "sentry.ownership.grammar",
"sentry.pipeline.base",
"sentry.pipeline.views.base",
"sentry.pipeline.views.nested",
@@ -335,7 +325,6 @@ module = [
"sentry.sentry_apps.installations",
"sentry.sentry_metrics.indexer.postgres.postgres_v2",
"sentry.shared_integrations.client.proxy",
- "sentry.similarity.features",
"sentry.snuba.errors",
"sentry.snuba.issue_platform",
"sentry.snuba.metrics.datasource",
@@ -348,16 +337,9 @@ module = [
"sentry.tagstore.types",
"sentry.tasks.auth",
"sentry.tasks.base",
- "sentry.tasks.process_buffer",
- "sentry.tasks.sentry_apps",
- "sentry.templatetags.sentry_assets",
- "sentry.templatetags.sentry_helpers",
- "sentry.templatetags.sentry_plugins",
"sentry.testutils.cases",
"sentry.testutils.fixtures",
- "sentry.testutils.helpers.features",
"sentry.testutils.helpers.notifications",
- "sentry.testutils.helpers.slack",
"sentry.utils.auth",
"sentry.utils.committers",
"sentry.utils.services",
@@ -367,14 +349,6 @@ module = [
"sentry.web.frontend.auth_logout",
"sentry.web.frontend.auth_organization_login",
"sentry.web.frontend.base",
- "sentry.web.frontend.debug.debug_codeowners_auto_sync_failure_email",
- "sentry.web.frontend.debug.debug_incident_activity_email",
- "sentry.web.frontend.debug.debug_incident_trigger_email",
- "sentry.web.frontend.debug.debug_mfa_added_email",
- "sentry.web.frontend.debug.debug_mfa_removed_email",
- "sentry.web.frontend.debug.debug_organization_integration_request",
- "sentry.web.frontend.debug.debug_organization_invite_request",
- "sentry.web.frontend.debug.debug_organization_join_request",
"sentry.web.frontend.disabled_member_view",
"sentry.web.frontend.group_plugin_action",
"sentry.web.frontend.idp_email_verification",
@@ -394,7 +368,6 @@ module = [
"sentry_plugins.jira.plugin",
"tests.sentry.api.bases.test_organization",
"tests.sentry.api.bases.test_project",
- "tests.sentry.api.bases.test_sentryapps",
"tests.sentry.api.bases.test_team",
"tests.sentry.api.endpoints.notifications.test_notification_actions_details",
"tests.sentry.api.endpoints.notifications.test_notification_actions_index",
@@ -429,7 +402,7 @@ disable_error_code = [
# begin: stronger typing
[[tool.mypy.overrides]]
module = [
- "sentry.api.endpoints.issues.*",
+ "sentry.api.endpoints.project_backfill_similar_issues_embeddings_records",
"sentry.api.helpers.deprecation",
"sentry.api.helpers.source_map_helper",
"sentry.auth.services.*",
@@ -449,8 +422,7 @@ module = [
"sentry.eventtypes.error",
"sentry.grouping.component",
"sentry.grouping.fingerprinting",
- "sentry.grouping.ingest.metrics",
- "sentry.grouping.ingest.utils",
+ "sentry.grouping.ingest.*",
"sentry.grouping.parameterization",
"sentry.hybridcloud.*",
"sentry.ingest.slicing",
@@ -470,6 +442,7 @@ module = [
"sentry.issues.endpoints.organization_searches",
"sentry.issues.endpoints.project_events",
"sentry.issues.endpoints.project_stacktrace_link",
+ "sentry.issues.endpoints.related_issues",
"sentry.issues.endpoints.shared_group_details",
"sentry.issues.endpoints.team_groups_old",
"sentry.issues.escalating_group_forecast",
@@ -494,7 +467,6 @@ module = [
"sentry.issues.update_inbox",
"sentry.lang.java.processing",
"sentry.llm.*",
- "sentry.mediators.sentry_app_installations.installation_notifier",
"sentry.migrations.*",
"sentry.models.event",
"sentry.models.eventattachment",
@@ -507,6 +479,7 @@ module = [
"sentry.nodestore.filesystem.backend",
"sentry.nodestore.models",
"sentry.organizations.*",
+ "sentry.ownership.*",
"sentry.plugins.base.response",
"sentry.plugins.base.view",
"sentry.profiles.*",
@@ -559,10 +532,12 @@ module = [
"sentry.utils.uwsgi",
"sentry.utils.zip",
"sentry.web.frontend.auth_provider_login",
+ "sentry.web.frontend.cli",
"sentry.web.frontend.csv",
"sentry_plugins.base",
- "tests.sentry.api.endpoints.issues.*",
+ "tests.sentry.deletions.test_group",
"tests.sentry.event_manager.test_event_manager",
+ "tests.sentry.grouping.ingest.test_seer",
"tests.sentry.grouping.test_fingerprinting",
"tests.sentry.hybridcloud.*",
"tests.sentry.issues",
@@ -571,6 +546,7 @@ module = [
"tests.sentry.issues.endpoints.test_organization_group_search_views",
"tests.sentry.issues.endpoints.test_organization_searches",
"tests.sentry.issues.endpoints.test_project_stacktrace_link",
+ "tests.sentry.issues.endpoints.test_related_issues",
"tests.sentry.issues.endpoints.test_source_map_debug",
"tests.sentry.issues.test_attributes",
"tests.sentry.issues.test_escalating",
@@ -592,11 +568,13 @@ module = [
"tests.sentry.issues.test_status_change",
"tests.sentry.issues.test_status_change_consumer",
"tests.sentry.issues.test_update_inbox",
+ "tests.sentry.ownership.*",
"tests.sentry.ratelimits.test_leaky_bucket",
"tests.sentry.relay.config.test_metric_extraction",
"tests.sentry.tasks.test_on_demand_metrics",
"tests.sentry.types.test_actor",
"tests.sentry.types.test_region",
+ "tests.sentry.web.frontend.test_cli",
"tools.*",
]
disallow_any_generics = true
diff --git a/requirements-base.txt b/requirements-base.txt
index 2bde86c72cf6a..4d10fffb82e43 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -67,17 +67,17 @@ rfc3986-validator>=0.1.1
sentry-arroyo>=2.16.5
sentry-kafka-schemas>=0.1.111
sentry-ophio==1.0.0
-sentry-protos>=0.1.21
+sentry-protos>=0.1.26
sentry-redis-tools>=0.1.7
-sentry-relay>=0.9.1
-sentry-sdk>=2.12.0
+sentry-relay>=0.9.2
+sentry-sdk>=2.17.0
slack-sdk>=3.27.2
-snuba-sdk>=3.0.38
+snuba-sdk>=3.0.43
simplejson>=3.17.6
sqlparse>=0.4.4
statsd>=3.3
structlog>=22
-symbolic==12.10.0
+symbolic==12.12.0
tiktoken>=0.6.0
tldextract>=5.1.2
toronado>=0.1.0
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index c52f5eae48b99..fea5d1a38d28d 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -32,7 +32,7 @@ confluent-kafka==2.3.0
covdefaults==2.3.0
coverage==7.4.1
croniter==1.3.10
-cryptography==42.0.4
+cryptography==43.0.1
cssselect==1.0.3
cssutils==2.9.0
datadog==0.49.1
@@ -42,7 +42,7 @@ django==5.1.1
django-crispy-forms==1.14.0
django-csp==3.8
django-pg-zero-downtime-migrations==0.13
-django-stubs-ext==5.0.4
+django-stubs-ext==5.1.0
djangorestframework==3.15.2
docker==6.1.3
drf-spectacular==0.26.3
@@ -73,11 +73,11 @@ grpcio==1.60.1
grpcio-status==1.60.1
h11==0.13.0
hiredis==2.3.2
-honcho==1.1.0
+honcho==2.0.0
httpcore==1.0.2
httpx==0.25.2
-identify==2.5.24
-idna==2.10
+identify==2.6.1
+idna==3.7
inflection==0.5.1
iniconfig==1.1.1
iso3166==2.1.1
@@ -103,7 +103,7 @@ msgpack==1.0.7
msgpack-types==0.2.0
mypy==1.11.2
mypy-extensions==1.0.0
-nodeenv==1.8.0
+nodeenv==1.9.1
oauthlib==3.1.0
openai==1.3.5
openapi-core==0.18.2
@@ -125,7 +125,7 @@ pillow==10.2.0
pip-tools==7.1.0
platformdirs==4.2.0
pluggy==1.5.0
-pre-commit==3.3.2
+pre-commit==4.0.0
progressbar2==3.41.0
prompt-toolkit==3.0.41
proto-plus==1.24.0
@@ -149,7 +149,7 @@ pytest-django==4.9.0
pytest-fail-slow==0.3.0
pytest-json-report==1.5.0
pytest-metadata==3.1.1
-pytest-rerunfailures==11.0
+pytest-rerunfailures==14.0
pytest-sentry==0.3.0
pytest-xdist==3.0.2
python-dateutil==2.9.0
@@ -179,28 +179,28 @@ s3transfer==0.10.0
selenium==4.16.0
sentry-arroyo==2.16.5
sentry-cli==2.16.0
-sentry-devenv==1.10.2
-sentry-forked-django-stubs==5.0.4.post2
-sentry-forked-djangorestframework-stubs==3.15.1.post1
+sentry-devenv==1.13.0
+sentry-forked-django-stubs==5.1.0.post2
+sentry-forked-djangorestframework-stubs==3.15.1.post2
sentry-kafka-schemas==0.1.111
sentry-ophio==1.0.0
-sentry-protos==0.1.21
+sentry-protos==0.1.26
sentry-redis-tools==0.1.7
-sentry-relay==0.9.1
-sentry-sdk==2.12.0
+sentry-relay==0.9.2
+sentry-sdk==2.17.0
sentry-usage-accountant==0.0.10
simplejson==3.17.6
six==1.16.0
slack-sdk==3.27.2
sniffio==1.2.0
-snuba-sdk==3.0.39
+snuba-sdk==3.0.43
sortedcontainers==2.4.0
soupsieve==2.3.2.post1
sqlparse==0.5.0
statsd==3.3.0
stripe==3.1.0
structlog==22.1.0
-symbolic==12.10.0
+symbolic==12.12.0
tiktoken==0.6.0
time-machine==2.13.0
tldextract==5.1.2
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 61a113efa780e..cf3b0fbee4de1 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,11 +1,11 @@
--index-url https://pypi.devinfra.sentry.io/simple
-sentry-devenv>=1.10.2
+sentry-devenv>=1.13.0
covdefaults>=2.3.0
docker>=6
time-machine>=2.13.0
-honcho>=1.1.0
+honcho>=2
openapi-core>=0.18.2
openapi-pydantic>=0.4.0
pytest>=8.1
@@ -13,7 +13,7 @@ pytest-cov>=4.0.0
pytest-django>=4.9.0
pytest-fail-slow>=0.3.0
pytest-json-report>=1.5.0
-pytest-rerunfailures>=11
+pytest-rerunfailures>=14
pytest-sentry>=0.3.0
pytest-xdist>=3
responses>=0.23.1
@@ -21,7 +21,7 @@ selenium>=4.16.0
sentry-cli>=2.16.0
# pre-commit dependencies
-pre-commit>=3.3
+pre-commit>=4
black>=22.10.0
flake8>=7
flake8-bugbear>=22.10
@@ -35,8 +35,8 @@ pip-tools>=7.1.0
packaging>=21.3
# for type checking
-sentry-forked-django-stubs>=5.0.4.post2
-sentry-forked-djangorestframework-stubs>=3.15.1.post1
+sentry-forked-django-stubs>=5.1.0.post2
+sentry-forked-djangorestframework-stubs>=3.15.1.post2
lxml-stubs
msgpack-types>=0.2.0
mypy>=1.11.2
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index b58a5fe92b673..7a34edf710bb2 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -27,7 +27,7 @@ click-plugins==1.1.1
click-repl==0.3.0
confluent-kafka==2.3.0
croniter==1.3.10
-cryptography==42.0.4
+cryptography==43.0.1
cssselect==1.0.3
cssutils==2.9.0
datadog==0.49.1
@@ -63,7 +63,7 @@ h11==0.14.0
hiredis==2.3.2
httpcore==1.0.2
httpx==0.25.2
-idna==2.10
+idna==3.7
inflection==0.5.1
iso3166==2.1.1
isodate==0.6.1
@@ -125,22 +125,22 @@ s3transfer==0.10.0
sentry-arroyo==2.16.5
sentry-kafka-schemas==0.1.111
sentry-ophio==1.0.0
-sentry-protos==0.1.21
+sentry-protos==0.1.26
sentry-redis-tools==0.1.7
-sentry-relay==0.9.1
-sentry-sdk==2.12.0
+sentry-relay==0.9.2
+sentry-sdk==2.17.0
sentry-usage-accountant==0.0.10
simplejson==3.17.6
six==1.16.0
slack-sdk==3.27.2
sniffio==1.3.0
-snuba-sdk==3.0.39
+snuba-sdk==3.0.43
soupsieve==2.3.2.post1
sqlparse==0.5.0
statsd==3.3.0
stripe==3.1.0
structlog==22.1.0
-symbolic==12.10.0
+symbolic==12.12.0
tiktoken==0.6.0
tldextract==5.1.2
toronado==0.1.0
diff --git a/scripts/test.js b/scripts/test.js
index d73f8696933f8..156608c4ceeb1 100644
--- a/scripts/test.js
+++ b/scripts/test.js
@@ -6,9 +6,6 @@ process.env.NODE_ENV = 'test';
process.env.PUBLIC_URL = '';
process.env.TZ = 'America/New_York';
-// Marker to indicate that we've correctly ran with `yarn test`.
-process.env.USING_YARN_TEST = true;
-
// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
diff --git a/setup.cfg b/setup.cfg
index 16372c6641425..fa329dc0dff63 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = sentry
-version = 24.10.0.dev0
+version = 24.11.0.dev0
description = A realtime logging and aggregation server.
long_description = file: README.md
long_description_content_type = text/markdown
diff --git a/src/flagpole/conditions.py b/src/flagpole/conditions.py
index ff3e8c7404cd3..ac530ffbeb6de 100644
--- a/src/flagpole/conditions.py
+++ b/src/flagpole/conditions.py
@@ -20,7 +20,7 @@ class ConditionOperatorKind(str, Enum):
"""Provided a single value, check if the property (a list) is not included"""
EQUALS = "equals"
- """Comprare a value to another. Values are compared with types"""
+ """Compare a value to another. Values are compared with types"""
NOT_EQUALS = "not_equals"
"""Compare a value to not be equal to another. Values are compared with types"""
diff --git a/src/sentry/analytics/events/alert_created.py b/src/sentry/analytics/events/alert_created.py
index d9f743dcdac9d..c00ac0b2f4038 100644
--- a/src/sentry/analytics/events/alert_created.py
+++ b/src/sentry/analytics/events/alert_created.py
@@ -15,6 +15,7 @@ class AlertCreatedEvent(analytics.Event):
analytics.Attribute("alert_rule_ui_component", required=False),
analytics.Attribute("duplicate_rule", required=False),
analytics.Attribute("wizard_v3", required=False),
+ analytics.Attribute("query_type", required=False),
)
diff --git a/src/sentry/analytics/events/onboarding_complete.py b/src/sentry/analytics/events/onboarding_complete.py
new file mode 100644
index 0000000000000..59b2924442aa8
--- /dev/null
+++ b/src/sentry/analytics/events/onboarding_complete.py
@@ -0,0 +1,14 @@
+from sentry import analytics
+
+
+class OnboardingCompleteEvent(analytics.Event):
+ type = "onboarding.complete"
+
+ attributes = (
+ analytics.Attribute("user_id"),
+ analytics.Attribute("organization_id"),
+ analytics.Attribute("referrer"),
+ )
+
+
+analytics.register(OnboardingCompleteEvent)
diff --git a/src/sentry/api/analytics.py b/src/sentry/api/analytics.py
index 339c9bc5bc458..e875453f138fe 100644
--- a/src/sentry/api/analytics.py
+++ b/src/sentry/api/analytics.py
@@ -33,6 +33,25 @@ class GroupSimilarIssuesEmbeddingsCountEvent(analytics.Event):
)
+class DevToolbarApiRequestEvent(analytics.Event):
+ type = "devtoolbar.api_request"
+
+ attributes = (
+ analytics.Attribute("view_name"),
+ analytics.Attribute("route"),
+ analytics.Attribute("query_string", required=False),
+ analytics.Attribute("origin", required=False),
+ analytics.Attribute("method"),
+ analytics.Attribute("status_code", type=int),
+ analytics.Attribute("organization_id", type=int, required=False),
+ analytics.Attribute("organization_slug", required=False),
+ analytics.Attribute("project_id", type=int, required=False),
+ analytics.Attribute("project_slug", required=False),
+ analytics.Attribute("user_id", type=int, required=False),
+ )
+
+
analytics.register(OrganizationSavedSearchCreatedEvent)
analytics.register(OrganizationSavedSearchDeletedEvent)
analytics.register(GroupSimilarIssuesEmbeddingsCountEvent)
+analytics.register(DevToolbarApiRequestEvent)
diff --git a/src/sentry/api/api_owners.py b/src/sentry/api/api_owners.py
index da89e1e004e22..60e5d1884f26a 100644
--- a/src/sentry/api/api_owners.py
+++ b/src/sentry/api/api_owners.py
@@ -13,6 +13,7 @@ class ApiOwner(Enum):
ECOSYSTEM = "ecosystem"
ENTERPRISE = "enterprise"
FEEDBACK = "feedback-backend"
+ FLAG = "replay-backend"
HYBRID_CLOUD = "hybrid-cloud"
INTEGRATIONS = "product-owners-settings-integrations"
ISSUES = "issues"
@@ -27,4 +28,3 @@ class ApiOwner(Enum):
TELEMETRY_EXPERIENCE = "telemetry-experience"
UNOWNED = "unowned"
WEB_FRONTEND_SDKS = "team-web-sdk-frontend"
- REMOTE_CONFIG = "replay-backend"
diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py
index 962b733c078b7..52b49e991ea33 100644
--- a/src/sentry/api/base.py
+++ b/src/sentry/api/base.py
@@ -394,7 +394,7 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response:
Identical to rest framework's dispatch except we add the ability
to convert arguments (for common URL params).
"""
- with sentry_sdk.start_span(op="base.dispatch.setup", description=type(self).__name__):
+ with sentry_sdk.start_span(op="base.dispatch.setup", name=type(self).__name__):
self.args = args
self.kwargs = kwargs
request = self.initialize_request(request, *args, **kwargs)
@@ -415,7 +415,7 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response:
origin = None
try:
- with sentry_sdk.start_span(op="base.dispatch.request", description=type(self).__name__):
+ with sentry_sdk.start_span(op="base.dispatch.request", name=type(self).__name__):
if origin:
if request.auth:
allowed_origins = request.auth.get_allowed_origins()
@@ -449,7 +449,7 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response:
with sentry_sdk.start_span(
op="base.dispatch.execute",
- description=".".join(
+ name=".".join(
getattr(part, "__name__", None) or str(part) for part in (type(self), handler)
),
) as span:
@@ -469,7 +469,7 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response:
if duration < (settings.SENTRY_API_RESPONSE_DELAY / 1000.0):
with sentry_sdk.start_span(
op="base.dispatch.sleep",
- description=type(self).__name__,
+ name=type(self).__name__,
) as span:
span.set_data("SENTRY_API_RESPONSE_DELAY", settings.SENTRY_API_RESPONSE_DELAY)
time.sleep(settings.SENTRY_API_RESPONSE_DELAY / 1000.0 - duration)
@@ -556,7 +556,7 @@ def paginate(
cursor = self.get_cursor_from_request(request, cursor_cls)
with sentry_sdk.start_span(
op="base.paginate.get_result",
- description=type(self).__name__,
+ name=type(self).__name__,
) as span:
annotate_span_with_pagination_args(span, per_page)
paginator = get_paginator(paginator, paginator_cls, paginator_kwargs)
@@ -576,7 +576,7 @@ def paginate(
if on_results:
with sentry_sdk.start_span(
op="base.paginate.on_results",
- description=type(self).__name__,
+ name=type(self).__name__,
):
results = on_results(cursor_result.results)
else:
diff --git a/src/sentry/api/bases/__init__.py b/src/sentry/api/bases/__init__.py
index eeb0925009617..bf18019cc99a8 100644
--- a/src/sentry/api/bases/__init__.py
+++ b/src/sentry/api/bases/__init__.py
@@ -4,5 +4,4 @@
from .organizationmember import * # NOQA
from .project import * # NOQA
from .project_transaction_threshold_override import * # NOQA
-from .sentryapps import * # NOQA
from .team import * # NOQA
diff --git a/src/sentry/api/bases/organization_events.py b/src/sentry/api/bases/organization_events.py
index 4ddd5bd2469e5..3ef920d04efbf 100644
--- a/src/sentry/api/bases/organization_events.py
+++ b/src/sentry/api/bases/organization_events.py
@@ -117,7 +117,7 @@ def get_snuba_params(
quantize_date_params: bool = True,
) -> SnubaParams:
"""Returns params to make snuba queries with"""
- with sentry_sdk.start_span(op="discover.endpoint", description="filter_params(dataclass)"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="filter_params(dataclass)"):
if (
len(self.get_field_list(organization, request))
+ len(self.get_equation_list(organization, request))
@@ -317,7 +317,7 @@ def handle_results_with_meta(
standard_meta: bool | None = False,
dataset: Any | None = None,
) -> dict[str, Any]:
- with sentry_sdk.start_span(op="discover.endpoint", description="base.handle_results"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="base.handle_results"):
data = self.handle_data(request, organization, project_ids, results.get("data"))
meta = results.get("meta", {})
fields_meta = meta.get("fields", {})
@@ -424,9 +424,7 @@ def get_event_stats_data(
dataset: Any | None = None,
) -> dict[str, Any]:
with handle_query_errors():
- with sentry_sdk.start_span(
- op="discover.endpoint", description="base.stats_query_creation"
- ):
+ with sentry_sdk.start_span(op="discover.endpoint", name="base.stats_query_creation"):
_columns = [query_column]
# temporary change to make topN query work for multi-axes requests
if additional_query_column is not None:
@@ -466,14 +464,14 @@ def get_event_stats_data(
raise ValidationError("Comparison period is outside your retention window")
query_columns = get_query_columns(columns, rollup)
- with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_query"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="base.stats_query"):
result = get_event_stats(
query_columns, query, snuba_params, rollup, zerofill_results, comparison_delta
)
serializer = SnubaTSResultSerializer(organization, None, request.user)
- with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_serialization"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="base.stats_serialization"):
# When the request is for top_events, result can be a SnubaTSResult in the event that
# there were no top events found. In this case, result contains a zerofilled series
# that acts as a placeholder.
diff --git a/src/sentry/api/bases/sentryapps.py b/src/sentry/api/bases/sentryapps.py
index 242c81858f2f2..b1e5512dce5ed 100644
--- a/src/sentry/api/bases/sentryapps.py
+++ b/src/sentry/api/bases/sentryapps.py
@@ -1,498 +1,13 @@
-from __future__ import annotations
-
-import logging
-from functools import wraps
-from typing import Any
-
-from django.http import Http404
-from rest_framework.exceptions import PermissionDenied
-from rest_framework.permissions import BasePermission
-from rest_framework.request import Request
-from rest_framework.response import Response
-from rest_framework.serializers import ValidationError
-
-from sentry.api.authentication import ClientIdSecretAuthentication
-from sentry.api.base import Endpoint
-from sentry.api.permissions import SentryPermission, StaffPermissionMixin
-from sentry.auth.staff import is_active_staff
-from sentry.auth.superuser import is_active_superuser, superuser_has_permission
-from sentry.coreapi import APIError
-from sentry.integrations.api.bases.integration import PARANOID_GET
-from sentry.middleware.stats import add_request_metric_tags
-from sentry.models.organization import OrganizationStatus
-from sentry.organizations.services.organization import (
- RpcUserOrganizationContext,
- organization_service,
+from sentry.sentry_apps.api.bases.sentryapps import (
+ RegionSentryAppBaseEndpoint,
+ SentryAppBaseEndpoint,
+ SentryAppInstallationBaseEndpoint,
+ SentryAppInstallationsBaseEndpoint,
)
-from sentry.sentry_apps.models.sentry_app import SentryApp
-from sentry.sentry_apps.services.app import RpcSentryApp, app_service
-from sentry.users.services.user import RpcUser
-from sentry.users.services.user.service import user_service
-from sentry.utils.sdk import Scope
-from sentry.utils.strings import to_single_line_str
-
-COMPONENT_TYPES = ["stacktrace-link", "issue-link"]
-
-logger = logging.getLogger(__name__)
-
-
-def catch_raised_errors(func):
- @wraps(func)
- def wrapped(self, *args, **kwargs):
- try:
- return func(self, *args, **kwargs)
- except APIError as e:
- return Response({"detail": e.msg}, status=400)
-
- return wrapped
-
-
-def ensure_scoped_permission(request, allowed_scopes):
- """
- Verifies the User making the request has at least one required scope for
- the endpoint being requested.
-
- If no scopes were specified in a ``scope_map``, it means the endpoint should
- not be accessible. That is, this function expects every accessible endpoint
- to have a list of scopes.
-
- That list of scopes may be empty, implying that the User does not need any
- specific scope and the endpoint is public.
- """
- # If no scopes were found at all, the endpoint should not be accessible.
- if allowed_scopes is None:
- return False
-
- # If there are no scopes listed, it implies a public endpoint.
- if len(allowed_scopes) == 0:
- return True
-
- return any(request.access.has_scope(s) for s in set(allowed_scopes))
-
-
-def add_integration_platform_metric_tag(func):
- @wraps(func)
- def wrapped(self, *args, **kwargs):
- add_request_metric_tags(self.request, integration_platform=True)
- return func(self, *args, **kwargs)
-
- return wrapped
-
-
-class SentryAppsPermission(SentryPermission):
- scope_map = {
- "GET": PARANOID_GET,
- "POST": ("org:write", "org:admin"),
- }
-
- def has_object_permission(self, request: Request, view, context: RpcUserOrganizationContext):
- if not hasattr(request, "user") or not request.user:
- return False
-
- self.determine_access(request, context)
-
- if superuser_has_permission(request):
- return True
-
- # User must be a part of the Org they're trying to create the app in.
- if context.organization.status != OrganizationStatus.ACTIVE or not context.member:
- raise Http404
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppsAndStaffPermission(StaffPermissionMixin, SentryAppsPermission):
- """Allows staff to access the GET method of sentry apps endpoints."""
-
- staff_allowed_methods = {"GET"}
-
-
-class IntegrationPlatformEndpoint(Endpoint):
- def dispatch(self, request, *args, **kwargs):
- add_request_metric_tags(request, integration_platform=True)
- return super().dispatch(request, *args, **kwargs)
-
-
-class SentryAppsBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes: tuple[type[BasePermission], ...] = (SentryAppsAndStaffPermission,)
-
- def _get_organization_slug(self, request: Request):
- organization_slug = request.json_body.get("organization")
- if not organization_slug or not isinstance(organization_slug, str):
- error_message = "Please provide a valid value for the 'organization' field."
- raise ValidationError({"organization": to_single_line_str(error_message)})
- return organization_slug
-
- def _get_organization_for_superuser_or_staff(
- self, user: RpcUser, organization_slug: str
- ) -> RpcUserOrganizationContext:
- context = organization_service.get_organization_by_slug(
- slug=organization_slug, only_visible=False, user_id=user.id
- )
-
- if context is None:
- error_message = f"Organization '{organization_slug}' does not exist."
- raise ValidationError({"organization": to_single_line_str(error_message)})
-
- return context
-
- def _get_organization_for_user(
- self, user: RpcUser, organization_slug: str
- ) -> RpcUserOrganizationContext:
- context = organization_service.get_organization_by_slug(
- slug=organization_slug, only_visible=True, user_id=user.id
- )
- if context is None or context.member is None:
- error_message = f"User does not belong to the '{organization_slug}' organization."
- raise PermissionDenied(to_single_line_str(error_message))
- return context
-
- def _get_org_context(self, request: Request) -> RpcUserOrganizationContext:
- organization_slug = self._get_organization_slug(request)
- if is_active_superuser(request) or is_active_staff(request):
- return self._get_organization_for_superuser_or_staff(request.user, organization_slug)
- else:
- return self._get_organization_for_user(request.user, organization_slug)
-
- def convert_args(self, request: Request, *args, **kwargs):
- """
- This baseclass is the SentryApp collection endpoints:
-
- [GET, POST] /sentry-apps
-
- The GET endpoint is public and doesn't require (or handle) any query
- params or request body.
-
- The POST endpoint is for creating a Sentry App. Part of that creation
- is associating it with the Organization that it's created within.
-
- So in the case of POST requests, we want to pull the Organization out
- of the request body so that we can ensure the User making the request
- has access to it.
-
- Since ``convert_args`` is conventionally where you materialize model
- objects from URI params, we're applying the same logic for a param in
- the request body.
- """
- if not request.json_body:
- return (args, kwargs)
-
- context = self._get_org_context(request)
- self.check_object_permissions(request, context)
- kwargs["organization"] = context.organization
-
- return (args, kwargs)
-
-
-class SentryAppPermission(SentryPermission):
- unpublished_scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- "PUT": ("org:write", "org:admin"),
- "POST": ("org:admin",), # used for publishing an app
- "DELETE": ("org:admin",),
- }
-
- published_scope_map = {
- "GET": PARANOID_GET,
- "PUT": ("org:write", "org:admin"),
- "POST": ("org:admin",),
- "DELETE": ("org:admin",),
- }
-
- @property
- def scope_map(self):
- return self.published_scope_map
-
- def has_object_permission(self, request: Request, view, sentry_app: RpcSentryApp | SentryApp):
- if not hasattr(request, "user") or not request.user:
- return False
-
- owner_app = organization_service.get_organization_by_id(
- id=sentry_app.owner_id, user_id=request.user.id
- )
- self.determine_access(request, owner_app)
-
- if superuser_has_permission(request):
- return True
-
- organizations = (
- user_service.get_organizations(user_id=request.user.id)
- if request.user.id is not None
- else ()
- )
- # if app is unpublished, user must be in the Org who owns the app.
- if not sentry_app.is_published:
- if not any(sentry_app.owner_id == org.id for org in organizations):
- raise Http404
-
- # TODO(meredith): make a better way to allow for public
- # endpoints. we can't use ensure_scoped_permission now
- # that the public endpoint isn't denoted by '()'
- if sentry_app.is_published and request.method == "GET":
- return True
-
- return ensure_scoped_permission(
- request, self._scopes_for_sentry_app(sentry_app).get(request.method)
- )
-
- def _scopes_for_sentry_app(self, sentry_app):
- if sentry_app.is_published:
- return self.published_scope_map
- else:
- return self.unpublished_scope_map
-
-
-class SentryAppAndStaffPermission(StaffPermissionMixin, SentryAppPermission):
- """Allows staff to access sentry app endpoints. Note that this is used for
- endpoints acting on a single sentry app only."""
-
- pass
-
-
-class SentryAppBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes: tuple[type[BasePermission], ...] = (SentryAppPermission,)
- def convert_args(
- self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
- ):
- try:
- sentry_app = SentryApp.objects.get(slug__id_or_slug=sentry_app_id_or_slug)
- except SentryApp.DoesNotExist:
- raise Http404
-
- self.check_object_permissions(request, sentry_app)
-
- Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)
-
- kwargs["sentry_app"] = sentry_app
- return (args, kwargs)
-
-
-class RegionSentryAppBaseEndpoint(IntegrationPlatformEndpoint):
- def convert_args(
- self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
- ):
- if str(sentry_app_id_or_slug).isdecimal():
- sentry_app = app_service.get_sentry_app_by_id(id=int(sentry_app_id_or_slug))
- else:
- sentry_app = app_service.get_sentry_app_by_slug(slug=sentry_app_id_or_slug)
- if sentry_app is None:
- raise Http404
-
- self.check_object_permissions(request, sentry_app)
-
- Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)
-
- kwargs["sentry_app"] = sentry_app
- return (args, kwargs)
-
-
-class SentryAppInstallationsPermission(SentryPermission):
- scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- "POST": ("org:integrations", "org:write", "org:admin"),
- }
-
- def has_object_permission(self, request: Request, view, organization):
- if not hasattr(request, "user") or not request.user:
- return False
-
- self.determine_access(request, organization)
-
- if superuser_has_permission(request):
- return True
-
- organizations = (
- user_service.get_organizations(user_id=request.user.id)
- if request.user.id is not None
- else ()
- )
- if not any(organization.id == org.id for org in organizations):
- raise Http404
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppInstallationsBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes = (SentryAppInstallationsPermission,)
-
- def convert_args(self, request: Request, organization_id_or_slug, *args, **kwargs):
- extra_args = {}
- # We need to pass user_id if the user is not a superuser
- if not is_active_superuser(request):
- extra_args["user_id"] = request.user.id
-
- if str(organization_id_or_slug).isdecimal():
- organization = organization_service.get_org_by_id(
- id=int(organization_id_or_slug), **extra_args
- )
- else:
- organization = organization_service.get_org_by_slug(
- slug=str(organization_id_or_slug), **extra_args
- )
-
- if organization is None:
- raise Http404
- self.check_object_permissions(request, organization)
-
- kwargs["organization"] = organization
- return (args, kwargs)
-
-
-class SentryAppInstallationPermission(SentryPermission):
- scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- "DELETE": ("org:integrations", "org:write", "org:admin"),
- # NOTE(mn): The only POST endpoint right now is to create External
- # Issues, which uses this baseclass since it's nested under an
- # installation.
- #
- # The scopes below really only make sense for that endpoint. Any other
- # nested endpoints will probably need different scopes - figure out how
- # to deal with that when it happens.
- "POST": ("org:integrations", "event:write", "event:admin"),
- }
-
- def has_permission(self, request: Request, *args, **kwargs):
- # To let the app mark the installation as installed, we don't care about permissions
- if (
- hasattr(request, "user")
- and hasattr(request.user, "is_sentry_app")
- and request.user.is_sentry_app
- and request.method == "PUT"
- ):
- return True
- return super().has_permission(request, *args, **kwargs)
-
- def has_object_permission(self, request: Request, view, installation):
- if not hasattr(request, "user") or not request.user:
- return False
-
- self.determine_access(request, installation.organization_id)
-
- if superuser_has_permission(request):
- return True
-
- # if user is an app, make sure it's for that same app
- if request.user.is_sentry_app:
- return request.user.id == installation.sentry_app.proxy_user_id
-
- org_context = organization_service.get_organization_by_id(
- id=installation.organization_id,
- user_id=request.user.id,
- include_teams=False,
- include_projects=False,
- )
- if (
- org_context.member is None
- or org_context.organization.status != OrganizationStatus.ACTIVE
- ):
- raise Http404
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppInstallationBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes: tuple[type[BasePermission], ...] = (SentryAppInstallationPermission,)
-
- def convert_args(self, request: Request, uuid, *args, **kwargs):
- installations = app_service.get_many(filter=dict(uuids=[uuid]))
- installation = installations[0] if installations else None
- if installation is None:
- raise Http404
-
- self.check_object_permissions(request, installation)
-
- Scope.get_isolation_scope().set_tag("sentry_app_installation", installation.uuid)
-
- kwargs["installation"] = installation
- return (args, kwargs)
-
-
-class SentryAppInstallationExternalIssuePermission(SentryAppInstallationPermission):
- scope_map = {
- "POST": ("event:read", "event:write", "event:admin"),
- "DELETE": ("event:admin",),
- }
-
-
-class SentryAppInstallationExternalIssueBaseEndpoint(SentryAppInstallationBaseEndpoint):
- permission_classes = (SentryAppInstallationExternalIssuePermission,)
-
-
-class SentryAppAuthorizationsPermission(SentryPermission):
- def has_object_permission(self, request: Request, view, installation):
- if not hasattr(request, "user") or not request.user:
- return False
-
- installation_org_context = organization_service.get_organization_by_id(
- id=installation.organization_id, user_id=request.user.id
- )
- self.determine_access(request, installation_org_context)
-
- if not request.user.is_sentry_app:
- return False
-
- # Request must be made as the app's Proxy User, using their Client ID
- # and Secret.
- return request.user.id == installation.sentry_app.proxy_user_id
-
-
-class SentryAppAuthorizationsBaseEndpoint(SentryAppInstallationBaseEndpoint):
- authentication_classes = (ClientIdSecretAuthentication,)
- permission_classes = (SentryAppAuthorizationsPermission,)
-
-
-class SentryInternalAppTokenPermission(SentryPermission):
- scope_map = {
- "GET": ("org:write", "org:admin"),
- "POST": ("org:write", "org:admin"),
- "DELETE": ("org:write", "org:admin"),
- }
-
- def has_object_permission(self, request: Request, view, sentry_app):
- if not hasattr(request, "user") or not request.user:
- return False
-
- owner_app = organization_service.get_organization_by_id(
- id=sentry_app.owner_id, user_id=request.user.id
- )
- self.determine_access(request, owner_app)
-
- if superuser_has_permission(request):
- return True
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppStatsPermission(SentryPermission):
- scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- # Anyone logged in can increment the stats, so leave the scopes empty
- # Note: this only works for session-based auth so you cannot increment stats through API
- "POST": (),
- }
-
- def has_object_permission(self, request: Request, view, sentry_app: SentryApp | RpcSentryApp):
- if not hasattr(request, "user") or not request.user:
- return False
-
- owner_app = organization_service.get_organization_by_id(
- id=sentry_app.owner_id, user_id=request.user.id
- )
- if owner_app is None:
- logger.error(
- "sentry_app_stats.permission_org_not_found",
- extra={
- "sentry_app_id": sentry_app.id,
- "owner_org_id": sentry_app.owner_id,
- "user_id": request.user.id,
- },
- )
- return False
- self.determine_access(request, owner_app)
-
- if is_active_superuser(request):
- return True
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
+__all__ = (
+ "SentryAppBaseEndpoint",
+ "RegionSentryAppBaseEndpoint",
+ "SentryAppInstallationBaseEndpoint",
+ "SentryAppInstallationsBaseEndpoint",
+)
diff --git a/src/sentry/api/decorators.py b/src/sentry/api/decorators.py
index 921b6953ac952..ef28203d67841 100644
--- a/src/sentry/api/decorators.py
+++ b/src/sentry/api/decorators.py
@@ -19,11 +19,7 @@ def is_considered_sudo(request: Request) -> bool:
or is_api_key_auth(request.auth)
or is_api_token_auth(request.auth)
or is_org_auth_token_auth(request.auth)
- or (
- request.user.is_authenticated
- and not isinstance(request.user, AnonymousUser)
- and not request.user.has_usable_password()
- )
+ or (request.user.is_authenticated and not request.user.has_usable_password())
)
diff --git a/src/sentry/api/endpoints/admin_project_configs.py b/src/sentry/api/endpoints/admin_project_configs.py
index a33b9a6d5a259..3b46ff73c17ac 100644
--- a/src/sentry/api/endpoints/admin_project_configs.py
+++ b/src/sentry/api/endpoints/admin_project_configs.py
@@ -46,7 +46,7 @@ def get(self, request: Request) -> Response:
else:
configs[key] = None
- # TODO if we don't think we'll add anything to the endpoint
+ # TODO: if we don't think we'll add anything to the endpoint
# we may as well return just the configs
return Response({"configs": configs}, status=200)
diff --git a/src/sentry/api/endpoints/api_application_details.py b/src/sentry/api/endpoints/api_application_details.py
index 6eae213e37d30..73f6dadeeba95 100644
--- a/src/sentry/api/endpoints/api_application_details.py
+++ b/src/sentry/api/endpoints/api_application_details.py
@@ -10,8 +10,8 @@
from sentry.api.base import Endpoint, control_silo_endpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
+from sentry.deletions.models.scheduleddeletion import ScheduledDeletion
from sentry.models.apiapplication import ApiApplication, ApiApplicationStatus
-from sentry.models.scheduledeletion import ScheduledDeletion
class ApiApplicationSerializer(serializers.Serializer):
diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py
index e27fd4f2efe3f..f623e71a8e865 100644
--- a/src/sentry/api/endpoints/debug_files.py
+++ b/src/sentry/api/endpoints/debug_files.py
@@ -15,7 +15,7 @@
from symbolic.debuginfo import normalize_debug_id
from symbolic.exceptions import SymbolicError
-from sentry import ratelimits, roles
+from sentry import ratelimits
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -39,6 +39,7 @@
from sentry.models.project import Project
from sentry.models.release import Release, get_artifact_counts
from sentry.models.releasefile import ReleaseFile
+from sentry.roles import organization_roles
from sentry.tasks.assemble import (
AssembleTask,
ChunkFileState,
@@ -53,7 +54,7 @@
_release_suffix = re.compile(r"^(.*)\s+\(([^)]+)\)\s*$")
-def upload_from_request(request, project):
+def upload_from_request(request: Request, project: Project):
if "file" not in request.data:
return Response({"detail": "Missing uploaded file"}, status=400)
fileobj = request.data["file"]
@@ -61,7 +62,7 @@ def upload_from_request(request, project):
return Response(serialize(files, request.user), status=201)
-def has_download_permission(request, project):
+def has_download_permission(request: Request, project: Project):
if is_system_auth(request.auth) or is_active_superuser(request):
return True
@@ -72,7 +73,7 @@ def has_download_permission(request, project):
required_role = organization.get_option("sentry:debug_files_role") or DEBUG_FILES_ROLE_DEFAULT
if request.user.is_sentry_app:
- if roles.get(required_role).priority > roles.get("member").priority:
+ if organization_roles.can_manage("member", required_role):
return request.access.has_scope("project:write")
else:
return request.access.has_scope("project:read")
@@ -86,7 +87,12 @@ def has_download_permission(request, project):
except OrganizationMember.DoesNotExist:
return False
- return roles.get(current_role).priority >= roles.get(required_role).priority
+ if organization_roles.can_manage(current_role, required_role):
+ return True
+
+ # There's an edge case where a team admin is an org member but the required
+ # role is org admin. In that case, the team admin should be able to download.
+ return required_role == "admin" and request.access.has_project_scope(project, "project:write")
def _has_delete_permission(access: Access, project: Project) -> bool:
@@ -104,7 +110,7 @@ class ProguardArtifactReleasesEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)
- def post(self, request: Request, project) -> Response:
+ def post(self, request: Request, project: Project) -> Response:
release_name = request.data.get("release_name")
proguard_uuid = request.data.get("proguard_uuid")
@@ -153,7 +159,7 @@ def post(self, request: Request, project) -> Response:
status=status.HTTP_409_CONFLICT,
)
- def get(self, request: Request, project) -> Response:
+ def get(self, request: Request, project: Project) -> Response:
"""
List a Project's Proguard Associated Releases
````````````````````````````````````````
@@ -189,7 +195,7 @@ class DebugFilesEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)
- def download(self, debug_file_id, project):
+ def download(self, debug_file_id, project: Project):
rate_limited = ratelimits.backend.is_limited(
project=project,
key=f"rl:DSymFilesEndpoint:download:{debug_file_id}:{project.id}",
@@ -223,7 +229,7 @@ def download(self, debug_file_id, project):
except OSError:
raise Http404
- def get(self, request: Request, project) -> Response:
+ def get(self, request: Request, project: Project) -> Response:
"""
List a Project's Debug Information Files
````````````````````````````````````````
@@ -240,7 +246,7 @@ def get(self, request: Request, project) -> Response:
:auth: required
"""
download_requested = request.GET.get("id") is not None
- if download_requested and (has_download_permission(request, project)):
+ if download_requested and has_download_permission(request, project):
return self.download(request.GET.get("id"), project)
elif download_requested:
return Response(status=403)
@@ -335,7 +341,7 @@ def delete(self, request: Request, project: Project) -> Response:
return Response(status=404)
- def post(self, request: Request, project) -> Response:
+ def post(self, request: Request, project: Project) -> Response:
"""
Upload a New File
`````````````````
@@ -367,7 +373,7 @@ class UnknownDebugFilesEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)
- def get(self, request: Request, project) -> Response:
+ def get(self, request: Request, project: Project) -> Response:
checksums = request.GET.getlist("checksums")
missing = ProjectDebugFile.objects.find_missing(checksums, project=project)
return Response({"missing": missing})
@@ -382,7 +388,7 @@ class AssociateDSymFilesEndpoint(ProjectEndpoint):
permission_classes = (ProjectReleasePermission,)
# Legacy endpoint, kept for backwards compatibility
- def post(self, request: Request, project) -> Response:
+ def post(self, request: Request, project: Project) -> Response:
return Response({"associatedDsymFiles": []})
@@ -394,7 +400,7 @@ class DifAssembleEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)
- def post(self, request: Request, project) -> Response:
+ def post(self, request: Request, project: Project) -> Response:
"""
Assemble one or multiple chunks (FileBlob) into debug files
````````````````````````````````````````````````````````````
@@ -517,7 +523,7 @@ class SourceMapsEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)
- def get(self, request: Request, project) -> Response:
+ def get(self, request: Request, project: Project) -> Response:
"""
List a Project's Source Map Archives
````````````````````````````````````
@@ -549,7 +555,7 @@ def get(self, request: Request, project) -> Response:
queryset = queryset.filter(query_q)
- def expose_release(release, count):
+ def expose_release(release, count: int):
return {
"type": "release",
"id": release["id"],
@@ -581,7 +587,7 @@ def serialize_results(results):
on_results=serialize_results,
)
- def delete(self, request: Request, project) -> Response:
+ def delete(self, request: Request, project: Project) -> Response:
"""
Delete an Archive
```````````````````````````````````````````````````
diff --git a/src/sentry/api/endpoints/event_ai_suggested_fix.py b/src/sentry/api/endpoints/event_ai_suggested_fix.py
index d5d32e1b3c9f8..b856ecf96bca4 100644
--- a/src/sentry/api/endpoints/event_ai_suggested_fix.py
+++ b/src/sentry/api/endpoints/event_ai_suggested_fix.py
@@ -297,8 +297,6 @@ class EventAiSuggestedFixEndpoint(ProjectEndpoint):
publish_status = {
"GET": ApiPublishStatus.PRIVATE,
}
- # go away
- private = True
enforce_rate_limit = True
rate_limits = {
"GET": {
diff --git a/src/sentry/api/endpoints/group_ai_autofix.py b/src/sentry/api/endpoints/group_ai_autofix.py
index 7f2835bbeca01..74a1a07e81677 100644
--- a/src/sentry/api/endpoints/group_ai_autofix.py
+++ b/src/sentry/api/endpoints/group_ai_autofix.py
@@ -39,8 +39,6 @@ class GroupAutofixEndpoint(GroupEndpoint):
"GET": ApiPublishStatus.EXPERIMENTAL,
}
owner = ApiOwner.ML_AI
- # go away
- private = True
enforce_rate_limit = True
rate_limits = {
"POST": {
@@ -87,6 +85,7 @@ def _call_autofix(
serialized_event: dict[str, Any],
instruction: str,
timeout_secs: int,
+ pr_to_comment_on_url: str | None = None,
):
path = "/v1/automation/autofix/start"
body = orjson.dumps(
@@ -116,7 +115,8 @@ def _call_autofix(
"organizations:autofix-disable-codebase-indexing",
group.organization,
actor=user,
- )
+ ),
+ "comment_on_pr_with_url": pr_to_comment_on_url,
},
},
option=orjson.OPT_NON_STR_KEYS,
@@ -191,6 +191,7 @@ def post(self, request: Request, group: Group) -> Response:
serialized_event,
data.get("instruction", data.get("additional_context", "")),
TIMEOUT_SECONDS,
+            data.get("pr_to_comment_on_url", None),  # support optional PR URL for copilot
)
except Exception as e:
logger.exception(
diff --git a/src/sentry/api/endpoints/group_ai_summary.py b/src/sentry/api/endpoints/group_ai_summary.py
index 350d6054feaea..43660b92ccb02 100644
--- a/src/sentry/api/endpoints/group_ai_summary.py
+++ b/src/sentry/api/endpoints/group_ai_summary.py
@@ -44,7 +44,6 @@ class GroupAiSummaryEndpoint(GroupEndpoint):
"POST": ApiPublishStatus.EXPERIMENTAL,
}
owner = ApiOwner.ML_AI
- private = True
enforce_rate_limit = True
rate_limits = {
"POST": {
diff --git a/src/sentry/api/endpoints/group_autofix_setup_check.py b/src/sentry/api/endpoints/group_autofix_setup_check.py
index a7d23cadba360..6f0d2e05a5347 100644
--- a/src/sentry/api/endpoints/group_autofix_setup_check.py
+++ b/src/sentry/api/endpoints/group_autofix_setup_check.py
@@ -17,6 +17,7 @@
get_project_codebase_indexing_status,
)
from sentry.autofix.utils import get_autofix_repos_from_project_code_mappings
+from sentry.constants import ObjectStatus
from sentry.integrations.services.integration import integration_service
from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs
from sentry.models.group import Group
@@ -44,7 +45,7 @@ def get_autofix_integration_setup_problems(
organization_integration = organization_integrations[0] if organization_integrations else None
integration = organization_integration and integration_service.get_integration(
- organization_integration_id=organization_integration.id
+ organization_integration_id=organization_integration.id, status=ObjectStatus.ACTIVE
)
installation = integration and integration.get_installation(organization_id=organization.id)
@@ -102,7 +103,6 @@ class GroupAutofixSetupCheck(GroupEndpoint):
"GET": ApiPublishStatus.EXPERIMENTAL,
}
owner = ApiOwner.ML_AI
- private = True
def get(self, request: Request, group: Group) -> Response:
"""
diff --git a/src/sentry/api/endpoints/group_autofix_update.py b/src/sentry/api/endpoints/group_autofix_update.py
index 89545a207e6a8..60906134faad6 100644
--- a/src/sentry/api/endpoints/group_autofix_update.py
+++ b/src/sentry/api/endpoints/group_autofix_update.py
@@ -26,7 +26,6 @@ class GroupAutofixUpdateEndpoint(GroupEndpoint):
"POST": ApiPublishStatus.EXPERIMENTAL,
}
owner = ApiOwner.ML_AI
- private = True
def post(self, request: Request, group: Group) -> Response:
"""
diff --git a/src/sentry/api/endpoints/group_external_issue_details.py b/src/sentry/api/endpoints/group_external_issue_details.py
index 13a730a620fcd..1e04086f98319 100644
--- a/src/sentry/api/endpoints/group_external_issue_details.py
+++ b/src/sentry/api/endpoints/group_external_issue_details.py
@@ -5,7 +5,7 @@
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.group import GroupEndpoint
-from sentry.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
@region_silo_endpoint
diff --git a/src/sentry/api/endpoints/group_external_issues.py b/src/sentry/api/endpoints/group_external_issues.py
index b1c812c432d81..71ecceb05b00b 100644
--- a/src/sentry/api/endpoints/group_external_issues.py
+++ b/src/sentry/api/endpoints/group_external_issues.py
@@ -5,7 +5,7 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.group import GroupEndpoint
from sentry.api.serializers import serialize
-from sentry.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
@region_silo_endpoint
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/__init__.py b/src/sentry/api/endpoints/integrations/sentry_apps/__init__.py
index a3654763c8be0..7f0a6d8ce27d9 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/__init__.py
+++ b/src/sentry/api/endpoints/integrations/sentry_apps/__init__.py
@@ -1,11 +1,5 @@
-from .internal_app_token.details import SentryInternalAppTokenDetailsEndpoint
-from .internal_app_token.index import SentryInternalAppTokensEndpoint
-from .stats.details import SentryAppStatsEndpoint
-from .stats.index import SentryAppsStatsEndpoint
-
-__all__ = (
- "SentryAppsStatsEndpoint",
- "SentryAppStatsEndpoint",
- "SentryInternalAppTokenDetailsEndpoint",
- "SentryInternalAppTokensEndpoint",
+from sentry.sentry_apps.api.endpoints.sentry_internal_app_token_details import (
+ SentryInternalAppTokenDetailsEndpoint,
)
+
+__all__ = ("SentryInternalAppTokenDetailsEndpoint",)
diff --git a/src/sentry/api/endpoints/internal/feature_flags.py b/src/sentry/api/endpoints/internal/feature_flags.py
index f1917b398f802..04775415f41a3 100644
--- a/src/sentry/api/endpoints/internal/feature_flags.py
+++ b/src/sentry/api/endpoints/internal/feature_flags.py
@@ -46,7 +46,7 @@ def put(self, request: Request) -> Response:
for valid_flag in valid_feature_flags:
match_found = False
new_string = (
- f'\nSENTRY_FEATURES["{valid_flag}"]={request.data.get(valid_flag,False)}\n'
+ f'\nSENTRY_FEATURES["{valid_flag}"]={request.data.get(valid_flag, False)}\n'
)
# Search for the string match and update lines
for i, line in enumerate(lines):
diff --git a/src/sentry/api/endpoints/notification_defaults.py b/src/sentry/api/endpoints/notification_defaults.py
index 63a3baf3019ac..2a9684f9143b1 100644
--- a/src/sentry/api/endpoints/notification_defaults.py
+++ b/src/sentry/api/endpoints/notification_defaults.py
@@ -17,7 +17,6 @@ class NotificationDefaultsEndpoints(Endpoint):
}
owner = ApiOwner.ALERTS_NOTIFICATIONS
permission_classes = ()
- private = True
def get(self, request: Request) -> Response:
"""
diff --git a/src/sentry/api/endpoints/organization_access_request_details.py b/src/sentry/api/endpoints/organization_access_request_details.py
index 4dfd8c1dda50b..06e89a732f592 100644
--- a/src/sentry/api/endpoints/organization_access_request_details.py
+++ b/src/sentry/api/endpoints/organization_access_request_details.py
@@ -1,3 +1,5 @@
+import logging
+
from django.db import IntegrityError, router, transaction
from rest_framework import serializers
from rest_framework.request import Request
@@ -11,8 +13,11 @@
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
from sentry.models.organizationaccessrequest import OrganizationAccessRequest
+from sentry.models.organizationmember import OrganizationMember
from sentry.models.organizationmemberteam import OrganizationMemberTeam
+logger = logging.getLogger(__name__)
+
class AccessRequestPermission(OrganizationPermission):
scope_map = {
@@ -71,8 +76,8 @@ def _can_access(self, request: Request, access_request):
def get(self, request: Request, organization) -> Response:
"""
- Get list of requests to join org/team
-
+ Get a list of requests to join org/team.
+ If any requests are redundant (user already joined the team), they are not returned.
"""
if request.access.has_scope("org:write"):
access_requests = list(
@@ -80,7 +85,7 @@ def get(self, request: Request, organization) -> Response:
team__organization=organization,
member__user_is_active=True,
member__user_id__isnull=False,
- ).select_related("team")
+ ).select_related("team", "member")
)
elif request.access.has_scope("team:write") and request.access.team_ids_with_membership:
@@ -89,20 +94,28 @@ def get(self, request: Request, organization) -> Response:
member__user_is_active=True,
member__user_id__isnull=False,
team__id__in=request.access.team_ids_with_membership,
- ).select_related("team")
+ ).select_related("team", "member")
)
else:
# Return empty response if user does not have access
return Response([])
- return Response(serialize(access_requests, request.user))
+ teams_by_user = OrganizationMember.objects.get_teams_by_user(organization=organization)
+
+ # We omit any requests which are now redundant (i.e. the user joined that team some other way)
+ valid_access_requests = [
+ access_request
+ for access_request in access_requests
+ if access_request.member.user_id is not None
+ and access_request.team_id not in teams_by_user[access_request.member.user_id]
+ ]
+
+ return Response(serialize(valid_access_requests, request.user))
def put(self, request: Request, organization, request_id) -> Response:
"""
Approve or deny a request
- Approve or deny a request.
-
{method} {path}
"""
diff --git a/src/sentry/api/endpoints/organization_dashboards.py b/src/sentry/api/endpoints/organization_dashboards.py
index 481c528d32065..5cc723e6a8a49 100644
--- a/src/sentry/api/endpoints/organization_dashboards.py
+++ b/src/sentry/api/endpoints/organization_dashboards.py
@@ -50,9 +50,25 @@ def has_object_permission(self, request: Request, view, obj):
return super().has_object_permission(request, view, obj)
if isinstance(obj, Dashboard):
- for project in obj.projects.all():
- if not request.access.has_project_access(project):
- return False
+ # 1. Dashboard contains certain projects
+ if obj.projects.exists():
+ return request.access.has_projects_access(obj.projects.all())
+
+ # 2. Dashboard covers all projects or all my projects
+
+ # allow when Open Membership
+ if obj.organization.flags.allow_joinleave:
+ return True
+
+ # allow for Managers and Owners
+ if request.access.has_scope("org:write"):
+ return True
+
+ # allow for creator
+ if request.user.id == obj.created_by_id:
+ return True
+
+ return False
return True
diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py
index fb412016f4cf1..258f3202a386f 100644
--- a/src/sentry/api/endpoints/organization_details.py
+++ b/src/sentry/api/endpoints/organization_details.py
@@ -62,9 +62,11 @@
SAFE_FIELDS_DEFAULT,
SCRAPE_JAVASCRIPT_DEFAULT,
SENSITIVE_FIELDS_DEFAULT,
+ TARGET_SAMPLE_RATE_DEFAULT,
UPTIME_AUTODETECTION,
)
from sentry.datascrubbing import validate_pii_config_update, validate_pii_selectors
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.hybridcloud.rpc import IDEMPOTENCY_KEY_LENGTH
from sentry.integrations.utils.codecov import has_codecov_integration
from sentry.lang.native.utils import (
@@ -75,7 +77,6 @@
from sentry.models.avatars.organization_avatar import OrganizationAvatar
from sentry.models.options.organization_option import OrganizationOption
from sentry.models.organization import Organization, OrganizationStatus
-from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.organizations.services.organization import organization_service
from sentry.organizations.services.organization.model import (
RpcOrganization,
@@ -215,6 +216,7 @@
METRICS_ACTIVATE_LAST_FOR_GAUGES_DEFAULT,
),
("uptimeAutodetection", "sentry:uptime_autodetection", bool, UPTIME_AUTODETECTION),
+ ("targetSampleRate", "sentry:target_sample_rate", float, TARGET_SAMPLE_RATE_DEFAULT),
)
DELETION_STATUSES = frozenset(
@@ -276,6 +278,7 @@ class OrganizationSerializer(BaseOrganizationSerializer):
relayPiiConfig = serializers.CharField(required=False, allow_blank=True, allow_null=True)
apdexThreshold = serializers.IntegerField(min_value=1, required=False)
uptimeAutodetection = serializers.BooleanField(required=False)
+ targetSampleRate = serializers.FloatField(required=False)
@cached_property
def _has_legacy_rate_limits(self):
@@ -365,6 +368,25 @@ def validate_projectRateLimit(self, value):
)
return value
+ def validate_targetSampleRate(self, value):
+ from sentry import features
+
+ organization = self.context["organization"]
+ request = self.context["request"]
+ has_dynamic_sampling_custom = features.has(
+ "organizations:dynamic-sampling-custom", organization, actor=request.user
+ )
+ if not has_dynamic_sampling_custom:
+ raise serializers.ValidationError(
+ "Organization does not have the custom dynamic sample rate feature enabled."
+ )
+
+ if not 0.0 <= value <= 1.0:
+ raise serializers.ValidationError(
+ "The targetSampleRate option must be in the range [0:1]"
+ )
+ return value
+
def validate(self, attrs):
attrs = super().validate(attrs)
if attrs.get("avatarType") == "upload":
diff --git a/src/sentry/api/endpoints/organization_environments.py b/src/sentry/api/endpoints/organization_environments.py
index 2b7e261415597..2176808a485e4 100644
--- a/src/sentry/api/endpoints/organization_environments.py
+++ b/src/sentry/api/endpoints/organization_environments.py
@@ -1,5 +1,3 @@
-from typing import TypedDict
-
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
@@ -10,6 +8,7 @@
from sentry.api.bases import OrganizationEndpoint
from sentry.api.helpers.environments import environment_visibility_filter_options
from sentry.api.serializers import serialize
+from sentry.api.serializers.models.environment import EnvironmentSerializerResponse
from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN
from sentry.apidocs.examples.environment_examples import EnvironmentExamples
from sentry.apidocs.parameters import EnvironmentParams, GlobalParams
@@ -17,11 +16,6 @@
from sentry.models.environment import Environment, EnvironmentProject
-class OrganizationEnvironmentResponseType(TypedDict):
- id: int
- name: str
-
-
@extend_schema(tags=["Environments"])
@region_silo_endpoint
class OrganizationEnvironmentsEndpoint(OrganizationEndpoint):
@@ -35,7 +29,7 @@ class OrganizationEnvironmentsEndpoint(OrganizationEndpoint):
parameters=[GlobalParams.ORG_ID_OR_SLUG, EnvironmentParams.VISIBILITY],
responses={
200: inline_sentry_response_serializer(
- "OrganizationEnvironmentResponse", list[OrganizationEnvironmentResponseType]
+ "OrganizationEnvironmentResponse", list[EnvironmentSerializerResponse]
),
400: RESPONSE_BAD_REQUEST,
403: RESPONSE_FORBIDDEN,
diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py
index 672c4441708ea..3930d2130239f 100644
--- a/src/sentry/api/endpoints/organization_events.py
+++ b/src/sentry/api/endpoints/organization_events.py
@@ -59,13 +59,36 @@ class DiscoverDatasetSplitException(Exception):
Referrer.API_DASHBOARDS_BIGNUMBERWIDGET.value,
Referrer.API_DISCOVER_TRANSACTIONS_LIST.value,
Referrer.API_DISCOVER_QUERY_TABLE.value,
+ Referrer.API_PERFORMANCE_BROWSER_RESOURCE_MAIN_TABLE.value,
+ Referrer.API_PERFORMANCE_BROWSER_RESOURCES_PAGE_SELECTOR.value,
+ Referrer.API_PERFORMANCE_BROWSER_WEB_VITALS_PROJECT.value,
+ Referrer.API_PERFORMANCE_BROWSER_WEB_VITALS_PROJECT_SCORES.value,
+ Referrer.API_PERFORMANCE_BROWSER_WEB_VITALS_TRANSACTION.value,
+ Referrer.API_PERFORMANCE_BROWSER_WEB_VITALS_TRANSACTIONS_SCORES.value,
+ Referrer.API_PERFORMANCE_CACHE_LANDING_CACHE_TRANSACTION_LIST.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_APDEX_AREA.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_HIGHEST_CACHE_MISS_RATE_TRANSACTIONS.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_FROZEN_FRAMES.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_SLOW_FRAMES.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_CONSUMING_DOMAINS.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_CONSUMING_RESOURCES.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_SPENT_DB_QUERIES.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_DB_OPS.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_HTTP_OPS.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_RESOURCE_OPS.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_SCREENS_BY_TTID.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_TPM_AREA.value,
+ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_USER_MISERY_AREA.value,
Referrer.API_PERFORMANCE_VITALS_CARDS.value,
Referrer.API_PERFORMANCE_LANDING_TABLE.value,
- Referrer.API_PERFORMANCE_TRANSACTION_SUMMARY.value,
+ Referrer.API_PERFORMANCE_TRANSACTION_EVENTS.value,
+ Referrer.API_PERFORMANCE_TRANSACTION_NAME_SEARCH_BAR.value,
Referrer.API_PERFORMANCE_TRANSACTION_SPANS.value,
+ Referrer.API_PERFORMANCE_TRANSACTION_SUMMARY.value,
Referrer.API_PERFORMANCE_STATUS_BREAKDOWN.value,
Referrer.API_PERFORMANCE_VITAL_DETAIL.value,
Referrer.API_PERFORMANCE_DURATIONPERCENTILECHART.value,
+ Referrer.API_PERFORMANCE_TRACE_TRACE_DRAWER_TRANSACTION_CACHE_METRICS.value,
Referrer.API_PERFORMANCE_TRANSACTIONS_STATISTICAL_DETECTOR_ROOT_CAUSE_ANALYSIS.value,
Referrer.API_PROFILING_LANDING_TABLE.value,
Referrer.API_PROFILING_LANDING_FUNCTIONS_CARD.value,
@@ -83,12 +106,15 @@ class DiscoverDatasetSplitException(Exception):
Referrer.API_TRACE_VIEW_ERRORS_VIEW.value,
Referrer.API_TRACE_VIEW_HOVER_CARD.value,
Referrer.API_ISSUES_ISSUE_EVENTS.value,
+ Referrer.API_STARFISH_DATABASE_SYSTEM_SELECTOR.value,
Referrer.API_STARFISH_ENDPOINT_LIST.value,
+ Referrer.API_STARFISH_FULL_SPAN_FROM_TRACE.value,
Referrer.API_STARFISH_GET_SPAN_ACTIONS.value,
Referrer.API_STARFISH_GET_SPAN_DOMAINS.value,
Referrer.API_STARFISH_GET_SPAN_OPERATIONS.value,
Referrer.API_STARFISH_SIDEBAR_SPAN_METRICS.value,
Referrer.API_STARFISH_SPAN_CATEGORY_BREAKDOWN.value,
+ Referrer.API_STARFISH_SPAN_DESCRIPTION.value,
Referrer.API_STARFISH_SPAN_LIST.value,
Referrer.API_STARFISH_SPAN_SUMMARY_P95.value,
Referrer.API_STARFISH_SPAN_SUMMARY_PAGE.value,
@@ -379,11 +405,12 @@ def get(self, request: Request, organization) -> Response:
if request.auth:
referrer = API_TOKEN_REFERRER
elif referrer not in ALLOWED_EVENTS_REFERRERS:
- with sentry_sdk.isolation_scope() as scope:
- scope.set_tag("forbidden_referrer", referrer)
- sentry_sdk.capture_message(
- "Forbidden Referrer. If this is intentional, add it to `ALLOWED_EVENTS_REFERRERS`"
- )
+ if referrer:
+ with sentry_sdk.isolation_scope() as scope:
+ scope.set_tag("forbidden_referrer", referrer)
+ sentry_sdk.capture_message(
+ "Forbidden Referrer. If this is intentional, add it to `ALLOWED_EVENTS_REFERRERS`"
+ )
referrer = Referrer.API_ORGANIZATION_EVENTS.value
def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
diff --git a/src/sentry/api/endpoints/organization_events_anomalies.py b/src/sentry/api/endpoints/organization_events_anomalies.py
index ea1cc23c5c510..4f5dfc081fe4a 100644
--- a/src/sentry/api/endpoints/organization_events_anomalies.py
+++ b/src/sentry/api/endpoints/organization_events_anomalies.py
@@ -6,6 +6,7 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
+from sentry.api.bases.organization import OrganizationAlertRulePermission
from sentry.api.bases.organization_events import OrganizationEventsV2EndpointBase
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.paginator import OffsetPaginator
@@ -32,6 +33,7 @@ class OrganizationEventsAnomaliesEndpoint(OrganizationEventsV2EndpointBase):
publish_status = {
"POST": ApiPublishStatus.EXPERIMENTAL,
}
+ permission_classes = (OrganizationAlertRulePermission,)
@extend_schema(
operation_id="Identify anomalies in historical data",
@@ -68,7 +70,9 @@ def post(self, request: Request, organization: Organization) -> Response:
"""
Return a list of anomalies for a time series of historical event data.
"""
- if not features.has("organizations:anomaly-detection-alerts", organization):
+ if not features.has(
+ "organizations:anomaly-detection-alerts", organization
+ ) and not features.has("organizations:anomaly-detection-rollout", organization):
raise ResourceDoesNotExist("Your organization does not have access to this feature.")
historical_data = self._format_historical_data(request.data.get("historical_data"))
diff --git a/src/sentry/api/endpoints/organization_events_facets.py b/src/sentry/api/endpoints/organization_events_facets.py
index 1704d7086a485..0879fc0700f11 100644
--- a/src/sentry/api/endpoints/organization_events_facets.py
+++ b/src/sentry/api/endpoints/organization_events_facets.py
@@ -32,7 +32,7 @@ def get(self, request: Request, organization) -> Response:
update_snuba_params_with_timestamp(request, snuba_params, timestamp_key="traceTimestamp")
def data_fn(offset, limit):
- with sentry_sdk.start_span(op="discover.endpoint", description="discover_query"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="discover_query"):
with handle_query_errors():
facets = discover.get_facets(
query=request.GET.get("query"),
@@ -42,9 +42,7 @@ def data_fn(offset, limit):
cursor=offset,
)
- with sentry_sdk.start_span(
- op="discover.endpoint", description="populate_results"
- ) as span:
+ with sentry_sdk.start_span(op="discover.endpoint", name="populate_results") as span:
span.set_data("facet_count", len(facets or []))
resp = defaultdict(lambda: {"key": "", "topValues": []})
for row in facets:
diff --git a/src/sentry/api/endpoints/organization_events_facets_performance.py b/src/sentry/api/endpoints/organization_events_facets_performance.py
index 4ee603dd1d4fa..c01dca77371f2 100644
--- a/src/sentry/api/endpoints/organization_events_facets_performance.py
+++ b/src/sentry/api/endpoints/organization_events_facets_performance.py
@@ -90,7 +90,7 @@ def get(self, request: Request, organization) -> Response:
tag_key = TAG_ALIASES.get(tag_key)
def data_fn(offset, limit):
- with sentry_sdk.start_span(op="discover.endpoint", description="discover_query"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="discover_query"):
referrer = "api.organization-events-facets-performance.top-tags"
tag_data = query_tag_data(
filter_query=filter_query,
@@ -178,7 +178,7 @@ def get(self, request: Request, organization) -> Response:
tag_key = TAG_ALIASES.get(tag_key)
def data_fn(offset, limit, raw_limit):
- with sentry_sdk.start_span(op="discover.endpoint", description="discover_query"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="discover_query"):
referrer = "api.organization-events-facets-performance-histogram"
top_tags = query_top_tags(
tag_key=tag_key,
@@ -269,9 +269,7 @@ def query_tag_data(
:return: Returns the row with aggregate and count if the query was successful
Returns None if query was not successful which causes the endpoint to return early
"""
- with sentry_sdk.start_span(
- op="discover.discover", description="facets.filter_transform"
- ) as span:
+ with sentry_sdk.start_span(op="discover.discover", name="facets.filter_transform") as span:
span.set_data("query", filter_query)
tag_query = DiscoverQueryBuilder(
dataset=Dataset.Discover,
@@ -289,7 +287,7 @@ def query_tag_data(
Condition(tag_query.resolve_column(aggregate_column), Op.IS_NOT_NULL)
)
- with sentry_sdk.start_span(op="discover.discover", description="facets.frequent_tags"):
+ with sentry_sdk.start_span(op="discover.discover", name="facets.frequent_tags"):
# Get the average and count to use to filter the next request to facets
tag_data = tag_query.run_query(f"{referrer}.all_transactions")
@@ -324,7 +322,7 @@ def query_top_tags(
"""
translated_aggregate_column = discover.resolve_discover_column(aggregate_column)
- with sentry_sdk.start_span(op="discover.discover", description="facets.top_tags"):
+ with sentry_sdk.start_span(op="discover.discover", name="facets.top_tags"):
if not orderby:
orderby = ["-count"]
@@ -399,9 +397,7 @@ def query_facet_performance(
tag_key_limit = limit if tag_key else 1
- with sentry_sdk.start_span(
- op="discover.discover", description="facets.filter_transform"
- ) as span:
+ with sentry_sdk.start_span(op="discover.discover", name="facets.filter_transform") as span:
span.set_data("query", filter_query)
tag_query = DiscoverQueryBuilder(
dataset=Dataset.Discover,
@@ -452,7 +448,7 @@ def query_facet_performance(
["trace", "trace.ctx", "trace.span", "project", "browser", "celery_task_id", "url"],
)
- with sentry_sdk.start_span(op="discover.discover", description="facets.aggregate_tags"):
+ with sentry_sdk.start_span(op="discover.discover", name="facets.aggregate_tags"):
span.set_data("sample_rate", sample_rate)
span.set_data("target_sample", target_sample)
aggregate_comparison = transaction_aggregate * 1.005 if transaction_aggregate else 0
diff --git a/src/sentry/api/endpoints/organization_events_has_measurements.py b/src/sentry/api/endpoints/organization_events_has_measurements.py
index 96e18e20ece72..6c359059d991f 100644
--- a/src/sentry/api/endpoints/organization_events_has_measurements.py
+++ b/src/sentry/api/endpoints/organization_events_has_measurements.py
@@ -58,7 +58,7 @@ def get(self, request: Request, organization) -> Response:
if not self.has_feature(organization, request):
return Response(status=404)
- with sentry_sdk.start_span(op="discover.endpoint", description="parse params"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="parse params"):
try:
# This endpoint only allows for a single project + transaction, so no need
# to check `global-views`.
diff --git a/src/sentry/api/endpoints/organization_events_histogram.py b/src/sentry/api/endpoints/organization_events_histogram.py
index 86a244f0a2eed..aa05de8b26383 100644
--- a/src/sentry/api/endpoints/organization_events_histogram.py
+++ b/src/sentry/api/endpoints/organization_events_histogram.py
@@ -72,7 +72,7 @@ def get(self, request: Request, organization) -> Response:
sentry_sdk.set_tag("performance.metrics_enhanced", metrics_enhanced)
- with sentry_sdk.start_span(op="discover.endpoint", description="histogram"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="histogram"):
serializer = HistogramSerializer(data=request.GET)
if serializer.is_valid():
data = serializer.validated_data
diff --git a/src/sentry/api/endpoints/organization_events_meta.py b/src/sentry/api/endpoints/organization_events_meta.py
index 87b74bee64340..3b3f8f5375ec2 100644
--- a/src/sentry/api/endpoints/organization_events_meta.py
+++ b/src/sentry/api/endpoints/organization_events_meta.py
@@ -66,7 +66,7 @@ def get(self, request: Request, organization) -> Response:
except NoProjects:
return Response([])
- with sentry_sdk.start_span(op="discover.endpoint", description="find_lookup_keys") as span:
+ with sentry_sdk.start_span(op="discover.endpoint", name="find_lookup_keys") as span:
possible_keys = ["transaction"]
lookup_keys = {key: request.query_params.get(key) for key in possible_keys}
@@ -79,7 +79,7 @@ def get(self, request: Request, organization) -> Response:
)
with handle_query_errors():
- with sentry_sdk.start_span(op="discover.endpoint", description="filter_creation"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="filter_creation"):
projects = self.get_projects(request, organization)
query_kwargs = build_query_params_from_request(
request, organization, projects, snuba_params.environments
@@ -99,10 +99,10 @@ def get(self, request: Request, organization) -> Response:
query_kwargs["actor"] = request.user
- with sentry_sdk.start_span(op="discover.endpoint", description="issue_search"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="issue_search"):
results_cursor = search.backend.query(**query_kwargs)
- with sentry_sdk.start_span(op="discover.endpoint", description="serialize_results") as span:
+ with sentry_sdk.start_span(op="discover.endpoint", name="serialize_results") as span:
results = list(results_cursor)
span.set_data("result_length", len(results))
context = serialize(
diff --git a/src/sentry/api/endpoints/organization_events_spans_histogram.py b/src/sentry/api/endpoints/organization_events_spans_histogram.py
index af54c6ecbfc88..aef74605cf470 100644
--- a/src/sentry/api/endpoints/organization_events_spans_histogram.py
+++ b/src/sentry/api/endpoints/organization_events_spans_histogram.py
@@ -55,7 +55,7 @@ def get(self, request: Request, organization) -> Response:
except NoProjects:
return Response({})
- with sentry_sdk.start_span(op="discover.endpoint", description="spans_histogram"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="spans_histogram"):
serializer = SpansHistogramSerializer(data=request.GET)
if serializer.is_valid():
data = serializer.validated_data
diff --git a/src/sentry/api/endpoints/organization_events_spans_performance.py b/src/sentry/api/endpoints/organization_events_spans_performance.py
index 727c15d8e7d7d..3a359569974cc 100644
--- a/src/sentry/api/endpoints/organization_events_spans_performance.py
+++ b/src/sentry/api/endpoints/organization_events_spans_performance.py
@@ -333,9 +333,7 @@ def get_event_stats(
zerofill_results: bool,
comparison_delta: datetime | None = None,
) -> SnubaTSResult:
- with sentry_sdk.start_span(
- op="discover.discover", description="timeseries.filter_transform"
- ):
+ with sentry_sdk.start_span(op="discover.discover", name="timeseries.filter_transform"):
builder = TimeseriesQueryBuilder(
Dataset.Discover,
{},
@@ -372,9 +370,7 @@ def get_event_stats(
snql_query, "api.organization-events-spans-performance-stats"
)
- with sentry_sdk.start_span(
- op="discover.discover", description="timeseries.transform_results"
- ):
+ with sentry_sdk.start_span(op="discover.discover", name="timeseries.transform_results"):
result = discover.zerofill(
results["data"],
snuba_params.start_date,
diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py
index f46ea78cd5fc9..0a11b698e8c81 100644
--- a/src/sentry/api/endpoints/organization_events_stats.py
+++ b/src/sentry/api/endpoints/organization_events_stats.py
@@ -183,7 +183,7 @@ def check_if_results_have_data(self, results: SnubaTSResult | dict[str, SnubaTSR
def get(self, request: Request, organization: Organization) -> Response:
query_source = self.get_request_source(request)
- with sentry_sdk.start_span(op="discover.endpoint", description="filter_params") as span:
+ with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span:
span.set_data("organization", organization)
top_events = 0
diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py
index 5d509b985534c..92f9f881500a9 100644
--- a/src/sentry/api/endpoints/organization_events_trace.py
+++ b/src/sentry/api/endpoints/organization_events_trace.py
@@ -38,7 +38,7 @@
from sentry.utils.numbers import base32_encode, format_grouped_length
from sentry.utils.sdk import set_measurement
from sentry.utils.snuba import bulk_snuba_queries
-from sentry.utils.validators import INVALID_ID_DETAILS, is_event_id
+from sentry.utils.validators import INVALID_ID_DETAILS, is_event_id, is_span_id
logger: logging.Logger = logging.getLogger(__name__)
MAX_TRACE_SIZE: int = 100
@@ -218,7 +218,7 @@ def __init__(
@property
def nodestore_event(self) -> Event | GroupEvent | None:
if self._nodestore_event is None and not self.fetched_nodestore:
- with sentry_sdk.start_span(op="nodestore", description="get_event_by_id"):
+ with sentry_sdk.start_span(op="nodestore", name="get_event_by_id"):
self.fetched_nodestore = True
self._nodestore_event = eventstore.backend.get_event_by_id(
self.event["project.id"], self.event["id"]
@@ -618,7 +618,11 @@ def query_trace_data(
# id is just for consistent results
transaction_orderby = ["-root", "timestamp", "id"]
if event_id is not None:
- transaction_columns.append(f'to_other(id, "{event_id}", 0, 1) AS target')
+ # Already validated to be one of the two
+ if is_event_id(event_id):
+ transaction_columns.append(f'to_other(id, "{event_id}", 0, 1) AS target')
+ else:
+ transaction_columns.append(f'to_other(trace.span, "{event_id}", 0, 1) AS target')
# Target is the event_id the frontend plans to render, we try to sort it to the top so it loads even if its not
# within the query limit, needs to be the first orderby cause it takes precedence over finding the root
transaction_orderby.insert(0, "-target")
@@ -758,7 +762,7 @@ def build_span_query(trace_id: str, spans_params: SnubaParams, query_spans: list
sentry_sdk.set_measurement("trace_view.spans.span_minimum", span_minimum)
sentry_sdk.set_tag("trace_view.split_by_char.optimization", len(query_spans) > span_minimum)
if len(query_spans) > span_minimum:
- # TODO because we're not doing an IN on a list of literals, snuba will not optimize the query with the HexInt
+ # TODO: because we're not doing an IN on a list of literals, snuba will not optimize the query with the HexInt
# column processor which means we won't be taking advantage of the span_id index but if we only do this when we
# have a lot of query_spans we should have a great performance improvement still once we do that we can simplify
# this code and always apply this optimization
@@ -790,7 +794,7 @@ def augment_transactions_with_spans(
query_source: QuerySource | None = QuerySource.SENTRY_BACKEND,
) -> Sequence[SnubaTransaction]:
"""Augment the list of transactions with parent, error and problem data"""
- with sentry_sdk.start_span(op="augment.transactions", description="setup"):
+ with sentry_sdk.start_span(op="augment.transactions", name="setup"):
trace_parent_spans = set() # parent span ids of segment spans
transaction_problem_map: dict[str, SnubaTransaction] = {}
problem_project_map: dict[int, list[str]] = {}
@@ -819,7 +823,7 @@ def augment_transactions_with_spans(
else:
sentry_sdk.set_tag("trace_view.missing_timestamp_constraints", True)
- with sentry_sdk.start_span(op="augment.transactions", description="get transaction span ids"):
+ with sentry_sdk.start_span(op="augment.transactions", name="get transaction span ids"):
for index, transaction in enumerate(transactions):
transaction["occurrence_spans"] = []
transaction["issue_occurrences"] = []
@@ -839,7 +843,7 @@ def augment_transactions_with_spans(
# parent span ids of the segment spans
trace_parent_spans.add(transaction["trace.parent_span"])
- with sentry_sdk.start_span(op="augment.transactions", description="get perf issue span ids"):
+ with sentry_sdk.start_span(op="augment.transactions", name="get perf issue span ids"):
for problem_project, occurrences in problem_project_map.items():
if occurrences:
issue_occurrences.extend(
@@ -855,7 +859,7 @@ def augment_transactions_with_spans(
set(problem.evidence_data["offender_span_ids"])
)
- with sentry_sdk.start_span(op="augment.transactions", description="create query params"):
+ with sentry_sdk.start_span(op="augment.transactions", name="create query params"):
query_spans = {*trace_parent_spans, *error_spans, *occurrence_spans}
if "" in query_spans:
query_spans.remove("")
@@ -920,7 +924,7 @@ def augment_transactions_with_spans(
parent["span_id"] = pad_span_id(parent["span_id"])
parent_map[parent["span_id"]] = parent
- with sentry_sdk.start_span(op="augment.transactions", description="linking transactions"):
+ with sentry_sdk.start_span(op="augment.transactions", name="linking transactions"):
for transaction in transactions:
# For a given transaction, if parent span id exists in the tranaction (so this is
# not a root span), see if the indexed spans data can tell us what the parent
@@ -929,7 +933,7 @@ def augment_transactions_with_spans(
parent = parent_map.get(transaction["trace.parent_span"])
if parent is not None:
transaction["trace.parent_transaction"] = parent["transaction.id"]
- with sentry_sdk.start_span(op="augment.transactions", description="linking perf issues"):
+ with sentry_sdk.start_span(op="augment.transactions", name="linking perf issues"):
for problem in issue_occurrences:
for span_id in problem.evidence_data["offender_span_ids"]:
parent = parent_map.get(span_id)
@@ -938,7 +942,7 @@ def augment_transactions_with_spans(
occurrence = parent.copy()
occurrence["problem"] = problem
transaction_problem["occurrence_spans"].append(occurrence)
- with sentry_sdk.start_span(op="augment.transactions", description="linking errors"):
+ with sentry_sdk.start_span(op="augment.transactions", name="linking errors"):
for error in errors:
parent = parent_map.get(error["trace.span"])
error["trace.transaction"] = parent["transaction.id"] if parent is not None else None
@@ -1062,10 +1066,12 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht
if detailed and use_spans:
raise ParseError("Cannot return a detailed response while using spans")
limit = min(int(request.GET.get("limit", MAX_TRACE_SIZE)), 10_000)
- event_id = request.GET.get("event_id") or request.GET.get("eventId")
+ event_id = (
+ request.GET.get("targetId") or request.GET.get("event_id") or request.GET.get("eventId")
+ )
# Only need to validate event_id as trace_id is validated in the URL
- if event_id and not is_event_id(event_id):
+ if event_id and not (is_event_id(event_id) or is_span_id(event_id)):
return Response({"detail": INVALID_ID_DETAILS.format("Event ID")}, status=400)
query_source = self.get_request_source(request)
@@ -1233,7 +1239,7 @@ def serialize(
current_generation: int | None = None
root_id: str | None = None
- with sentry_sdk.start_span(op="building.trace", description="light trace"):
+ with sentry_sdk.start_span(op="building.trace", name="light trace"):
# Check if the event is an orphan_error
if not snuba_event or not nodestore_event:
orphan_error = find_event(
@@ -1438,7 +1444,7 @@ def serialize(
to_check.append(root)
iteration = 0
- with sentry_sdk.start_span(op="building.trace", description="full trace"):
+ with sentry_sdk.start_span(op="building.trace", name="full trace"):
has_orphans = False
while parent_map or to_check:
@@ -1613,7 +1619,7 @@ def serialize_with_spans(
if detailed:
raise ParseError("Cannot return a detailed response using Spans")
- with sentry_sdk.start_span(op="serialize", description="create parent map"):
+ with sentry_sdk.start_span(op="serialize", name="create parent map"):
parent_to_children_event_map = defaultdict(list)
serialized_transactions: list[TraceEvent] = []
for transaction in transactions:
@@ -1642,7 +1648,7 @@ def serialize_with_spans(
else:
orphan_errors.append(error)
- with sentry_sdk.start_span(op="serialize", description="associate children"):
+ with sentry_sdk.start_span(op="serialize", name="associate children"):
for trace_event in serialized_transactions:
event_id = trace_event.event["id"]
if event_id in parent_to_children_event_map:
@@ -1653,7 +1659,7 @@ def serialize_with_spans(
parent_error_map.pop(event_id), key=lambda k: k["timestamp"]
)
- with sentry_sdk.start_span(op="serialize", description="more orphans"):
+ with sentry_sdk.start_span(op="serialize", name="more orphans"):
visited_transactions_ids: set[str] = {
root_trace.event["id"] for root_trace in root_traces
}
@@ -1666,7 +1672,7 @@ def serialize_with_spans(
for child in serialized_transaction.children:
visited_transactions_ids.add(child.event["id"])
- with sentry_sdk.start_span(op="serialize", description="sort"):
+ with sentry_sdk.start_span(op="serialize", name="sort"):
# Sort the results so they're consistent
orphan_errors.sort(key=lambda k: k["timestamp"])
root_traces.sort(key=child_sort_key)
@@ -1688,7 +1694,7 @@ def serialize_with_spans(
if serialized_orphan is not None:
result_transactions.append(serialized_orphan)
- with sentry_sdk.start_span(op="serialize", description="to dict"):
+ with sentry_sdk.start_span(op="serialize", name="to dict"):
return {
"transactions": result_transactions,
"orphan_errors": [self.serialize_error(error) for error in orphan_errors],
diff --git a/src/sentry/api/endpoints/organization_events_trends.py b/src/sentry/api/endpoints/organization_events_trends.py
index 63274f5cc45e0..e2cded11a2ee8 100644
--- a/src/sentry/api/endpoints/organization_events_trends.py
+++ b/src/sentry/api/endpoints/organization_events_trends.py
@@ -54,7 +54,7 @@ class TrendColumns(TypedDict):
TREND_TYPES = [IMPROVED, REGRESSION]
-# TODO move this to the builder file and introduce a top-events version instead
+# TODO: move this to the builder file and introduce a top-events version instead
class TrendQueryBuilder(DiscoverQueryBuilder):
def convert_aggregate_filter_to_condition(
self, aggregate_filter: AggregateFilter
@@ -431,7 +431,7 @@ def get(self, request: Request, organization) -> Response:
except NoProjects:
return Response([])
- with sentry_sdk.start_span(op="discover.endpoint", description="trend_dates"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="trend_dates"):
middle_date = request.GET.get("middle")
if middle_date:
try:
diff --git a/src/sentry/api/endpoints/organization_events_trends_v2.py b/src/sentry/api/endpoints/organization_events_trends_v2.py
index e293806d073fa..95ca60a010f0a 100644
--- a/src/sentry/api/endpoints/organization_events_trends_v2.py
+++ b/src/sentry/api/endpoints/organization_events_trends_v2.py
@@ -177,7 +177,7 @@ def get_timeseries(top_events, _, rollup, zerofill_results):
results[result_key]["data"].append(row)
else:
discarded += 1
- # TODO filter out entries that don't have transaction or trend_function
+ # TODO: filter out entries that don't have transaction or trend_function
logger.warning(
"trends.top-events.timeseries.key-mismatch",
extra={
diff --git a/src/sentry/api/endpoints/organization_events_vitals.py b/src/sentry/api/endpoints/organization_events_vitals.py
index 0dc36ffed38ee..c308e82810fcd 100644
--- a/src/sentry/api/endpoints/organization_events_vitals.py
+++ b/src/sentry/api/endpoints/organization_events_vitals.py
@@ -31,7 +31,7 @@ def get(self, request: Request, organization) -> Response:
if not self.has_feature(organization, request):
return Response(status=404)
- with sentry_sdk.start_span(op="discover.endpoint", description="parse params"):
+ with sentry_sdk.start_span(op="discover.endpoint", name="parse params"):
try:
snuba_params = self.get_snuba_params(request, organization)
except NoProjects:
diff --git a/src/sentry/api/endpoints/organization_measurements_meta.py b/src/sentry/api/endpoints/organization_measurements_meta.py
index 8346fb3f82663..bbca2d6a7b9a2 100644
--- a/src/sentry/api/endpoints/organization_measurements_meta.py
+++ b/src/sentry/api/endpoints/organization_measurements_meta.py
@@ -33,7 +33,7 @@ def get(self, request: Request, organization: Organization) -> Response:
use_case_id=UseCaseID.TRANSACTIONS,
)
- with start_span(op="transform", description="metric meta"):
+ with start_span(op="transform", name="metric meta"):
result = {
item["name"]: {
"functions": METRIC_FUNCTION_LIST_BY_TYPE[item["type"]],
diff --git a/src/sentry/api/endpoints/organization_member/details.py b/src/sentry/api/endpoints/organization_member/details.py
index 57db9ef68552d..108d5b7eb0239 100644
--- a/src/sentry/api/endpoints/organization_member/details.py
+++ b/src/sentry/api/endpoints/organization_member/details.py
@@ -508,15 +508,16 @@ def delete(
)
with transaction.atomic(router.db_for_write(OrganizationMember)):
- # Delete any invite requests and pending invites by the deleted member
- existing_invites = OrganizationMember.objects.filter(
- Q(invite_status=InviteStatus.REQUESTED_TO_BE_INVITED.value)
- | Q(token__isnull=False),
- inviter_id=member.user_id,
- organization=organization,
- )
- for om in existing_invites:
- om.delete()
+ if member.user_id:
+ # Delete any invite requests and pending invites by the deleted member
+ existing_invites = OrganizationMember.objects.filter(
+ Q(invite_status=InviteStatus.REQUESTED_TO_BE_INVITED.value)
+ | Q(token__isnull=False),
+ inviter_id=member.user_id,
+ organization=organization,
+ )
+ for om in existing_invites:
+ om.delete()
self.create_audit_entry(
request=request,
diff --git a/src/sentry/api/endpoints/organization_metrics_tag_details.py b/src/sentry/api/endpoints/organization_metrics_tag_details.py
index 58ccb30864611..01b4e3d4dba63 100644
--- a/src/sentry/api/endpoints/organization_metrics_tag_details.py
+++ b/src/sentry/api/endpoints/organization_metrics_tag_details.py
@@ -41,7 +41,7 @@ def get(self, request: Request, organization: Organization, tag_name: str) -> Re
for project in projects
):
if len(metric_names) == 1 and metric_names[0].startswith("d:eap"):
- # TODO hack for EAP, hardcode some metric names
+ # TODO: hack for EAP, hardcode some metric names
if tag_name == "color":
return Response(
[
diff --git a/src/sentry/api/endpoints/organization_metrics_tags.py b/src/sentry/api/endpoints/organization_metrics_tags.py
index 041eb403727ac..52d0cde0c3a01 100644
--- a/src/sentry/api/endpoints/organization_metrics_tags.py
+++ b/src/sentry/api/endpoints/organization_metrics_tags.py
@@ -58,7 +58,7 @@ def get(self, request: Request, organization: Organization) -> Response:
for project in projects
):
if metric_name.startswith("d:eap"):
- # TODO hack for EAP, return a fixed list
+ # TODO: hack for EAP, return a fixed list
return Response([Tag(key="color"), Tag(key="location")])
try:
diff --git a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py
index fc9f1c3c4fe67..1f5901dfe5f28 100644
--- a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py
+++ b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py
@@ -66,9 +66,7 @@ def get(self, request: Request, organization: Organization) -> Response:
if measurement is None:
return Response({"detail": "missing required parameter yAxis"}, status=400)
- with sentry_sdk.start_span(
- op="discover.metrics.endpoint", description="get_full_metrics"
- ) as span:
+ with sentry_sdk.start_span(op="discover.metrics.endpoint", name="get_full_metrics") as span:
span.set_data("organization", organization)
try:
diff --git a/src/sentry/api/endpoints/organization_releases.py b/src/sentry/api/endpoints/organization_releases.py
index de668f2730237..3fe2cab0ef644 100644
--- a/src/sentry/api/endpoints/organization_releases.py
+++ b/src/sentry/api/endpoints/organization_releases.py
@@ -42,6 +42,7 @@
SEMVER_PACKAGE_ALIAS,
)
from sentry.search.events.filter import handle_operator_negation, parse_semver
+from sentry.search.utils import get_latest_release
from sentry.signals import release_created
from sentry.snuba.sessions import STATS_PERIODS
from sentry.types.activity import ActivityType
@@ -101,6 +102,13 @@ def _filter_releases_by_query(queryset, organization, query, filter_params):
query_q = ~Q(version__in=raw_value)
elif search_filter.operator == "IN":
query_q = Q(version__in=raw_value)
+ elif raw_value == "latest":
+ latest_releases = get_latest_release(
+ projects=filter_params["project_id"],
+ environments=filter_params.get("environment"),
+ organization_id=organization.id,
+ )
+ query_q = Q(version__in=latest_releases)
else:
query_q = Q(version=search_filter.value.value)
@@ -238,7 +246,7 @@ def get_projects(self, request: Request, organization, project_ids=None, project
organization,
project_ids=project_ids,
project_slugs=project_slugs,
- include_all_accessible="GET" != request.method,
+ include_all_accessible=False,
)
def get(self, request: Request, organization) -> Response:
diff --git a/src/sentry/api/endpoints/organization_sessions.py b/src/sentry/api/endpoints/organization_sessions.py
index a247080b9b1e3..372c1e58b4012 100644
--- a/src/sentry/api/endpoints/organization_sessions.py
+++ b/src/sentry/api/endpoints/organization_sessions.py
@@ -82,9 +82,7 @@ def get(self, request: Request, organization) -> Response:
def data_fn(offset: int, limit: int) -> SessionsQueryResult:
with self.handle_query_errors():
- with sentry_sdk.start_span(
- op="sessions.endpoint", description="build_sessions_query"
- ):
+ with sentry_sdk.start_span(op="sessions.endpoint", name="build_sessions_query"):
request_limit = None
if request.GET.get("per_page") is not None:
request_limit = limit
diff --git a/src/sentry/api/endpoints/organization_spans_aggregation.py b/src/sentry/api/endpoints/organization_spans_aggregation.py
index 8a4031f75c1bd..dbb7ec1a6d5ed 100644
--- a/src/sentry/api/endpoints/organization_spans_aggregation.py
+++ b/src/sentry/api/endpoints/organization_spans_aggregation.py
@@ -410,7 +410,7 @@ def get(self, request: Request, organization: Organization) -> Response:
)
with sentry_sdk.start_span(
- op="span.aggregation", description="AggregateIndexedSpans.build_aggregate_span_tree"
+ op="span.aggregation", name="AggregateIndexedSpans.build_aggregate_span_tree"
):
aggregated_tree = AggregateIndexedSpans().build_aggregate_span_tree(results)
@@ -442,7 +442,7 @@ def get(self, request: Request, organization: Organization) -> Response:
)
with sentry_sdk.start_span(
- op="span.aggregation", description="AggregateNodestoreSpans.build_aggregate_span_tree"
+ op="span.aggregation", name="AggregateNodestoreSpans.build_aggregate_span_tree"
):
aggregated_tree = AggregateNodestoreSpans().build_aggregate_span_tree(events)
diff --git a/src/sentry/api/endpoints/organization_spans_fields.py b/src/sentry/api/endpoints/organization_spans_fields.py
index d3697277dbae7..6fe1203765fea 100644
--- a/src/sentry/api/endpoints/organization_spans_fields.py
+++ b/src/sentry/api/endpoints/organization_spans_fields.py
@@ -7,6 +7,8 @@
from rest_framework.request import Request
from rest_framework.response import Response
from sentry_protos.snuba.v1alpha.endpoint_tags_list_pb2 import (
+ AttributeValuesRequest,
+ AttributeValuesResponse,
TraceItemAttributesRequest,
TraceItemAttributesResponse,
)
@@ -20,6 +22,7 @@
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase
+from sentry.api.event_search import translate_escape_sequences
from sentry.api.paginator import ChainPaginator
from sentry.api.serializers import serialize
from sentry.api.utils import handle_query_errors
@@ -29,7 +32,7 @@
from sentry.snuba.dataset import Dataset
from sentry.snuba.referrer import Referrer
from sentry.tagstore.types import TagKey, TagValue
-from sentry.utils import snuba
+from sentry.utils import snuba_rpc
# This causes problems if a user sends an attribute with any of these values
# but the meta table currently can't handle that anyways
@@ -55,6 +58,13 @@ class OrganizationSpansFieldsEndpointSerializer(serializers.Serializer):
)
type = serializers.ChoiceField(["string", "number"], required=False)
+ def validate_type(self, value):
+ if value == "string":
+ return AttributeKey.Type.TYPE_STRING
+ if value == "number":
+ return AttributeKey.Type.TYPE_FLOAT
+ raise NotImplementedError
+
def validate(self, attrs):
if attrs["dataset"] == "spans" and attrs.get("type") is None:
raise ParseError(detail='type is required when using dataset="spans"')
@@ -112,9 +122,9 @@ def get(self, request: Request, organization) -> Response:
),
limit=max_span_tags,
offset=0,
- type=AttributeKey.Type.TYPE_STRING,
+ type=serialized["type"],
)
- rpc_response = snuba.rpc(rpc_request, TraceItemAttributesResponse)
+ rpc_response = snuba_rpc.rpc(rpc_request, TraceItemAttributesResponse)
paginator = ChainPaginator(
[
@@ -195,6 +205,68 @@ def get(self, request: Request, organization, key: str) -> Response:
max_span_tag_values = options.get("performance.spans-tags-values.max")
+ serializer = OrganizationSpansFieldsEndpointSerializer(data=request.GET)
+ if not serializer.is_valid():
+ return Response(serializer.errors, status=400)
+ serialized = serializer.validated_data
+
+ if serialized["dataset"] == "spans" and features.has(
+ "organizations:visibility-explore-dataset", organization, actor=request.user
+ ):
+ start_timestamp = Timestamp()
+ start_timestamp.FromDatetime(
+ snuba_params.start_date.replace(hour=0, minute=0, second=0, microsecond=0)
+ )
+
+ end_timestamp = Timestamp()
+ end_timestamp.FromDatetime(
+ snuba_params.end_date.replace(hour=0, minute=0, second=0, microsecond=0)
+ + timedelta(days=1)
+ )
+
+ query = translate_escape_sequences(request.GET.get("query", ""))
+ rpc_request = AttributeValuesRequest(
+ meta=RequestMeta(
+ organization_id=organization.id,
+ cogs_category="performance",
+ referrer=Referrer.API_SPANS_TAG_VALUES_RPC.value,
+ project_ids=snuba_params.project_ids,
+ start_timestamp=start_timestamp,
+ end_timestamp=end_timestamp,
+ trace_item_name=TraceItemName.TRACE_ITEM_NAME_EAP_SPANS,
+ ),
+ name=key,
+ value_substring_match=query,
+ limit=max_span_tag_values,
+ offset=0,
+ )
+ rpc_response = snuba_rpc.rpc(rpc_request, AttributeValuesResponse)
+
+ paginator = ChainPaginator(
+ [
+ [
+ TagValue(
+ key=key,
+ value=tag_value,
+ times_seen=None,
+ first_seen=None,
+ last_seen=None,
+ )
+ for tag_value in rpc_response.values
+ if tag_value
+ ]
+ ],
+ max_limit=max_span_tag_values,
+ )
+
+ return self.paginate(
+ request=request,
+ paginator=paginator,
+ on_results=lambda results: serialize(results, request.user),
+ default_per_page=max_span_tag_values,
+ max_per_page=max_span_tag_values,
+ )
+
executor = SpanFieldValuesAutocompletionExecutor(
snuba_params=snuba_params,
key=key,
@@ -339,7 +411,7 @@ def get_autocomplete_query_base(self) -> BaseQueryBuilder:
def get_autocomplete_results(self, query: BaseQueryBuilder) -> list[TagValue]:
with handle_query_errors():
- results = query.process_results(query.run_query(Referrer.API_SPANS_TAG_KEYS.value))
+ results = query.process_results(query.run_query(Referrer.API_SPANS_TAG_VALUES.value))
return [
TagValue(
diff --git a/src/sentry/api/endpoints/organization_stats_summary.py b/src/sentry/api/endpoints/organization_stats_summary.py
index d81651da576be..311ca67a2c8c1 100644
--- a/src/sentry/api/endpoints/organization_stats_summary.py
+++ b/src/sentry/api/endpoints/organization_stats_summary.py
@@ -142,16 +142,14 @@ def get(self, request: Request, organization) -> HttpResponse:
"""
with self.handle_query_errors():
tenant_ids = {"organization_id": organization.id}
- with sentry_sdk.start_span(op="outcomes.endpoint", description="build_outcomes_query"):
+ with sentry_sdk.start_span(op="outcomes.endpoint", name="build_outcomes_query"):
query = self.build_outcomes_query(
request,
organization,
)
- with sentry_sdk.start_span(op="outcomes.endpoint", description="run_outcomes_query"):
+ with sentry_sdk.start_span(op="outcomes.endpoint", name="run_outcomes_query"):
result_totals = run_outcomes_query_totals(query, tenant_ids=tenant_ids)
- with sentry_sdk.start_span(
- op="outcomes.endpoint", description="massage_outcomes_result"
- ):
+ with sentry_sdk.start_span(op="outcomes.endpoint", name="massage_outcomes_result"):
projects, result = massage_sessions_result_summary(
query, result_totals, request.GET.getlist("outcome")
)
diff --git a/src/sentry/api/endpoints/organization_stats_v2.py b/src/sentry/api/endpoints/organization_stats_v2.py
index 833879efc119e..9627e04d077b8 100644
--- a/src/sentry/api/endpoints/organization_stats_v2.py
+++ b/src/sentry/api/endpoints/organization_stats_v2.py
@@ -166,21 +166,19 @@ def get(self, request: Request, organization) -> Response:
with self.handle_query_errors():
tenant_ids = {"organization_id": organization.id}
- with sentry_sdk.start_span(op="outcomes.endpoint", description="build_outcomes_query"):
+ with sentry_sdk.start_span(op="outcomes.endpoint", name="build_outcomes_query"):
query = self.build_outcomes_query(
request,
organization,
)
- with sentry_sdk.start_span(op="outcomes.endpoint", description="run_outcomes_query"):
+ with sentry_sdk.start_span(op="outcomes.endpoint", name="run_outcomes_query"):
result_totals = run_outcomes_query_totals(query, tenant_ids=tenant_ids)
result_timeseries = (
None
if "project_id" in query.query_groupby
else run_outcomes_query_timeseries(query, tenant_ids=tenant_ids)
)
- with sentry_sdk.start_span(
- op="outcomes.endpoint", description="massage_outcomes_result"
- ):
+ with sentry_sdk.start_span(op="outcomes.endpoint", name="massage_outcomes_result"):
result = massage_outcomes_result(query, result_totals, result_timeseries)
return Response(result, status=200)
diff --git a/src/sentry/api/endpoints/organization_tags.py b/src/sentry/api/endpoints/organization_tags.py
index 4a99e726ce231..197210992ecd4 100644
--- a/src/sentry/api/endpoints/organization_tags.py
+++ b/src/sentry/api/endpoints/organization_tags.py
@@ -37,7 +37,7 @@ def get(self, request: Request, organization) -> Response:
else:
dataset = Dataset.Discover
- with sentry_sdk.start_span(op="tagstore", description="get_tag_keys_for_projects"):
+ with sentry_sdk.start_span(op="tagstore", name="get_tag_keys_for_projects"):
with handle_query_errors():
results = tagstore.backend.get_tag_keys_for_projects(
filter_params["project_id"],
diff --git a/src/sentry/api/endpoints/organization_teams.py b/src/sentry/api/endpoints/organization_teams.py
index 505c6bc3d76fb..5b7f1f6bea456 100644
--- a/src/sentry/api/endpoints/organization_teams.py
+++ b/src/sentry/api/endpoints/organization_teams.py
@@ -18,6 +18,7 @@
from sentry.apidocs.examples.team_examples import TeamExamples
from sentry.apidocs.parameters import CursorQueryParam, GlobalParams, TeamParams
from sentry.apidocs.utils import inline_sentry_response_serializer
+from sentry.db.models.fields.slug import DEFAULT_SLUG_MAX_LENGTH
from sentry.integrations.models.external_actor import ExternalActor
from sentry.models.organizationmember import OrganizationMember
from sentry.models.organizationmemberteam import OrganizationMemberTeam
@@ -44,7 +45,7 @@ class TeamPostSerializer(serializers.Serializer):
slug = SentrySerializerSlugField(
help_text="""Uniquely identifies a team and is used for the interface. If not
provided, it is automatically generated from the name.""",
- max_length=50,
+ max_length=DEFAULT_SLUG_MAX_LENGTH,
required=False,
allow_null=True,
)
diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py
index abfa434f5ecbc..4018865ed536f 100644
--- a/src/sentry/api/endpoints/organization_traces.py
+++ b/src/sentry/api/endpoints/organization_traces.py
@@ -7,6 +7,7 @@
from typing import Any, Literal, NotRequired, TypedDict
import sentry_sdk
+from django.utils import timezone
from rest_framework import serializers
from rest_framework.exceptions import ParseError, ValidationError
from rest_framework.request import Request
@@ -27,7 +28,9 @@
from sentry.search.events.builder.base import BaseQueryBuilder
from sentry.search.events.builder.discover import DiscoverQueryBuilder
from sentry.search.events.builder.spans_indexed import (
+ SpansEAPQueryBuilder,
SpansIndexedQueryBuilder,
+ TimeseriesSpanEAPIndexedQueryBuilder,
TimeseriesSpanIndexedQueryBuilder,
)
from sentry.search.events.constants import TIMEOUT_SPAN_ERROR_MESSAGE
@@ -81,6 +84,9 @@ class TraceResult(TypedDict):
class OrganizationTracesSerializer(serializers.Serializer):
+ dataset = serializers.ChoiceField(
+ ["spans", "spansIndexed"], required=False, default="spansIndexed"
+ )
metricsMax = serializers.FloatField(required=False)
metricsMin = serializers.FloatField(required=False)
metricsOp = serializers.CharField(required=False)
@@ -91,6 +97,24 @@ class OrganizationTracesSerializer(serializers.Serializer):
query = serializers.ListField(
required=False, allow_empty=True, child=serializers.CharField(allow_blank=True)
)
+ sort = serializers.CharField(required=False)
+
+ def validate_dataset(self, value):
+ if value == "spans":
+ return Dataset.EventsAnalyticsPlatform
+ if value == "spansIndexed":
+ return Dataset.SpansIndexed
+ raise ParseError(detail=f"Unsupported dataset: {value}")
+
+ def validate(self, data):
+ if data["dataset"] == Dataset.EventsAnalyticsPlatform:
+ sort = data.get("sort")
+ if sort is not None:
+ sort_field = sort[1:] if sort.startswith("-") else sort
+
+ if sort_field not in {"timestamp"}:
+ raise ParseError(detail=f"Unsupported sort: {sort}")
+ return data
@contextmanager
@@ -124,14 +148,21 @@ def get(self, request: Request, organization: Organization) -> Response:
except NoProjects:
return Response(status=404)
+ buffer = options.get("performance.traces.trace-explorer-skip-recent-seconds")
+ now = timezone.now() - timedelta(seconds=buffer)
+ assert snuba_params.end is not None
+ snuba_params.end = min(snuba_params.end, now)
+
serializer = OrganizationTracesSerializer(data=request.GET)
if not serializer.is_valid():
return Response(serializer.errors, status=400)
serialized = serializer.validated_data
executor = TracesExecutor(
+ dataset=serialized["dataset"],
snuba_params=snuba_params,
user_queries=serialized.get("query", []),
+ sort=serialized.get("sort"),
metrics_max=serialized.get("metricsMax"),
metrics_min=serialized.get("metricsMin"),
metrics_operation=serialized.get("metricsOp"),
@@ -163,6 +194,9 @@ def get(self, request: Request, organization: Organization) -> Response:
class OrganizationTraceSpansSerializer(serializers.Serializer):
+ dataset = serializers.ChoiceField(
+ ["spans", "spansIndexed"], required=False, default="spansIndexed"
+ )
metricsMax = serializers.FloatField(required=False)
metricsMin = serializers.FloatField(required=False)
metricsOp = serializers.CharField(required=False)
@@ -175,6 +209,13 @@ class OrganizationTraceSpansSerializer(serializers.Serializer):
required=False, allow_empty=True, child=serializers.CharField(allow_blank=True)
)
+ def validate_dataset(self, value):
+ if value == "spans":
+ return Dataset.EventsAnalyticsPlatform
+ if value == "spansIndexed":
+ return Dataset.SpansIndexed
+ raise ParseError(detail=f"Unsupported dataset: {value}")
+
@region_silo_endpoint
class OrganizationTraceSpansEndpoint(OrganizationTracesEndpointBase):
@@ -197,6 +238,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Re
serialized = serializer.validated_data
executor = TraceSpansExecutor(
+ dataset=serialized["dataset"],
snuba_params=snuba_params,
trace_id=trace_id,
fields=serialized["field"],
@@ -224,11 +266,21 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Re
class OrganizationTracesStatsSerializer(serializers.Serializer):
+ dataset = serializers.ChoiceField(
+ ["spans", "spansIndexed"], required=False, default="spansIndexed"
+ )
query = serializers.ListField(
required=False, allow_empty=True, child=serializers.CharField(allow_blank=True)
)
yAxis = serializers.ListField(required=True, child=serializers.CharField())
+ def validate_dataset(self, value):
+ if value == "spans":
+ return Dataset.EventsAnalyticsPlatform
+ if value == "spansIndexed":
+ return Dataset.SpansIndexed
+ raise ParseError(detail=f"Unsupported dataset: {value}")
+
@region_silo_endpoint
class OrganizationTracesStatsEndpoint(OrganizationTracesEndpointBase):
@@ -271,6 +323,7 @@ def get_event_stats(
comparison_delta: timedelta | None,
) -> SnubaTSResult:
executor = TraceStatsExecutor(
+ dataset=serialized["dataset"],
snuba_params=snuba_params,
columns=serialized["yAxis"],
user_queries=serialized.get("query", []),
@@ -300,8 +353,10 @@ class TracesExecutor:
def __init__(
self,
*,
+ dataset: Dataset,
snuba_params: SnubaParams,
user_queries: list[str],
+ sort: str | None,
metrics_max: float | None,
metrics_min: float | None,
metrics_operation: str | None,
@@ -311,8 +366,10 @@ def __init__(
breakdown_slices: int,
get_all_projects: Callable[[], list[Project]],
):
+ self.dataset = dataset
self.snuba_params = snuba_params
- self.user_queries = process_user_queries(snuba_params, user_queries)
+ self.user_queries = process_user_queries(snuba_params, user_queries, dataset)
+ self.sort = sort
self.metrics_max = metrics_max
self.metrics_min = metrics_min
self.metrics_operation = metrics_operation
@@ -338,11 +395,11 @@ def _execute(self):
self.snuba_params,
)
- self.refine_params(min_timestamp, max_timestamp)
-
if not trace_ids:
return []
+ self.refine_params(min_timestamp, max_timestamp)
+
with handle_span_query_errors():
snuba_params = self.params_with_all_projects()
@@ -384,6 +441,9 @@ def _execute(self):
traces_breakdown_projects_results=traces_breakdown_projects_results,
)
+ ordering = {trace_id: i for i, trace_id in enumerate(trace_ids)}
+ data.sort(key=lambda trace: ordering[trace["trace"]])
+
return data
def refine_params(self, min_timestamp: datetime, max_timestamp: datetime):
@@ -564,21 +624,99 @@ def get_traces_matching_span_conditions_in_traces(
def get_traces_matching_span_conditions_query(
self,
snuba_params: SnubaParams,
- sort: str | None = None,
+ ) -> tuple[BaseQueryBuilder, str]:
+ if self.dataset == Dataset.EventsAnalyticsPlatform:
+ return self.get_traces_matching_span_conditions_query_eap(snuba_params)
+ return self.get_traces_matching_span_conditions_query_indexed(snuba_params)
+
+ def get_traces_matching_span_conditions_query_eap(
+ self,
+ snuba_params: SnubaParams,
) -> tuple[BaseQueryBuilder, str]:
if len(self.user_queries) < 2:
timestamp_column = "timestamp"
else:
timestamp_column = "min(timestamp)"
- if sort == "-timestamp":
+ if self.sort == "-timestamp":
orderby = [f"-{timestamp_column}"]
+ elif self.sort == "timestamp":
+ orderby = [timestamp_column]
else:
# The orderby is intentionally `None` here as this query is much faster
# if we let Clickhouse decide which order to return the results in.
# This also means we cannot order by any columns or paginate.
orderby = None
+ if len(self.user_queries) < 2:
+ # Optimization: If there is only a condition for a single span,
+ # we can take the fast path and query without using aggregates.
+ query = SpansEAPQueryBuilder(
+ Dataset.EventsAnalyticsPlatform,
+ params={},
+ snuba_params=snuba_params,
+ query=None,
+ selected_columns=["trace", timestamp_column],
+ orderby=orderby,
+ limit=self.limit,
+ limitby=("trace", 1),
+ config=QueryBuilderConfig(
+ transform_alias_to_input_format=True,
+ ),
+ )
+
+ for where in self.user_queries.values():
+ query.where.extend(where)
+ else:
+ query = SpansEAPQueryBuilder(
+ Dataset.EventsAnalyticsPlatform,
+ params={},
+ snuba_params=snuba_params,
+ query=None,
+ selected_columns=["trace", timestamp_column],
+ orderby=orderby,
+ limit=self.limit,
+ limitby=("trace", 1),
+ config=QueryBuilderConfig(
+ auto_aggregations=True,
+ transform_alias_to_input_format=True,
+ ),
+ )
+
+ trace_conditions = []
+ for where in self.user_queries.values():
+ if len(where) == 1:
+ trace_conditions.extend(where)
+ elif len(where) > 1:
+ trace_conditions.append(BooleanCondition(op=BooleanOp.AND, conditions=where))
+
+ # Transform the condition into it's aggregate form so it can be used to
+ # match on the trace.
+ new_condition = generate_trace_condition(where)
+ if new_condition:
+ query.having.append(new_condition)
+
+ if len(trace_conditions) == 1:
+ # This should never happen since it should use a flat query
+ # but handle it just in case.
+ query.where.extend(trace_conditions)
+ elif len(trace_conditions) > 1:
+ query.where.append(BooleanCondition(op=BooleanOp.OR, conditions=trace_conditions))
+
+ if options.get("performance.traces.trace-explorer-skip-floating-spans"):
+ query.add_conditions([Condition(Column("segment_id"), Op.NEQ, "00")])
+
+ return query, timestamp_column
+
+ def get_traces_matching_span_conditions_query_indexed(
+ self,
+ snuba_params: SnubaParams,
+ ) -> tuple[BaseQueryBuilder, str]:
+ if len(self.user_queries) < 2:
+ timestamp_column = "timestamp"
+ else:
+ timestamp_column = "min(timestamp)"
+
if len(self.user_queries) < 2:
# Optimization: If there is only a condition for a single span,
# we can take the fast path and query without using aggregates.
@@ -588,7 +726,6 @@ def get_traces_matching_span_conditions_query(
snuba_params=snuba_params,
query=None,
selected_columns=["trace", timestamp_column],
- orderby=orderby,
limit=self.limit,
limitby=("trace", 1),
config=QueryBuilderConfig(
@@ -605,7 +742,6 @@ def get_traces_matching_span_conditions_query(
snuba_params=snuba_params,
query=None,
selected_columns=["trace", timestamp_column],
- orderby=orderby,
limit=self.limit,
config=QueryBuilderConfig(
auto_aggregations=True,
@@ -759,6 +895,50 @@ def get_traces_breakdown_projects_query(
self,
snuba_params: SnubaParams,
trace_ids: list[str],
+ ) -> tuple[BaseQueryBuilder, Referrer]:
+ if self.dataset == Dataset.EventsAnalyticsPlatform:
+ return self.get_traces_breakdown_projects_query_eap(snuba_params, trace_ids)
+ return self.get_traces_breakdown_projects_query_indexed(snuba_params, trace_ids)
+
+ def get_traces_breakdown_projects_query_eap(
+ self,
+ snuba_params: SnubaParams,
+ trace_ids: list[str],
+ ) -> tuple[BaseQueryBuilder, Referrer]:
+ query = SpansEAPQueryBuilder(
+ Dataset.EventsAnalyticsPlatform,
+ params={},
+ snuba_params=snuba_params,
+ query="is_transaction:1",
+ selected_columns=[
+ "trace",
+ "project",
+ "sdk.name",
+ "span.op",
+ "parent_span",
+ "transaction",
+ "precise.start_ts",
+ "precise.finish_ts",
+ ],
+ orderby=["precise.start_ts", "-precise.finish_ts"],
+ # limit the number of segments we fetch per trace so a single
+ # large trace does not result in the rest being blank
+ limitby=("trace", int(MAX_SNUBA_RESULTS / len(trace_ids))),
+ limit=MAX_SNUBA_RESULTS,
+ config=QueryBuilderConfig(
+ transform_alias_to_input_format=True,
+ ),
+ )
+
+ # restrict the query to just this subset of trace ids
+ query.add_conditions([Condition(Column("trace_id"), Op.IN, trace_ids)])
+
+ return query, Referrer.API_TRACE_EXPLORER_TRACES_BREAKDOWNS
+
+ def get_traces_breakdown_projects_query_indexed(
+ self,
+ snuba_params: SnubaParams,
+ trace_ids: list[str],
) -> tuple[BaseQueryBuilder, Referrer]:
query = SpansIndexedQueryBuilder(
Dataset.SpansIndexed,
@@ -794,6 +974,74 @@ def get_traces_metas_query(
self,
snuba_params: SnubaParams,
trace_ids: list[str],
+ ) -> tuple[BaseQueryBuilder, Referrer]:
+ if self.dataset == Dataset.EventsAnalyticsPlatform:
+ return self.get_traces_metas_query_eap(snuba_params, trace_ids)
+ return self.get_traces_metas_query_indexed(snuba_params, trace_ids)
+
+ def get_traces_metas_query_eap(
+ self,
+ snuba_params: SnubaParams,
+ trace_ids: list[str],
+ ) -> tuple[BaseQueryBuilder, Referrer]:
+ query = SpansEAPQueryBuilder(
+ Dataset.EventsAnalyticsPlatform,
+ params={},
+ snuba_params=snuba_params,
+ query=None,
+ selected_columns=[
+ "trace",
+ "count()",
+ "first_seen()",
+ "last_seen()",
+ ],
+ limit=len(trace_ids),
+ config=QueryBuilderConfig(
+ functions_acl=["first_seen", "last_seen"],
+ transform_alias_to_input_format=True,
+ ),
+ )
+
+ # restrict the query to just this subset of trace ids
+ query.add_conditions([Condition(Column("trace_id"), Op.IN, trace_ids)])
+
+ """
+ We want to get a count of the number of matching spans. To do this, we have to
+ translate the user queries into conditions, and get a count of spans that match
+ any one of the user queries.
+ """
+
+ # Translate each user query into a condition to match one
+ trace_conditions = []
+ for where in self.user_queries.values():
+ trace_condition = format_as_trace_conditions(where)
+ if not trace_condition:
+ continue
+ elif len(trace_condition) == 1:
+ trace_conditions.append(trace_condition[0])
+ else:
+ trace_conditions.append(Function("and", trace_condition))
+
+ # Join all the user queries together into a single one where at least 1 has
+ # to be true.
+ if not trace_conditions:
+ query.columns.append(Function("count", [], MATCHING_COUNT_ALIAS))
+ elif len(trace_conditions) == 1:
+ query.columns.append(Function("countIf", trace_conditions, MATCHING_COUNT_ALIAS))
+ else:
+ query.columns.append(
+ Function("countIf", [Function("or", trace_conditions)], MATCHING_COUNT_ALIAS)
+ )
+
+ if options.get("performance.traces.trace-explorer-skip-floating-spans"):
+ query.add_conditions([Condition(Column("segment_id"), Op.NEQ, "00")])
+
+ return query, Referrer.API_TRACE_EXPLORER_TRACES_META
+
+ def get_traces_metas_query_indexed(
+ self,
+ snuba_params: SnubaParams,
+ trace_ids: list[str],
) -> tuple[BaseQueryBuilder, Referrer]:
query = SpansIndexedQueryBuilder(
Dataset.SpansIndexed,
@@ -898,6 +1146,7 @@ class TraceSpansExecutor:
def __init__(
self,
*,
+ dataset: Dataset,
snuba_params: SnubaParams,
trace_id: str,
fields: list[str],
@@ -909,10 +1158,11 @@ def __init__(
metrics_query: str | None,
mri: str | None,
):
+ self.dataset = dataset
self.snuba_params = snuba_params
self.trace_id = trace_id
self.fields = fields
- self.user_queries = process_user_queries(snuba_params, user_queries)
+ self.user_queries = process_user_queries(snuba_params, user_queries, dataset)
self.metrics_max = metrics_max
self.metrics_min = metrics_min
self.metrics_operation = metrics_operation
@@ -990,6 +1240,76 @@ def get_user_spans_query(
span_keys: list[SpanKey] | None,
limit: int,
offset: int,
+ ) -> BaseQueryBuilder:
+ if self.dataset == Dataset.EventsAnalyticsPlatform:
+ # span_keys is not supported in EAP mode because that's a legacy
+ # code path to support metrics that no longer exists
+ return self.get_user_spans_query_eap(snuba_params, limit, offset)
+ return self.get_user_spans_query_indexed(snuba_params, span_keys, limit, offset)
+
+ def get_user_spans_query_eap(
+ self,
+ snuba_params: SnubaParams,
+ limit: int,
+ offset: int,
+ ) -> BaseQueryBuilder:
+ user_spans_query = SpansEAPQueryBuilder(
+ Dataset.EventsAnalyticsPlatform,
+ params={},
+ snuba_params=snuba_params,
+ query=None, # Note: conditions are added below
+ selected_columns=self.fields,
+ orderby=self.sort,
+ limit=limit,
+ offset=offset,
+ config=QueryBuilderConfig(
+ transform_alias_to_input_format=True,
+ ),
+ )
+
+ user_conditions = []
+
+ for where in self.user_queries.values():
+ user_conditions.append(where)
+
+ # First make sure that we only return spans from the trace specified
+ user_spans_query.add_conditions([Condition(Column("trace_id"), Op.EQ, self.trace_id)])
+
+ conditions = []
+
+ # Next we have to turn the user queries into the appropriate conditions in
+ # the SnQL that we produce.
+
+ # There are multiple sets of user conditions that need to be satisfied
+ # and if a span satisfies any of them, it should be considered.
+ #
+ # To handle this use case, we want to OR all the user specified
+ # conditions together in this query.
+ for where in user_conditions:
+ if len(where) > 1:
+ conditions.append(BooleanCondition(op=BooleanOp.AND, conditions=where))
+ elif len(where) == 1:
+ conditions.append(where[0])
+
+ if len(conditions) > 1:
+ # More than 1 set of conditions were specified, we want to show
+ # spans that match any 1 of them so join the conditions with `OR`s.
+ user_spans_query.add_conditions(
+ [BooleanCondition(op=BooleanOp.OR, conditions=conditions)]
+ )
+ elif len(conditions) == 1:
+ # Only 1 set of user conditions were specified, simply insert them into
+ # the final query.
+ user_spans_query.add_conditions([conditions[0]])
+
+ return user_spans_query
+
+ def get_user_spans_query_indexed(
+ self,
+ snuba_params: SnubaParams,
+ span_keys: list[SpanKey] | None,
+ limit: int,
+ offset: int,
) -> BaseQueryBuilder:
user_spans_query = SpansIndexedQueryBuilder(
Dataset.SpansIndexed,
@@ -1098,15 +1418,17 @@ class TraceStatsExecutor:
def __init__(
self,
*,
+ dataset: Dataset,
snuba_params: SnubaParams,
columns: list[str],
user_queries: list[str],
rollup: int,
zerofill_results: bool,
):
+ self.dataset = dataset
self.snuba_params = snuba_params
self.columns = columns
- self.user_queries = process_user_queries(snuba_params, user_queries)
+ self.user_queries = process_user_queries(snuba_params, user_queries, dataset)
self.rollup = rollup
self.zerofill_results = zerofill_results
@@ -1137,6 +1459,39 @@ def execute(self) -> SnubaTSResult:
)
def get_timeseries_query(self) -> BaseQueryBuilder:
+ if self.dataset == Dataset.EventsAnalyticsPlatform:
+ return self.get_timeseries_query_eap()
+ return self.get_timeseries_query_indexed()
+
+ def get_timeseries_query_eap(self) -> BaseQueryBuilder:
+ query = TimeseriesSpanEAPIndexedQueryBuilder(
+ Dataset.EventsAnalyticsPlatform,
+ params={},
+ snuba_params=self.snuba_params,
+ interval=self.rollup,
+ query=None,
+ selected_columns=self.columns,
+ )
+
+ trace_conditions = []
+
+ for where in self.user_queries.values():
+ if len(where) == 1:
+ trace_conditions.extend(where)
+ elif len(where) > 1:
+ trace_conditions.append(BooleanCondition(op=BooleanOp.AND, conditions=where))
+
+ if len(trace_conditions) == 1:
+ query.where.extend(trace_conditions)
+ elif len(trace_conditions) > 1:
+ query.where.append(BooleanCondition(op=BooleanOp.OR, conditions=trace_conditions))
+
+ if options.get("performance.traces.trace-explorer-skip-floating-spans"):
+ query.add_conditions([Condition(Column("segment_id"), Op.NEQ, "00")])
+
+ return query
+
+ def get_timeseries_query_indexed(self) -> BaseQueryBuilder:
query = TimeseriesSpanIndexedQueryBuilder(
Dataset.SpansIndexed,
params={},
@@ -1468,18 +1823,33 @@ def stack_clear(trace, until=None):
def process_user_queries(
snuba_params: SnubaParams,
user_queries: list[str],
+ dataset: Dataset = Dataset.SpansIndexed,
) -> dict[str, list[list[WhereType]]]:
with handle_span_query_errors():
- builder = SpansIndexedQueryBuilder(
- Dataset.SpansIndexed,
- params={},
- snuba_params=snuba_params,
- query=None, # Note: conditions are added below
- selected_columns=[],
- config=QueryBuilderConfig(
- transform_alias_to_input_format=True,
- ),
- )
+ if dataset == Dataset.EventsAnalyticsPlatform:
+ span_eap_builder = SpansEAPQueryBuilder(
+ dataset,
+ params={},
+ snuba_params=snuba_params,
+ query=None, # Note: conditions are added below
+ selected_columns=[],
+ config=QueryBuilderConfig(
+ transform_alias_to_input_format=True,
+ ),
+ )
+ resolve_conditions = span_eap_builder.resolve_conditions
+ else:
+ span_indexed_builder = SpansIndexedQueryBuilder(
+ dataset,
+ params={},
+ snuba_params=snuba_params,
+ query=None, # Note: conditions are added below
+ selected_columns=[],
+ config=QueryBuilderConfig(
+ transform_alias_to_input_format=True,
+ ),
+ )
+ resolve_conditions = span_indexed_builder.resolve_conditions
queries: dict[str, list[list[WhereType]]] = {}
@@ -1492,7 +1862,7 @@ def process_user_queries(
# We want to ignore all the aggregate conditions here because we're strictly
# searching on span attributes, not aggregates
- where, _ = builder.resolve_conditions(user_query)
+ where, _ = resolve_conditions(user_query)
queries[user_query] = where
set_measurement("user_queries_count", len(queries))
diff --git a/src/sentry/api/endpoints/organization_user_reports.py b/src/sentry/api/endpoints/organization_user_reports.py
index a083d32db550c..705f072e3b885 100644
--- a/src/sentry/api/endpoints/organization_user_reports.py
+++ b/src/sentry/api/endpoints/organization_user_reports.py
@@ -1,8 +1,10 @@
+from datetime import UTC, datetime, timedelta
from typing import NotRequired, TypedDict
from rest_framework.request import Request
from rest_framework.response import Response
+from sentry import quotas
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -13,6 +15,7 @@
from sentry.api.serializers import serialize
from sentry.api.serializers.models import UserReportWithGroupSerializer
from sentry.models.userreport import UserReport
+from sentry.utils.dates import epoch
class _PaginateKwargs(TypedDict):
@@ -56,6 +59,10 @@ def get(self, request: Request, organization) -> Response:
queryset = queryset.filter(
date_added__range=(filter_params["start"], filter_params["end"])
)
+ else:
+ retention = quotas.backend.get_event_retention(organization=organization)
+ start = datetime.now(UTC) - timedelta(days=retention) if retention else epoch
+ queryset = queryset.filter(date_added__gte=start)
status = request.GET.get("status", "unresolved")
paginate_kwargs: _PaginateKwargs = {}
diff --git a/src/sentry/api/endpoints/project_autofix_codebase_index_status.py b/src/sentry/api/endpoints/project_autofix_codebase_index_status.py
index 0072bffe0865f..7ed2bb1fde436 100644
--- a/src/sentry/api/endpoints/project_autofix_codebase_index_status.py
+++ b/src/sentry/api/endpoints/project_autofix_codebase_index_status.py
@@ -22,7 +22,6 @@ class ProjectAutofixCodebaseIndexStatusEndpoint(ProjectEndpoint):
"GET": ApiPublishStatus.EXPERIMENTAL,
}
owner = ApiOwner.ML_AI
- private = True
def get(self, request: Request, project: Project) -> Response:
"""
diff --git a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py b/src/sentry/api/endpoints/project_autofix_create_codebase_index.py
index 7db0402b44cc7..c79dd86ef3e94 100644
--- a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py
+++ b/src/sentry/api/endpoints/project_autofix_create_codebase_index.py
@@ -33,7 +33,6 @@ class ProjectAutofixCreateCodebaseIndexEndpoint(ProjectEndpoint):
"POST": ApiPublishStatus.EXPERIMENTAL,
}
owner = ApiOwner.ML_AI
- private = True
permission_classes = (ProjectAutofixCreateCodebaseIndexPermission,)
diff --git a/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py b/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py
index f3ba17d9bde69..a97aec8997611 100644
--- a/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py
+++ b/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py
@@ -8,6 +8,7 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.project import ProjectEndpoint
from sentry.auth.superuser import is_active_superuser
+from sentry.models.project import Project
from sentry.tasks.embeddings_grouping.backfill_seer_grouping_records_for_project import (
backfill_seer_grouping_records_for_project,
)
@@ -20,7 +21,7 @@ class ProjectBackfillSimilarIssuesEmbeddingsRecords(ProjectEndpoint):
"POST": ApiPublishStatus.PRIVATE,
}
- def post(self, request: Request, project) -> Response:
+ def post(self, request: Request, project: Project) -> Response:
if not features.has("projects:similarity-embeddings-backfill", project):
return Response(status=404)
diff --git a/src/sentry/api/endpoints/project_details.py b/src/sentry/api/endpoints/project_details.py
index ba3c4fca0fee5..7f85df1a955d4 100644
--- a/src/sentry/api/endpoints/project_details.py
+++ b/src/sentry/api/endpoints/project_details.py
@@ -30,6 +30,7 @@
from sentry.apidocs.parameters import GlobalParams
from sentry.constants import RESERVED_PROJECT_SLUGS, ObjectStatus
from sentry.datascrubbing import validate_pii_config_update, validate_pii_selectors
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.dynamic_sampling import get_supported_biases_ids, get_user_biases
from sentry.grouping.enhancer import Enhancements
from sentry.grouping.enhancer.exceptions import InvalidEnhancerConfig
@@ -44,10 +45,9 @@
)
from sentry.lang.native.utils import STORE_CRASH_REPORTS_MAX, convert_crashreport_count
from sentry.models.group import Group, GroupStatus
-from sentry.models.project import Project
+from sentry.models.project import PROJECT_SLUG_MAX_LENGTH, Project
from sentry.models.projectbookmark import ProjectBookmark
from sentry.models.projectredirect import ProjectRedirect
-from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.notifications.utils import has_alert_integration
from sentry.tasks.delete_seer_grouping_records import call_seer_delete_project_grouping_records
@@ -122,8 +122,6 @@ class ProjectMemberSerializer(serializers.Serializer):
"performanceIssueCreationRate",
"performanceIssueCreationThroughPlatform",
"performanceIssueSendToPlatform",
- "highlightContext",
- "highlightTags",
"uptimeAutodetection",
]
)
@@ -135,7 +133,7 @@ class ProjectAdminSerializer(ProjectMemberSerializer):
)
slug = SentrySerializerSlugField(
help_text="Uniquely identifies a project and is used for the interface.",
- max_length=50,
+ max_length=PROJECT_SLUG_MAX_LENGTH,
required=False,
)
platform = serializers.CharField(
@@ -168,14 +166,16 @@ class ProjectAdminSerializer(ProjectMemberSerializer):
)
highlightContext = HighlightContextField(
required=False,
- help_text="A JSON mapping of context types to lists of strings for their keys. E.g. {'user': ['id', 'email']}",
+ help_text="""A JSON mapping of context types to lists of strings for their keys.
+E.g. `{'user': ['id', 'email']}`""",
)
highlightTags = ListField(
child=serializers.CharField(),
required=False,
- help_text="A list of strings with tag keys to highlight on this project's issues. E.g. ['release', 'environment']",
+ help_text="""A list of strings with tag keys to highlight on this project's issues.
+E.g. `['release', 'environment']`""",
)
- # TODO: Add help_text to all the fields for public documentation
+ # TODO: Add help_text to all the fields for public documentation, then remove them from 'exclude_fields'
team = serializers.RegexField(r"^[a-z0-9_\-]+$", max_length=50)
digestsMinDelay = serializers.IntegerField(min_value=60, max_value=3600)
digestsMaxDelay = serializers.IntegerField(min_value=60, max_value=3600)
diff --git a/src/sentry/api/endpoints/project_docs_platform.py b/src/sentry/api/endpoints/project_docs_platform.py
deleted file mode 100644
index 4de3d9d06c7a1..0000000000000
--- a/src/sentry/api/endpoints/project_docs_platform.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from django.urls import reverse
-from rest_framework.request import Request
-from rest_framework.response import Response
-
-from sentry.api.api_owners import ApiOwner
-from sentry.api.api_publish_status import ApiPublishStatus
-from sentry.api.base import region_silo_endpoint
-from sentry.api.bases.project import ProjectEndpoint
-from sentry.api.exceptions import ResourceDoesNotExist
-from sentry.models.projectkey import ProjectKey
-from sentry.utils.http import absolute_uri
-from sentry.utils.integrationdocs import load_doc
-
-
-def replace_keys(html, project_key):
- if project_key is None:
- return html
- html = html.replace("___DSN___", project_key.dsn_private)
- html = html.replace("___PUBLIC_DSN___", project_key.dsn_public)
- html = html.replace("___PUBLIC_KEY___", project_key.public_key)
- html = html.replace("___SECRET_KEY___", project_key.secret_key)
- html = html.replace("___PROJECT_ID___", str(project_key.project_id))
- html = html.replace("___MINIDUMP_URL___", project_key.minidump_endpoint)
- html = html.replace("___UNREAL_URL___", project_key.unreal_endpoint)
- html = html.replace(
- "___RELAY_CDN_URL___",
- absolute_uri(reverse("sentry-js-sdk-loader", args=[project_key.public_key])),
- )
-
- # If we actually render this in the main UI we can also provide
- # extra information about the project (org slug and project slug)
- if "___PROJECT_NAME___" in html or "___ORG_NAME___" in html:
- project = project_key.project
- org = project.organization
- html = html.replace("___ORG_NAME___", str(org.slug))
- html = html.replace("___PROJECT_NAME___", str(project.slug))
-
- return html
-
-
-@region_silo_endpoint
-class ProjectDocsPlatformEndpoint(ProjectEndpoint):
- publish_status = {
- "GET": ApiPublishStatus.PRIVATE,
- }
- owner = ApiOwner.TELEMETRY_EXPERIENCE
-
- def get(self, request: Request, project, platform) -> Response:
- data = load_doc(platform)
- if not data:
- raise ResourceDoesNotExist
- keys = ("id", "name", "html", "link")
- for key in keys:
- if key not in data:
- raise ResourceDoesNotExist
-
- project_key = ProjectKey.get_default(project)
-
- return Response(
- {
- "id": data["id"],
- "name": data["name"],
- "html": replace_keys(data["html"], project_key),
- "link": data["link"],
- "wizardSetup": data.get("wizard_setup", None),
- }
- )
diff --git a/src/sentry/api/endpoints/project_environments.py b/src/sentry/api/endpoints/project_environments.py
index 80536ec9de6f9..687ebe9a8b73f 100644
--- a/src/sentry/api/endpoints/project_environments.py
+++ b/src/sentry/api/endpoints/project_environments.py
@@ -1,3 +1,4 @@
+from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
@@ -6,33 +7,42 @@
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.helpers.environments import environment_visibility_filter_options
from sentry.api.serializers import serialize
+from sentry.api.serializers.models.environment import EnvironmentProjectSerializerResponse
+from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND, RESPONSE_UNAUTHORIZED
+from sentry.apidocs.examples.environment_examples import EnvironmentExamples
+from sentry.apidocs.parameters import EnvironmentParams, GlobalParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.models.environment import EnvironmentProject
+@extend_schema(tags=["Environments"])
@region_silo_endpoint
class ProjectEnvironmentsEndpoint(ProjectEndpoint):
publish_status = {
- "GET": ApiPublishStatus.UNKNOWN,
+ "GET": ApiPublishStatus.PUBLIC,
}
+ @extend_schema(
+ operation_id="List a Project's Environments",
+ parameters=[
+ GlobalParams.ORG_ID_OR_SLUG,
+ GlobalParams.PROJECT_ID_OR_SLUG,
+ EnvironmentParams.VISIBILITY,
+ ],
+ responses={
+ 200: inline_sentry_response_serializer(
+ "ListProjectEnvironments", list[EnvironmentProjectSerializerResponse]
+ ),
+ 400: OpenApiResponse(description="Invalid value for 'visibility'."),
+ 401: RESPONSE_UNAUTHORIZED,
+ 403: RESPONSE_FORBIDDEN,
+ 404: RESPONSE_NOT_FOUND,
+ },
+ examples=EnvironmentExamples.GET_PROJECT_ENVIRONMENTS,
+ )
def get(self, request: Request, project) -> Response:
"""
- List a Project's Environments
- ```````````````````````````````
-
- Return environments for a given project.
-
- :qparam string visibility: when omitted only visible environments are
- returned. Set to ``"hidden"`` for only hidden
- environments, or ``"all"`` for both hidden
- and visible environments.
-
- :pparam string organization_id_or_slug: the id or slug of the organization the project
- belongs to.
-
- :pparam string project_id_or_slug: the id or slug of the project.
-
- :auth: required
+ Lists a project's environments.
"""
queryset = (
diff --git a/src/sentry/api/endpoints/project_ownership.py b/src/sentry/api/endpoints/project_ownership.py
index ea63426734b17..7e7a05c421340 100644
--- a/src/sentry/api/endpoints/project_ownership.py
+++ b/src/sentry/api/endpoints/project_ownership.py
@@ -21,8 +21,9 @@
from sentry.signals import ownership_rule_created
from sentry.utils.audit import create_audit_entry
-MAX_RAW_LENGTH = 100_000
-HIGHER_MAX_RAW_LENGTH = 250_000
+DEFAULT_MAX_RAW_LENGTH = 100_000
+LARGE_MAX_RAW_LENGTH = 250_000
+XLARGE_MAX_RAW_LENGTH = 750_000
class ProjectOwnershipRequestSerializer(serializers.Serializer):
@@ -62,11 +63,12 @@ def _validate_no_codeowners(rules):
)
def get_max_length(self):
- if features.has(
- "organizations:higher-ownership-limit", self.context["ownership"].project.organization
- ):
- return HIGHER_MAX_RAW_LENGTH
- return MAX_RAW_LENGTH
+ organization = self.context["ownership"].project.organization
+ if features.has("organizations:ownership-size-limit-xlarge", organization):
+ return XLARGE_MAX_RAW_LENGTH
+ if features.has("organizations:ownership-size-limit-large", organization):
+ return LARGE_MAX_RAW_LENGTH
+ return DEFAULT_MAX_RAW_LENGTH
def validate_autoAssignment(self, value):
if value not in [
diff --git a/src/sentry/api/endpoints/project_rule_actions.py b/src/sentry/api/endpoints/project_rule_actions.py
index 36bad42c07976..73ba633945a4f 100644
--- a/src/sentry/api/endpoints/project_rule_actions.py
+++ b/src/sentry/api/endpoints/project_rule_actions.py
@@ -1,5 +1,6 @@
import logging
+import sentry_sdk
from rest_framework.exceptions import ValidationError
from rest_framework.request import Request
from rest_framework.response import Response
@@ -13,7 +14,7 @@
from sentry.eventstore.models import GroupEvent
from sentry.models.rule import Rule
from sentry.rules.processing.processor import activate_downstream_actions
-from sentry.shared_integrations.exceptions import IntegrationError
+from sentry.shared_integrations.exceptions import IntegrationFormError
from sentry.utils.safe import safe_execute
from sentry.utils.samples import create_sample_event
@@ -97,7 +98,7 @@ def execute_future_on_test_event(
# safe_execute logs these as exceptions, which can result in
# noisy sentry issues, so log with a warning instead.
- if isinstance(exc, IntegrationError):
+ if isinstance(exc, IntegrationFormError):
logger.warning(
"%s.test_alert.integration_error", callback_name, extra={"exc": exc}
)
@@ -110,7 +111,12 @@ def execute_future_on_test_event(
logger.warning(
"%s.test_alert.unexpected_exception", callback_name, exc_info=True
)
- break
+ error_id = sentry_sdk.capture_exception(exc)
+ action_exceptions.append(
+ f"An unexpected error occurred. Error ID: '{error_id}'"
+ )
+
+ break
status = None
data = None
diff --git a/src/sentry/api/endpoints/project_rule_details.py b/src/sentry/api/endpoints/project_rule_details.py
index 9d655541784a8..009248ae51b0e 100644
--- a/src/sentry/api/endpoints/project_rule_details.py
+++ b/src/sentry/api/endpoints/project_rule_details.py
@@ -26,13 +26,13 @@
from sentry.apidocs.examples.issue_alert_examples import IssueAlertExamples
from sentry.apidocs.parameters import GlobalParams, IssueAlertParams
from sentry.constants import ObjectStatus
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.integrations.jira.actions.create_ticket import JiraCreateTicketAction
from sentry.integrations.jira_server.actions.create_ticket import JiraServerCreateTicketAction
from sentry.integrations.slack.tasks.find_channel_id_for_rule import find_channel_id_for_rule
from sentry.integrations.slack.utils.rule_status import RedisRuleStatus
from sentry.mediators.project_rules.updater import Updater
from sentry.models.rule import NeglectedRule, RuleActivity, RuleActivityType
-from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.rules.actions import trigger_sentry_app_action_creators_for_issues
from sentry.rules.actions.utils import get_changed_data, get_updated_rule_data
from sentry.signals import alert_rule_edited
diff --git a/src/sentry/api/endpoints/project_rules.py b/src/sentry/api/endpoints/project_rules.py
index fefae983ae090..4658c4ace2488 100644
--- a/src/sentry/api/endpoints/project_rules.py
+++ b/src/sentry/api/endpoints/project_rules.py
@@ -505,8 +505,8 @@ class ProjectRulesPostSerializer(serializers.Serializer):
- `workspace` - The integration ID associated with the Slack workspace.
- `channel` - The name of the channel to send the notification to (e.g., #critical, Jane Schmidt).
- `channel_id` (optional) - The ID of the channel to send the notification to.
-- `tags` - A string of tags to show in the notification, separated by commas (e.g., "environment, user, my_tag").
-- `notes` - Text to show alongside the notification. To @ a user, include their user id like `@`. To include a clickable link, format the link and title like ``.
+- `tags` (optional) - A string of tags to show in the notification, separated by commas (e.g., "environment, user, my_tag").
+- `notes` (optional) - Text to show alongside the notification. To @ a user, include their user id like `@`. To include a clickable link, format the link and title like ``.
```json
{
"id": "sentry.integrations.slack.notify_action.SlackNotifyServiceAction",
@@ -531,7 +531,7 @@ class ProjectRulesPostSerializer(serializers.Serializer):
**Send a Discord notification**
- `server` - The integration ID associated with the Discord server.
- `channel_id` - The ID of the channel to send the notification to.
-- `tags` - A string of tags to show in the notification, separated by commas (e.g., "environment, user, my_tag").
+- `tags` (optional) - A string of tags to show in the notification, separated by commas (e.g., "environment, user, my_tag").
```json
{
"id": "sentry.integrations.discord.notify_action.DiscordNotifyServiceAction",
diff --git a/src/sentry/api/endpoints/project_rules_configuration.py b/src/sentry/api/endpoints/project_rules_configuration.py
index ce4e6c568bfb1..dec553ff70265 100644
--- a/src/sentry/api/endpoints/project_rules_configuration.py
+++ b/src/sentry/api/endpoints/project_rules_configuration.py
@@ -31,9 +31,6 @@ def get(self, request: Request, project) -> Response:
can_create_tickets = features.has(
"organizations:integrations-ticket-rules", project.organization
)
- has_latest_adopted_release = features.has(
- "organizations:latest-adopted-release-filter", project.organization
- )
# TODO: conditions need to be based on actions
for rule_type, rule_cls in rules:
@@ -76,12 +73,6 @@ def get(self, request: Request, project) -> Response:
if rule_type.startswith("condition/"):
condition_list.append(context)
elif rule_type.startswith("filter/"):
- if (
- context["id"]
- == "sentry.rules.filters.latest_adopted_release_filter.LatestAdoptedReleaseFilter"
- and not has_latest_adopted_release
- ):
- continue
filter_list.append(context)
elif rule_type.startswith("action/"):
action_list.append(context)
diff --git a/src/sentry/api/endpoints/project_servicehook_details.py b/src/sentry/api/endpoints/project_servicehook_details.py
index d44bc117494d1..8c1b1b60f5469 100644
--- a/src/sentry/api/endpoints/project_servicehook_details.py
+++ b/src/sentry/api/endpoints/project_servicehook_details.py
@@ -10,8 +10,9 @@
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
-from sentry.api.validators import ServiceHookValidator
from sentry.constants import ObjectStatus
+from sentry.sentry_apps.api.parsers.servicehook import ServiceHookValidator
+from sentry.sentry_apps.api.serializers.servicehook import ServiceHookSerializer
from sentry.sentry_apps.models.servicehook import ServiceHook
@@ -42,7 +43,7 @@ def get(self, request: Request, project, hook_id) -> Response:
hook = ServiceHook.objects.get(project_id=project.id, guid=hook_id)
except ServiceHook.DoesNotExist:
raise ResourceDoesNotExist
- return self.respond(serialize(hook, request.user))
+ return self.respond(serialize(hook, request.user, ServiceHookSerializer()))
def put(self, request: Request, project, hook_id) -> Response:
"""
@@ -95,7 +96,7 @@ def put(self, request: Request, project, hook_id) -> Response:
data=hook.get_audit_log_data(),
)
- return self.respond(serialize(hook, request.user))
+ return self.respond(serialize(hook, request.user, ServiceHookSerializer()))
def delete(self, request: Request, project, hook_id) -> Response:
"""
diff --git a/src/sentry/api/endpoints/project_servicehooks.py b/src/sentry/api/endpoints/project_servicehooks.py
index 1c38a74a2b951..cc1af074094d4 100644
--- a/src/sentry/api/endpoints/project_servicehooks.py
+++ b/src/sentry/api/endpoints/project_servicehooks.py
@@ -10,8 +10,9 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize
-from sentry.api.validators import ServiceHookValidator
from sentry.constants import ObjectStatus
+from sentry.sentry_apps.api.parsers.servicehook import ServiceHookValidator
+from sentry.sentry_apps.api.serializers.servicehook import ServiceHookSerializer
from sentry.sentry_apps.models.servicehook import ServiceHook
from sentry.sentry_apps.services.hook import hook_service
@@ -65,7 +66,7 @@ def get(self, request: Request, project) -> Response:
request=request,
queryset=queryset,
order_by="-id",
- on_results=lambda x: serialize(x, request.user),
+ on_results=lambda x: serialize(x, request.user, ServiceHookSerializer()),
)
def post(self, request: Request, project) -> Response:
@@ -130,5 +131,6 @@ def post(self, request: Request, project) -> Response:
)
return self.respond(
- serialize(ServiceHook.objects.get(id=hook.id), request.user), status=201
+ serialize(ServiceHook.objects.get(id=hook.id), request.user, ServiceHookSerializer()),
+ status=201,
)
diff --git a/src/sentry/api/endpoints/project_user_reports.py b/src/sentry/api/endpoints/project_user_reports.py
index 3660faf94254d..3b9b5a988c803 100644
--- a/src/sentry/api/endpoints/project_user_reports.py
+++ b/src/sentry/api/endpoints/project_user_reports.py
@@ -1,9 +1,11 @@
+from datetime import UTC, datetime, timedelta
from typing import NotRequired, TypedDict
from rest_framework import serializers
from rest_framework.request import Request
from rest_framework.response import Response
+from sentry import quotas
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.authentication import DSNAuthentication
@@ -17,6 +19,7 @@
from sentry.models.environment import Environment
from sentry.models.projectkey import ProjectKey
from sentry.models.userreport import UserReport
+from sentry.utils.dates import epoch
class UserReportSerializer(serializers.ModelSerializer):
@@ -61,7 +64,11 @@ def get(self, request: Request, project) -> Response:
except Environment.DoesNotExist:
queryset = UserReport.objects.none()
else:
- queryset = UserReport.objects.filter(project_id=project.id, group_id__isnull=False)
+ retention = quotas.backend.get_event_retention(organization=project.organization)
+ start = datetime.now(UTC) - timedelta(days=retention) if retention else epoch
+ queryset = UserReport.objects.filter(
+ project_id=project.id, group_id__isnull=False, date_added__gte=start
+ )
if environment is not None:
queryset = queryset.filter(environment_id=environment.id)
@@ -113,7 +120,7 @@ def post(self, request: Request, project) -> Response:
:param string comments: comments supplied by user
"""
if hasattr(request.auth, "project_id") and project.id != request.auth.project_id:
- return self.respond(status=400)
+ return self.respond(status=401)
serializer = UserReportSerializer(data=request.data)
if not serializer.is_valid():
diff --git a/src/sentry/api/endpoints/secret_scanning/github.py b/src/sentry/api/endpoints/secret_scanning/github.py
new file mode 100644
index 0000000000000..c96362be3b714
--- /dev/null
+++ b/src/sentry/api/endpoints/secret_scanning/github.py
@@ -0,0 +1,176 @@
+import hashlib
+import logging
+
+import sentry_sdk
+from django.http import HttpResponse
+from django.utils import timezone
+from django.utils.decorators import method_decorator
+from django.views.decorators.csrf import csrf_exempt
+from django.views.generic.base import View
+
+from sentry import options
+from sentry.hybridcloud.models import ApiTokenReplica, OrgAuthTokenReplica
+from sentry.models.apitoken import ApiToken
+from sentry.models.orgauthtoken import OrgAuthToken
+from sentry.organizations.absolute_url import generate_organization_url
+from sentry.organizations.services.organization import organization_service
+from sentry.types.token import AuthTokenType
+from sentry.users.models.user import User
+from sentry.utils import json, metrics
+from sentry.utils.email import MessageBuilder
+from sentry.utils.github import verify_signature
+from sentry.utils.http import absolute_uri
+from sentry.web.frontend.base import control_silo_view
+
+logger = logging.getLogger(__name__)
+
+TOKEN_TYPE_HUMAN_READABLE = {
+ AuthTokenType.USER: "User Auth Token",
+ AuthTokenType.ORG: "Organization Auth Token",
+}
+
+REVOKE_URLS = {
+ AuthTokenType.USER: "/settings/account/api/auth-tokens/",
+ AuthTokenType.ORG: "/settings/auth-tokens/",
+}
+
+
+@control_silo_view
+class SecretScanningGitHubEndpoint(View):
+ @method_decorator(csrf_exempt)
+ def dispatch(self, request, *args, **kwargs):
+ if request.method != "POST":
+ return HttpResponse(status=405)
+
+ response = super().dispatch(request, *args, **kwargs)
+ metrics.incr(
+ "secret-scanning.github.webhooks",
+ 1,
+ tags={"status": response.status_code},
+ skip_internal=False,
+ )
+ return response
+
+ def post(self, request):
+ if request.headers.get("Content-Type") != "application/json":
+ return HttpResponse(
+ json.dumps({"details": "invalid content type specified"}), status=400
+ )
+
+ payload = request.body.decode("utf-8")
+ signature = request.headers.get("Github-Public-Key-Signature")
+ key_id = request.headers.get("Github-Public-Key-Identifier")
+
+ try:
+ if options.get("secret-scanning.github.enable-signature-verification"):
+ verify_signature(
+ payload,
+ signature,
+ key_id,
+ "secret_scanning",
+ )
+ except ValueError as e:
+ sentry_sdk.capture_exception(e)
+ return HttpResponse(json.dumps({"details": "invalid signature"}), status=400)
+
+ secret_alerts = json.loads(payload)
+ response = []
+ for secret_alert in secret_alerts:
+ alerted_token_str = secret_alert["token"]
+ hashed_alerted_token = hashlib.sha256(alerted_token_str.encode()).hexdigest()
+
+ # no prefix tokens could indicate old user auth tokens with no prefixes
+ token_type = AuthTokenType.USER
+ if alerted_token_str.startswith(AuthTokenType.ORG):
+ token_type = AuthTokenType.ORG
+ elif alerted_token_str.startswith((AuthTokenType.USER_APP, AuthTokenType.INTEGRATION)):
+ # TODO: add support for other token types
+ return HttpResponse(
+ json.dumps({"details": "auth token type is not implemented"}), status=501
+ )
+
+ try:
+ token: ApiToken | OrgAuthToken
+
+ if token_type == AuthTokenType.USER:
+ token = ApiToken.objects.get(hashed_token=hashed_alerted_token)
+
+ if token_type == AuthTokenType.ORG:
+ token = OrgAuthToken.objects.get(
+ token_hashed=hashed_alerted_token, date_deactivated=None
+ )
+
+ extra = {
+ "exposed_source": secret_alert["source"],
+ "exposed_url": secret_alert["url"],
+ "hashed_token": hashed_alerted_token,
+ "token_type": token_type,
+ }
+ logger.info("found an exposed auth token", extra=extra)
+
+ # TODO: mark an API token as exposed in the database
+
+ # TODO: expose this option in the UI
+ revoke_action_enabled = False
+ if revoke_action_enabled:
+ # TODO: revoke token
+ pass
+
+ # Send an email
+ url_prefix = options.get("system.url-prefix")
+ if isinstance(token, ApiToken):
+ # for user token, send an alert to the token owner
+ users = User.objects.filter(id=token.user_id)
+ elif isinstance(token, OrgAuthToken):
+ # for org token, send an alert to all organization owners
+ organization = organization_service.get(id=token.organization_id)
+ if organization is None:
+ continue
+
+ owner_members = organization_service.get_organization_owner_members(
+ organization_id=organization.id
+ )
+ user_ids = [om.user_id for om in owner_members]
+ users = User.objects.filter(id__in=user_ids)
+
+ url_prefix = generate_organization_url(organization.slug)
+
+ token_type_human_readable = TOKEN_TYPE_HUMAN_READABLE.get(token_type, "Auth Token")
+
+ revoke_url = absolute_uri(REVOKE_URLS.get(token_type, "/"), url_prefix=url_prefix)
+
+ context = {
+ "datetime": timezone.now(),
+ "token_name": token.name,
+ "token_type": token_type_human_readable,
+ "token_redacted": f"{token_type}...{token.token_last_characters}",
+ "hashed_token": hashed_alerted_token,
+ "exposed_source": secret_alert["source"],
+ "exposed_url": secret_alert["url"],
+ "revoke_url": revoke_url,
+ }
+
+ subject = f"Action Required: {token_type_human_readable} Exposed"
+ msg = MessageBuilder(
+ subject="{}{}".format(options.get("mail.subject-prefix"), subject),
+ template="sentry/emails/secret-scanning/body.txt",
+ html_template="sentry/emails/secret-scanning/body.html",
+ type="user.secret-scanning-alert",
+ context=context,
+ )
+ msg.send_async([u.username for u in users])
+ except (
+ ApiToken.DoesNotExist,
+ ApiTokenReplica.DoesNotExist,
+ OrgAuthToken.DoesNotExist,
+ OrgAuthTokenReplica.DoesNotExist,
+ ):
+ response.append(
+ {
+ "token_hash": hashed_alerted_token,
+ "token_type": secret_alert["type"],
+ "label": "false_positive",
+ }
+ )
+
+ return HttpResponse(json.dumps(response), status=200)
diff --git a/src/sentry/api/endpoints/seer_rpc.py b/src/sentry/api/endpoints/seer_rpc.py
index f468d0f5ae5a3..8ad06295ed6c8 100644
--- a/src/sentry/api/endpoints/seer_rpc.py
+++ b/src/sentry/api/endpoints/seer_rpc.py
@@ -17,6 +17,7 @@
from rest_framework.response import Response
from sentry_sdk import Scope, capture_exception
+from sentry import options
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.authentication import AuthenticationSiloLimit, StandardAuthentication
@@ -153,8 +154,9 @@ def get_organization_slug(*, org_id: int) -> dict:
def get_organization_autofix_consent(*, org_id: int) -> dict:
org: Organization = Organization.objects.get(id=org_id)
consent = org.get_option("sentry:gen_ai_consent", False)
+ github_extension_enabled = org_id in options.get("github-extension.enabled-orgs")
return {
- "consent": consent,
+ "consent": consent or github_extension_enabled,
}
diff --git a/src/sentry/api/endpoints/team_details.py b/src/sentry/api/endpoints/team_details.py
index 87d276d1bb279..158720816c691 100644
--- a/src/sentry/api/endpoints/team_details.py
+++ b/src/sentry/api/endpoints/team_details.py
@@ -23,14 +23,15 @@
)
from sentry.apidocs.examples.team_examples import TeamExamples
from sentry.apidocs.parameters import GlobalParams, TeamParams
-from sentry.models.scheduledeletion import RegionScheduledDeletion
+from sentry.db.models.fields.slug import DEFAULT_SLUG_MAX_LENGTH
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.models.team import Team, TeamStatus
@extend_schema_serializer(exclude_fields=["name"])
class TeamDetailsSerializer(CamelSnakeModelSerializer):
slug = SentrySerializerSlugField(
- max_length=50,
+ max_length=DEFAULT_SLUG_MAX_LENGTH,
        help_text="Uniquely identifies a team. This must be available.",
)
@@ -55,6 +56,13 @@ class TeamDetailsEndpoint(TeamEndpoint):
"GET": ApiPublishStatus.PUBLIC,
"PUT": ApiPublishStatus.PUBLIC,
}
+ # OrganizationSCIMTeamDetails inherits this endpoint, but toggles this setting
+ _allow_idp_changes = False
+
+ def can_modify_idp_team(self, team: Team):
+ if not team.idp_provisioned:
+ return True
+ return self._allow_idp_changes
@extend_schema(
operation_id="Retrieve a Team",
@@ -106,6 +114,13 @@ def put(self, request: Request, team) -> Response:
Update various attributes and configurable settings for the given
team.
"""
+
+ if not self.can_modify_idp_team(team):
+ return Response(
+ {"detail": "This team is managed through your organization's identity provider."},
+ status=403,
+ )
+
serializer = TeamDetailsSerializer(team, data=request.data, partial=True)
if serializer.is_valid():
team = serializer.save()
@@ -140,6 +155,13 @@ def delete(self, request: Request, team) -> Response:
**Note:** Deletion happens asynchronously and therefore is not
immediate. Teams will have their slug released while waiting for deletion.
"""
+
+ if not self.can_modify_idp_team(team):
+ return Response(
+ {"detail": "This team is managed through your organization's identity provider."},
+ status=403,
+ )
+
suffix = uuid4().hex
new_slug = f"{team.slug}-{suffix}"[0:50]
try:
diff --git a/src/sentry/api/endpoints/team_projects.py b/src/sentry/api/endpoints/team_projects.py
index 73ce44a11de87..98fadbfd2a714 100644
--- a/src/sentry/api/endpoints/team_projects.py
+++ b/src/sentry/api/endpoints/team_projects.py
@@ -22,7 +22,7 @@
from sentry.apidocs.parameters import CursorQueryParam, GlobalParams
from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.constants import RESERVED_PROJECT_SLUGS, ObjectStatus
-from sentry.models.project import Project
+from sentry.models.project import PROJECT_SLUG_MAX_LENGTH, Project
from sentry.models.team import Team
from sentry.seer.similarity.utils import project_is_seer_eligible
from sentry.signals import project_created
@@ -38,7 +38,7 @@ class ProjectPostSerializer(serializers.Serializer):
slug = SentrySerializerSlugField(
help_text="""Uniquely identifies a project and is used for the interface.
If not provided, it is automatically generated from the name.""",
- max_length=50,
+ max_length=PROJECT_SLUG_MAX_LENGTH,
required=False,
allow_null=True,
)
diff --git a/src/sentry/api/endpoints/user_notification_settings_options.py b/src/sentry/api/endpoints/user_notification_settings_options.py
index 20b0ce6089b12..a381d1eee7858 100644
--- a/src/sentry/api/endpoints/user_notification_settings_options.py
+++ b/src/sentry/api/endpoints/user_notification_settings_options.py
@@ -22,8 +22,6 @@ class UserNotificationSettingsOptionsEndpoint(UserEndpoint):
"PUT": ApiPublishStatus.PRIVATE,
}
owner = ApiOwner.ALERTS_NOTIFICATIONS
- # TODO(Steve): Make not private when we launch new system
- private = True
def get(self, request: Request, user: User) -> Response:
"""
diff --git a/src/sentry/api/endpoints/user_notification_settings_options_detail.py b/src/sentry/api/endpoints/user_notification_settings_options_detail.py
index d39efa93cf8fe..53e73e039e850 100644
--- a/src/sentry/api/endpoints/user_notification_settings_options_detail.py
+++ b/src/sentry/api/endpoints/user_notification_settings_options_detail.py
@@ -17,8 +17,6 @@ class UserNotificationSettingsOptionsDetailEndpoint(UserEndpoint):
"DELETE": ApiPublishStatus.PRIVATE,
}
owner = ApiOwner.ALERTS_NOTIFICATIONS
- # TODO(Steve): Make not private when we launch new system
- private = True
def convert_args(
self,
diff --git a/src/sentry/api/endpoints/user_notification_settings_providers.py b/src/sentry/api/endpoints/user_notification_settings_providers.py
index 945bf75477f4d..dff386dae2882 100644
--- a/src/sentry/api/endpoints/user_notification_settings_providers.py
+++ b/src/sentry/api/endpoints/user_notification_settings_providers.py
@@ -25,8 +25,6 @@ class UserNotificationSettingsProvidersEndpoint(UserEndpoint):
"PUT": ApiPublishStatus.PRIVATE,
}
owner = ApiOwner.ALERTS_NOTIFICATIONS
- # TODO(Steve): Make not private when we launch new system
- private = True
def get(self, request: Request, user: User) -> Response:
"""
diff --git a/src/sentry/api/endpoints/warmup.py b/src/sentry/api/endpoints/warmup.py
new file mode 100644
index 0000000000000..4d989835f992c
--- /dev/null
+++ b/src/sentry/api/endpoints/warmup.py
@@ -0,0 +1,20 @@
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import Endpoint, all_silo_endpoint
+from sentry.ratelimits.config import RateLimitConfig
+
+
+@all_silo_endpoint
+class WarmupEndpoint(Endpoint):
+ publish_status = {
+ "GET": ApiPublishStatus.PRIVATE,
+ }
+ owner = ApiOwner.UNOWNED
+ permission_classes = ()
+ rate_limits = RateLimitConfig(group="INTERNAL")
+
+ def get(self, request: Request) -> Response:
+ return Response(200)
diff --git a/src/sentry/api/event_search.py b/src/sentry/api/event_search.py
index 7aba67336a779..7341c7bcb7dc7 100644
--- a/src/sentry/api/event_search.py
+++ b/src/sentry/api/event_search.py
@@ -571,13 +571,14 @@ def create_from(cls, search_config: SearchConfig, **overrides):
class SearchVisitor(NodeVisitor):
unwrapped_exceptions = (InvalidSearchQuery,)
- def __init__(self, config=None, params=None, builder=None):
+ def __init__(self, config=None, params=None, builder=None, get_field_type=None):
super().__init__()
if config is None:
config = SearchConfig()
self.config = config
self.params = params if params is not None else {}
+ self.get_field_type = get_field_type
if builder is None:
# Avoid circular import
from sentry.search.events.builder.discover import UnresolvedQuery
@@ -590,6 +591,10 @@ def __init__(self, config=None, params=None, builder=None):
)
else:
self.builder = builder
+ if get_field_type is None:
+ self.get_field_type = self.builder.get_field_type
+ else:
+ self.get_field_type = get_field_type
@cached_property
def key_mappings_lookup(self):
@@ -604,7 +609,7 @@ def is_numeric_key(self, key):
key in self.config.numeric_keys
or is_measurement(key)
or is_span_op_breakdown(key)
- or self.builder.get_field_type(key) == "number"
+ or self.get_field_type(key) == "number"
or self.is_duration_key(key)
)
@@ -614,11 +619,11 @@ def is_duration_key(self, key):
key in self.config.duration_keys
or is_duration_measurement(key)
or is_span_op_breakdown(key)
- or self.builder.get_field_type(key) in duration_types
+ or self.get_field_type(key) in duration_types
)
def is_size_key(self, key):
- return self.builder.get_field_type(key) in SIZE_UNITS
+ return self.get_field_type(key) in SIZE_UNITS
def is_date_key(self, key):
return key in self.config.date_keys
@@ -1241,7 +1246,7 @@ def generic_visit(self, node, children):
def parse_search_query(
- query, config=None, params=None, builder=None, config_overrides=None
+ query, config=None, params=None, builder=None, config_overrides=None, get_field_type=None
) -> list[
SearchFilter
]: # TODO: use the `Sequence[QueryToken]` type and update the code that fails type checking.
@@ -1264,4 +1269,6 @@ def parse_search_query(
if config_overrides:
config = SearchConfig.create_from(config, **config_overrides)
- return SearchVisitor(config, params=params, builder=builder).visit(tree)
+ return SearchVisitor(
+ config, params=params, builder=builder, get_field_type=get_field_type
+ ).visit(tree)
diff --git a/src/sentry/api/fields/sentry_slug.py b/src/sentry/api/fields/sentry_slug.py
index 6301e9483eaff..24eecce61da29 100644
--- a/src/sentry/api/fields/sentry_slug.py
+++ b/src/sentry/api/fields/sentry_slug.py
@@ -4,6 +4,7 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
+from sentry.db.models.fields.slug import DEFAULT_SLUG_MAX_LENGTH
from sentry.slug.errors import DEFAULT_SLUG_ERROR_MESSAGE, ORG_SLUG_ERROR_MESSAGE
from sentry.slug.patterns import MIXED_SLUG_PATTERN, ORG_SLUG_PATTERN
@@ -24,6 +25,7 @@ def __init__(
self,
error_messages=None,
org_slug: bool = False,
+ max_length: int = DEFAULT_SLUG_MAX_LENGTH,
*args,
**kwargs,
):
@@ -37,4 +39,6 @@ def __init__(
pattern = ORG_SLUG_PATTERN
error_messages["invalid"] = ORG_SLUG_ERROR_MESSAGE
- super().__init__(pattern, error_messages=error_messages, *args, **kwargs)
+ super().__init__(
+ pattern, error_messages=error_messages, max_length=max_length, *args, **kwargs
+ )
diff --git a/src/sentry/api/helpers/actionable_items_helper.py b/src/sentry/api/helpers/actionable_items_helper.py
index fccc127cf20c1..f66d12cb3cff3 100644
--- a/src/sentry/api/helpers/actionable_items_helper.py
+++ b/src/sentry/api/helpers/actionable_items_helper.py
@@ -38,6 +38,8 @@ class ActionPriority:
EventError.INVALID_ENVIRONMENT: ActionPriority.LOW,
EventError.NATIVE_BAD_DSYM: ActionPriority.LOW,
EventError.NATIVE_MISSING_DSYM: ActionPriority.LOW,
+ EventError.NATIVE_INTERNAL_FAILURE: ActionPriority.LOW,
+ EventError.NATIVE_SYMBOLICATOR_FAILED: ActionPriority.LOW,
EventError.NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM: ActionPriority.LOW,
EventError.PAST_TIMESTAMP: ActionPriority.LOW,
EventError.PROGUARD_MISSING_LINENO: ActionPriority.LOW,
@@ -66,12 +68,10 @@ class ActionPriority:
EventError.JS_SCRAPING_DISABLED,
EventError.JS_TOO_MANY_REMOTE_SOURCES,
EventError.MISSING_ATTRIBUTE,
- EventError.NATIVE_INTERNAL_FAILURE,
EventError.NATIVE_MISSING_SYMBOL,
EventError.NATIVE_MISSING_SYSTEM_DSYM,
EventError.NATIVE_NO_CRASHED_THREAD,
EventError.NATIVE_SIMULATOR_FRAME,
- EventError.NATIVE_SYMBOLICATOR_FAILED,
EventError.NATIVE_UNKNOWN_IMAGE,
EventError.UNKNOWN_ERROR,
EventError.VALUE_TOO_LONG,
diff --git a/src/sentry/api/helpers/group_index/delete.py b/src/sentry/api/helpers/group_index/delete.py
index b2bd74552f181..d930632b674e1 100644
--- a/src/sentry/api/helpers/group_index/delete.py
+++ b/src/sentry/api/helpers/group_index/delete.py
@@ -10,13 +10,14 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import audit_log, eventstream
+from sentry import audit_log, eventstream, features
from sentry.api.base import audit_logger
from sentry.deletions.tasks.groups import delete_groups as delete_groups_task
from sentry.issues.grouptype import GroupCategory
from sentry.models.group import Group, GroupStatus
from sentry.models.grouphash import GroupHash
from sentry.models.groupinbox import GroupInbox
+from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.signals import issue_deleted
from sentry.tasks.delete_seer_grouping_records import call_delete_seer_grouping_records_by_hash
@@ -44,10 +45,24 @@ def delete_group_list(
if not group_list:
return
+ issue_platform_deletion_allowed = features.has(
+ "organizations:issue-platform-deletion", project.organization, actor=request.user
+ )
+
# deterministic sort for sanity, and for very large deletions we'll
# delete the "smaller" groups first
group_list.sort(key=lambda g: (g.times_seen, g.id))
- group_ids = [g.id for g in group_list]
+ group_ids = []
+ non_error_group_found = False
+ for g in group_list:
+ group_ids.append(g.id)
+ if not non_error_group_found and g.issue_category != GroupCategory.ERROR:
+ non_error_group_found = True
+
+ countdown = 3600
+ # With ClickHouse light deletes we want to get rid of the long delay
+ if issue_platform_deletion_allowed and non_error_group_found:
+ countdown = 0
Group.objects.filter(id__in=group_ids).exclude(
status__in=[GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]
@@ -73,7 +88,7 @@ def delete_group_list(
"transaction_id": transaction_id,
"eventstream_state": eventstream_state,
},
- countdown=3600,
+ countdown=countdown,
)
for group in group_list:
@@ -140,7 +155,12 @@ def delete_groups(
if not group_list:
return Response(status=204)
- if any(group.issue_category != GroupCategory.ERROR for group in group_list):
+ org = Organization.objects.get_from_cache(id=organization_id)
+ issue_platform_deletion_allowed = features.has(
+ "organizations:issue-platform-deletion", org, actor=request.user
+ )
+ non_error_group_found = any(group.issue_category != GroupCategory.ERROR for group in group_list)
+ if not issue_platform_deletion_allowed and non_error_group_found:
raise rest_framework.exceptions.ValidationError(detail="Only error issues can be deleted.")
groups_by_project_id = defaultdict(list)
diff --git a/src/sentry/api/paginator.py b/src/sentry/api/paginator.py
index 61684a9161f3e..173ce25e87148 100644
--- a/src/sentry/api/paginator.py
+++ b/src/sentry/api/paginator.py
@@ -537,7 +537,7 @@ def get_result(self, limit, cursor=None):
prev=Cursor(0, max(0, offset - limit), True, offset > 0),
next=Cursor(0, max(0, offset + limit), False, has_more),
)
- # TODO use Cursor.value as the `end` argument to data_fn() so that
+ # TODO: use Cursor.value as the `end` argument to data_fn() so that
# subsequent pages returned using these cursors are using the same end
# date for queries, this should stop drift from new incoming events.
diff --git a/src/sentry/api/serializers/base.py b/src/sentry/api/serializers/base.py
index 7a8982affab98..98ccdc9362e94 100644
--- a/src/sentry/api/serializers/base.py
+++ b/src/sentry/api/serializers/base.py
@@ -61,10 +61,10 @@ def serialize(
pass
else:
return objects
- with sentry_sdk.start_span(op="serialize", description=type(serializer).__name__) as span:
+ with sentry_sdk.start_span(op="serialize", name=type(serializer).__name__) as span:
span.set_data("Object Count", len(objects))
- with sentry_sdk.start_span(op="serialize.get_attrs", description=type(serializer).__name__):
+ with sentry_sdk.start_span(op="serialize.get_attrs", name=type(serializer).__name__):
attrs = serializer.get_attrs(
# avoid passing NoneType's to the serializer as they're allowed and
# filtered out of serialize()
@@ -73,7 +73,7 @@ def serialize(
**kwargs,
)
- with sentry_sdk.start_span(op="serialize.iterate", description=type(serializer).__name__):
+ with sentry_sdk.start_span(op="serialize.iterate", name=type(serializer).__name__):
return [serializer(o, attrs=attrs.get(o, {}), user=user, **kwargs) for o in objects]
diff --git a/src/sentry/api/serializers/models/__init__.py b/src/sentry/api/serializers/models/__init__.py
index 24e2717084e9c..55cd1ce1872ce 100644
--- a/src/sentry/api/serializers/models/__init__.py
+++ b/src/sentry/api/serializers/models/__init__.py
@@ -4,7 +4,6 @@
from .apiauthorization import * # noqa: F401,F403
from .apikey import * # noqa: F401,F403
from .apitoken import * # noqa: F401,F403
-from .app_platform_event import * # noqa: F401,F403
from .auditlogentry import * # noqa: F401,F403
from .auth_provider import * # noqa: F401,F403
from .broadcast import * # noqa: F401,F403
@@ -37,7 +36,6 @@
from .organization_member.utils import * # noqa: F401,F403
from .organization_plugin import * # noqa: F401,F403
from .orgauthtoken import * # noqa: F401,F403
-from .platformexternalissue import * # noqa: F401,F403
from .plugin import * # noqa: F401,F403
from .project import * # noqa: F401,F403
from .project_key import * # noqa: F401,F403
@@ -57,11 +55,6 @@
from .role import * # noqa: F401,F403
from .rule import * # noqa: F401,F403
from .savedsearch import * # noqa: F401,F403
-from .sentry_app import * # noqa: F401,F403
-from .sentry_app_avatar import * # noqa: F401,F403
-from .sentry_app_component import * # noqa: F401,F403
-from .sentry_app_installation import * # noqa: F401,F403
-from .servicehook import * # noqa: F401,F403
from .tagvalue import * # noqa: F401,F403
from .team import * # noqa: F401,F403
from .user_social_auth import * # noqa: F401,F403
diff --git a/src/sentry/api/serializers/models/apiapplication.py b/src/sentry/api/serializers/models/apiapplication.py
index d1cdcbbbb8e21..a2d4ab707d7ab 100644
--- a/src/sentry/api/serializers/models/apiapplication.py
+++ b/src/sentry/api/serializers/models/apiapplication.py
@@ -20,4 +20,5 @@ def serialize(self, obj, attrs, user, **kwargs):
"termsUrl": obj.terms_url,
"allowedOrigins": obj.get_allowed_origins(),
"redirectUris": obj.get_redirect_uris(),
+ "scopes": obj.scopes,
}
diff --git a/src/sentry/api/serializers/models/dashboard.py b/src/sentry/api/serializers/models/dashboard.py
index 50d53311004a0..47c25c81b3275 100644
--- a/src/sentry/api/serializers/models/dashboard.py
+++ b/src/sentry/api/serializers/models/dashboard.py
@@ -6,14 +6,15 @@
from sentry import features
from sentry.api.serializers import Serializer, register, serialize
from sentry.constants import ALL_ACCESS_PROJECTS
-from sentry.discover.models import DatasetSourcesTypes
from sentry.models.dashboard import Dashboard
+from sentry.models.dashboard_permissions import DashboardPermissions
from sentry.models.dashboard_widget import (
DashboardWidget,
DashboardWidgetDisplayTypes,
DashboardWidgetQuery,
DashboardWidgetQueryOnDemand,
DashboardWidgetTypes,
+ DatasetSourcesTypes,
)
from sentry.snuba.metrics.extraction import OnDemandMetricSpecVersioning
from sentry.users.api.serializers.user import UserSerializerResponse
@@ -41,6 +42,7 @@ class DashboardWidgetQueryResponse(TypedDict):
widgetId: str
onDemand: list[OnDemandResponse]
isHidden: bool
+ selectedAggregate: int | None
class ThresholdType(TypedDict):
@@ -63,6 +65,10 @@ class DashboardWidgetResponse(TypedDict):
layout: dict[str, int]
+class DashboardPermissionsResponse(TypedDict):
+ is_creator_only_editable: bool
+
+
@register(DashboardWidget)
class DashboardWidgetSerializer(Serializer):
def get_attrs(self, item_list, user, **kwargs):
@@ -164,6 +170,15 @@ def serialize(self, obj, attrs, user, **kwargs) -> DashboardWidgetQueryResponse:
"widgetId": str(obj.widget_id),
"onDemand": attrs["onDemand"],
"isHidden": obj.is_hidden,
+ "selectedAggregate": obj.selected_aggregate,
+ }
+
+
+@register(DashboardPermissions)
+class DashboardPermissionsSerializer(Serializer):
+ def serialize(self, obj, attrs, user, **kwargs) -> DashboardPermissionsResponse:
+ return {
+ "is_creator_only_editable": obj.is_creator_only_editable,
}
@@ -257,6 +272,7 @@ class DashboardDetailsResponse(DashboardDetailsResponseOptional):
widgets: list[DashboardWidgetResponse]
projects: list[int]
filters: DashboardFilters
+ permissions: DashboardPermissionsResponse | None
@register(Dashboard)
@@ -292,6 +308,7 @@ def serialize(self, obj, attrs, user, **kwargs) -> DashboardDetailsResponse:
"widgets": attrs["widgets"],
"projects": [project.id for project in obj.projects.all()],
"filters": {},
+ "permissions": serialize(obj.permissions) if hasattr(obj, "permissions") else None,
}
if obj.filters is not None:
diff --git a/src/sentry/api/serializers/models/environment.py b/src/sentry/api/serializers/models/environment.py
index f4a1bc9440e1b..5f4413f45b901 100644
--- a/src/sentry/api/serializers/models/environment.py
+++ b/src/sentry/api/serializers/models/environment.py
@@ -1,5 +1,6 @@
from collections import namedtuple
from datetime import timedelta
+from typing import TypedDict
from django.utils import timezone
@@ -11,15 +12,28 @@
StatsPeriod = namedtuple("StatsPeriod", ("segments", "interval"))
+class EnvironmentSerializerResponse(TypedDict):
+ id: str
+ name: str
+
+
+class EnvironmentProjectSerializerResponse(TypedDict):
+ id: str
+ name: str
+ isHidden: bool
+
+
@register(Environment)
class EnvironmentSerializer(Serializer):
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(self, obj: Environment, attrs, user, **kwargs) -> EnvironmentSerializerResponse:
return {"id": str(obj.id), "name": obj.name}
@register(EnvironmentProject)
class EnvironmentProjectSerializer(Serializer):
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(
+ self, obj: EnvironmentProject, attrs, user, **kwargs
+ ) -> EnvironmentProjectSerializerResponse:
return {
"id": str(obj.id),
"name": obj.environment.name,
diff --git a/src/sentry/api/serializers/models/event.py b/src/sentry/api/serializers/models/event.py
index 264faaab6b30e..ccde2d14b2d63 100644
--- a/src/sentry/api/serializers/models/event.py
+++ b/src/sentry/api/serializers/models/event.py
@@ -4,7 +4,7 @@
from collections import defaultdict
from collections.abc import Sequence
from datetime import datetime, timezone
-from typing import Any
+from typing import Any, TypedDict, cast
import sentry_sdk
import sqlparse
@@ -12,7 +12,10 @@
from sentry.api.serializers import Serializer, register, serialize
from sentry.api.serializers.models.release import GroupEventReleaseSerializer
-from sentry.eventstore.models import Event, GroupEvent
+from sentry.api.serializers.models.userreport import UserReportSerializerResponse
+from sentry.api.serializers.types import GroupEventReleaseSerializerResponse
+from sentry.eventstore.models import BaseEvent, Event, GroupEvent
+from sentry.interfaces.user import EventUserApiContext, User
from sentry.models.eventattachment import EventAttachment
from sentry.models.eventerror import EventError
from sentry.models.release import Release
@@ -20,7 +23,6 @@
from sentry.sdk_updates import SdkSetupState, get_suggested_updates
from sentry.search.utils import convert_user_tag_to_query, map_device_class_level
from sentry.stacktraces.processing import find_stacktraces_in_data
-from sentry.users.models.user import User
from sentry.utils.json import prune_empty_keys
from sentry.utils.safe import get_path
@@ -34,6 +36,15 @@
MAX_SQL_FORMAT_LENGTH = 1500
+class EventTagOptional(TypedDict, total=False):
+ query: str
+
+
+class EventTag(EventTagOptional):
+ key: str
+ value: str
+
+
def get_crash_files(events):
event_ids = [x.event_id for x in events if x.platform == "native"]
if event_ids:
@@ -85,7 +96,7 @@ def get_tags_with_meta(event):
tags_meta = prune_empty_keys({str(i): e.pop("_meta") for i, e in enumerate(tags)})
- return (tags, meta_with_chunks(tags, tags_meta))
+ return (cast(list[EventTag], tags), meta_with_chunks(tags, tags_meta))
def get_entries(event: Event | GroupEvent, user: User, is_public: bool = False):
@@ -122,6 +133,64 @@ def get_entries(event: Event | GroupEvent, user: User, is_public: bool = False):
)
+class BaseEventSerializerResponse(TypedDict):
+ id: str
+ groupID: str | None
+ eventID: str
+ projectID: str
+ message: str | None
+ title: str
+ location: str | None
+ user: EventUserApiContext | None
+ tags: list[EventTag]
+ platform: str
+ dateReceived: datetime | None
+ contexts: dict[str, Any] | None
+ size: int | None
+ entries: list[Any]
+ dist: str | None
+ sdk: dict[str, str]
+ context: dict[str, Any] | None
+ packages: dict[str, Any]
+ type: str
+ metadata: Any
+ errors: list[Any]
+ occurrence: Any
+ _meta: dict[str, Any]
+
+
+class ErrorEventFields(TypedDict, total=False):
+ crashFile: str | None
+ culprit: str | None
+ dateCreated: datetime
+ fingerprints: list[str]
+ groupingConfig: Any
+
+
+class TransactionEventFields(TypedDict, total=False):
+ startTimestamp: datetime
+ endTimestamp: datetime
+ measurements: Any
+ breakdowns: Any
+ _metrics_summary: Any
+
+
+class EventSerializerResponse(
+ BaseEventSerializerResponse, ErrorEventFields, TransactionEventFields
+):
+ pass
+
+
+class SqlFormatEventSerializerResponse(EventSerializerResponse):
+ release: GroupEventReleaseSerializerResponse | None
+
+
+class IssueEventSerializerResponse(SqlFormatEventSerializerResponse):
+ userReport: UserReportSerializerResponse | None
+ sdkUpdates: list[dict[str, Any]]
+ resolvedWith: list[str]
+
+
@register(GroupEvent)
@register(Event)
class EventSerializer(Serializer):
@@ -151,7 +220,7 @@ def _get_attr_with_meta(self, event, attr, default=None):
def _get_legacy_message_with_meta(self, event):
meta = event.data.get("_meta")
- message = get_path(event.data, "logentry", "formatted")
+ message: str | None = get_path(event.data, "logentry", "formatted")
msg_meta = get_path(meta, "logentry", "formatted")
if not message:
@@ -217,7 +286,7 @@ def should_display_error(self, error):
and ".frames." not in name
)
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(self, obj, attrs, user, **kwargs) -> EventSerializerResponse:
from sentry.api.serializers.rest_framework import convert_dict_key_case, snake_to_camel_case
errors = [
@@ -233,18 +302,19 @@ def serialize(self, obj, attrs, user, **kwargs):
(context, context_meta) = self._get_attr_with_meta(obj, "extra", {})
(packages, packages_meta) = self._get_attr_with_meta(obj, "modules", {})
- received = obj.data.get("received")
- if received:
+ received_data = obj.data.get("received")
+ received: datetime | None = None
+ if received_data:
# Sentry at one point attempted to record invalid types here.
# Remove after June 2 2016
try:
- received = datetime.fromtimestamp(received, timezone.utc)
+ received = datetime.fromtimestamp(received_data, timezone.utc)
except TypeError:
received = None
occurrence = getattr(obj, "occurrence", None)
- d = {
+ event_data: EventSerializerResponse = {
"id": obj.event_id,
"groupID": str(obj.group_id) if obj.group_id else None,
"eventID": obj.event_id,
@@ -286,16 +356,21 @@ def serialize(self, obj, attrs, user, **kwargs):
}
# Serialize attributes that are specific to different types of events.
if obj.get_event_type() == "transaction":
- d.update(self.__serialize_transaction_attrs(attrs, obj))
+ return {
+ **event_data,
+ **self.__serialize_transaction_attrs(attrs, obj),
+ }
else:
- d.update(self.__serialize_error_attrs(attrs, obj))
- return d
+ return {
+ **event_data,
+ **self.__serialize_error_attrs(attrs, obj),
+ }
- def __serialize_transaction_attrs(self, attrs, obj):
+ def __serialize_transaction_attrs(self, attrs, obj) -> TransactionEventFields:
"""
Add attributes that are only present on transaction events.
"""
- transaction_attrs = {
+ transaction_attrs: TransactionEventFields = {
"startTimestamp": obj.data.get("start_timestamp"),
"endTimestamp": obj.data.get("timestamp"),
"measurements": obj.data.get("measurements"),
@@ -308,7 +383,7 @@ def __serialize_transaction_attrs(self, attrs, obj):
return transaction_attrs
- def __serialize_error_attrs(self, attrs, obj):
+ def __serialize_error_attrs(self, attrs, obj) -> ErrorEventFields:
"""
Add attributes that are present on error and default event types
"""
@@ -362,7 +437,7 @@ def _format_sql_query(self, message: str):
return formatted
def _format_breadcrumb_messages(
- self, event_data: dict[str, Any], event: Event | GroupEvent, user: User
+ self, event_data: EventSerializerResponse, event: Event | GroupEvent, user: User
):
try:
breadcrumbs = next(
@@ -386,7 +461,9 @@ def _format_breadcrumb_messages(
sentry_sdk.capture_exception(exc)
return event_data
- def _get_release_info(self, user, event, include_full_release_data: bool):
+ def _get_release_info(
+ self, user, event, include_full_release_data: bool
+ ) -> GroupEventReleaseSerializerResponse | None:
version = event.get_tag("sentry:release")
if not version:
return None
@@ -403,7 +480,9 @@ def _get_release_info(self, user, event, include_full_release_data: bool):
else:
return serialize(release, user, GroupEventReleaseSerializer())
- def _format_db_spans(self, event_data: dict[str, Any], event: Event | GroupEvent, user: User):
+ def _format_db_spans(
+ self, event_data: EventSerializerResponse, event: Event | GroupEvent, user: User
+ ):
try:
spans = next(
filter(lambda entry: entry["type"] == "spans", event_data.get("entries", ())),
@@ -423,15 +502,17 @@ def _format_db_spans(self, event_data: dict[str, Any], event: Event | GroupEvent
sentry_sdk.capture_exception(exc)
return event_data
- def serialize(self, obj, attrs, user, include_full_release_data=False):
+ def serialize(
+ self, obj, attrs, user, include_full_release_data=False
+ ) -> SqlFormatEventSerializerResponse:
result = super().serialize(obj, attrs, user)
- with sentry_sdk.start_span(op="serialize", description="Format SQL"):
+ with sentry_sdk.start_span(op="serialize", name="Format SQL"):
result = self._format_breadcrumb_messages(result, obj, user)
result = self._format_db_spans(result, obj, user)
- result["release"] = self._get_release_info(user, obj, include_full_release_data)
+ release_info = self._get_release_info(user, obj, include_full_release_data)
- return result
+ return {**result, "release": release_info}
class IssueEventSerializer(SqlFormatEventSerializer):
@@ -459,12 +540,17 @@ def _get_resolved_with(self, obj: Event) -> list[str]:
return list(unique_resolution_methods)
- def serialize(self, obj, attrs, user, include_full_release_data=False):
+ def serialize(
+ self, obj, attrs, user, include_full_release_data=False
+ ) -> IssueEventSerializerResponse:
result = super().serialize(obj, attrs, user, include_full_release_data)
- result["userReport"] = self._get_user_report(user, obj)
- result["sdkUpdates"] = self._get_sdk_updates(obj)
- result["resolvedWith"] = self._get_resolved_with(obj)
- return result
+
+ return {
+ **result,
+ "userReport": self._get_user_report(user, obj),
+ "sdkUpdates": self._get_sdk_updates(obj),
+ "resolvedWith": self._get_resolved_with(obj),
+ }
class SharedEventSerializer(EventSerializer):
@@ -483,6 +569,27 @@ def serialize(self, obj, attrs, user, **kwargs):
return result
+SimpleEventSerializerResponse = TypedDict(
+ "SimpleEventSerializerResponse",
+ {
+ "id": str,
+ "event.type": str,
+ "groupID": str | None,
+ "eventID": str,
+ "projectID": str,
+ "message": str,
+ "title": str,
+ "location": str | None,
+ "culprit": str,
+ "user": EventUserApiContext | None,
+ "tags": list[EventTag],
+ "platform": str,
+ "dateCreated": datetime,
+ "crashFile": str | None,
+ },
+)
+
+
class SimpleEventSerializer(EventSerializer):
"""
Simple event serializer that renders a basic outline of an event without
@@ -505,17 +612,19 @@ def get_attrs(self, item_list, user, **kwargs):
}
return {event: {"crash_file": serialized_files.get(event.event_id)} for event in item_list}
- def serialize(self, obj, attrs, user, **kwargs):
- tags = [{"key": key.split("sentry:", 1)[-1], "value": value} for key, value in obj.tags]
+ def serialize(self, obj: BaseEvent, attrs, user, **kwargs) -> SimpleEventSerializerResponse:
+ tags: list[EventTag] = [
+ {"key": key.split("sentry:", 1)[-1], "value": value} for key, value in obj.tags
+ ]
for tag in tags:
query = convert_user_tag_to_query(tag["key"], tag["value"])
if query:
tag["query"] = query
map_device_class_tags(tags)
- user = obj.get_minimal_user()
+ event_user = obj.get_minimal_user()
- return {
+ response: SimpleEventSerializerResponse = {
"id": str(obj.event_id),
"event.type": str(obj.get_event_type()),
"groupID": str(obj.group_id) if obj.group_id else None,
@@ -527,7 +636,7 @@ def serialize(self, obj, attrs, user, **kwargs):
"title": obj.title,
"location": obj.location,
"culprit": obj.culprit,
- "user": user and user.get_api_context(),
+ "user": event_user and event_user.get_api_context(),
"tags": tags,
"platform": obj.platform,
"dateCreated": obj.datetime,
@@ -535,6 +644,8 @@ def serialize(self, obj, attrs, user, **kwargs):
"crashFile": attrs["crash_file"],
}
+ return response
+
class ExternalEventSerializer(EventSerializer):
"""
diff --git a/src/sentry/api/serializers/models/eventuser.py b/src/sentry/api/serializers/models/eventuser.py
index 89328f031d77c..2fd7ee04dfb3d 100644
--- a/src/sentry/api/serializers/models/eventuser.py
+++ b/src/sentry/api/serializers/models/eventuser.py
@@ -1,11 +1,26 @@
+from typing import TypedDict
+
from sentry.api.serializers import Serializer, register
from sentry.utils.avatar import get_gravatar_url
from sentry.utils.eventuser import EventUser
+class EventUserSerializerResponse(TypedDict):
+ id: str | None
+ tagValue: str
+ identifier: str
+ username: str
+ email: str
+ name: str
+ ipAddress: str
+ avatarUrl: str
+ hash: str
+ dateCreated: None
+
+
@register(EventUser)
class EventUserSerializer(Serializer):
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(self, obj, attrs, user, **kwargs) -> EventUserSerializerResponse:
return {
"id": str(obj.id) if obj.id is not None else obj.id,
"tagValue": obj.tag_value,
diff --git a/src/sentry/api/serializers/models/group.py b/src/sentry/api/serializers/models/group.py
index 924aafb1d71e5..e056c3113faf9 100644
--- a/src/sentry/api/serializers/models/group.py
+++ b/src/sentry/api/serializers/models/group.py
@@ -648,7 +648,7 @@ def _resolve_resolutions(
@staticmethod
def _resolve_external_issue_annotations(groups: Sequence[Group]) -> Mapping[int, Sequence[Any]]:
- from sentry.models.platformexternalissue import PlatformExternalIssue
+ from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
# find the external issues for sentry apps and add them in
return (
diff --git a/src/sentry/api/serializers/models/group_stream.py b/src/sentry/api/serializers/models/group_stream.py
index a072fa1c2dad3..78eaf1e48262d 100644
--- a/src/sentry/api/serializers/models/group_stream.py
+++ b/src/sentry/api/serializers/models/group_stream.py
@@ -19,7 +19,6 @@
SeenStats,
snuba_tsdb,
)
-from sentry.api.serializers.models.platformexternalissue import PlatformExternalIssueSerializer
from sentry.api.serializers.models.plugin import is_plugin_deprecated
from sentry.constants import StatsPeriod
from sentry.integrations.api.serializers.models.external_issue import ExternalIssueSerializer
@@ -31,7 +30,10 @@
from sentry.models.groupinbox import get_inbox_details
from sentry.models.grouplink import GroupLink
from sentry.models.groupowner import get_owner_details
-from sentry.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.api.serializers.platform_external_issue import (
+ PlatformExternalIssueSerializer,
+)
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
from sentry.snuba.dataset import Dataset
from sentry.tsdb.base import TSDBModel
from sentry.utils import metrics
diff --git a/src/sentry/api/serializers/models/organization.py b/src/sentry/api/serializers/models/organization.py
index 0b4d6c72caaf1..53b13775a81c5 100644
--- a/src/sentry/api/serializers/models/organization.py
+++ b/src/sentry/api/serializers/models/organization.py
@@ -48,9 +48,11 @@
SAFE_FIELDS_DEFAULT,
SCRAPE_JAVASCRIPT_DEFAULT,
SENSITIVE_FIELDS_DEFAULT,
+ TARGET_SAMPLE_RATE_DEFAULT,
UPTIME_AUTODETECTION,
ObjectStatus,
)
+from sentry.db.models.fields.slug import DEFAULT_SLUG_MAX_LENGTH
from sentry.dynamic_sampling.tasks.common import get_organization_volume
from sentry.dynamic_sampling.tasks.helpers.sliding_window import get_sliding_window_org_sample_rate
from sentry.killswitches import killswitch_matches_context
@@ -101,7 +103,7 @@ class BaseOrganizationSerializer(serializers.Serializer):
# 3. cannot end with a dash
slug = SentrySerializerSlugField(
org_slug=True,
- max_length=50,
+ max_length=DEFAULT_SLUG_MAX_LENGTH,
)
def validate_slug(self, value: str) -> str:
@@ -265,7 +267,7 @@ def get_feature_set(
]
feature_set = set()
- with sentry_sdk.start_span(op="features.check", description="check batch features"):
+ with sentry_sdk.start_span(op="features.check", name="check batch features"):
# Check features in batch using the entity handler
batch_features = features.batch_has(org_features, actor=user, organization=obj)
@@ -281,7 +283,7 @@ def get_feature_set(
# This feature_name was found via `batch_has`, don't check again using `has`
org_features.remove(feature_name)
- with sentry_sdk.start_span(op="features.check", description="check individual features"):
+ with sentry_sdk.start_span(op="features.check", name="check individual features"):
# Remaining features should not be checked via the entity handler
for feature_name in org_features:
if features.has(feature_name, obj, actor=user, skip_entity=True):
@@ -420,7 +422,7 @@ def serialize(
class _DetailedOrganizationSerializerResponseOptional(OrganizationSerializerResponse, total=False):
- role: Any # TODO replace with enum/literal
+ role: Any # TODO: replace with enum/literal
orgRole: str
uptimeAutodetection: bool
@@ -611,6 +613,11 @@ def serialize( # type: ignore[explicit-override, override]
obj.get_option("sentry:uptime_autodetection", UPTIME_AUTODETECTION)
)
+ if features.has("organizations:dynamic-sampling-custom", obj, actor=user):
+ context["targetSampleRate"] = float(
+ obj.get_option("sentry:target_sample_rate", TARGET_SAMPLE_RATE_DEFAULT)
+ )
+
trusted_relays_raw = obj.get_option("sentry:trusted-relays") or []
# serialize trusted relays info into their external form
context["trustedRelays"] = [TrustedRelaySerializer(raw).data for raw in trusted_relays_raw]
diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py
index 821d3ccf82c0d..3465fa28b34c9 100644
--- a/src/sentry/api/serializers/models/project.py
+++ b/src/sentry/api/serializers/models/project.py
@@ -275,7 +275,7 @@ class ProjectSerializerResponse(ProjectSerializerBaseResponse):
isPublic: bool
avatar: SerializedAvatarFields
color: str
- status: str # TODO enum/literal
+ status: str # TODO: enum/literal
@register(Project)
diff --git a/src/sentry/api/serializers/models/release.py b/src/sentry/api/serializers/models/release.py
index 8b73b870a314b..4077b86c0fcfe 100644
--- a/src/sentry/api/serializers/models/release.py
+++ b/src/sentry/api/serializers/models/release.py
@@ -11,7 +11,11 @@
from sentry import release_health, tagstore
from sentry.api.serializers import Serializer, register, serialize
-from sentry.api.serializers.types import ReleaseSerializerResponse
+from sentry.api.serializers.release_details_types import VersionInfo
+from sentry.api.serializers.types import (
+ GroupEventReleaseSerializerResponse,
+ ReleaseSerializerResponse,
+)
from sentry.models.commit import Commit
from sentry.models.commitauthor import CommitAuthor
from sentry.models.deploy import Deploy
@@ -27,7 +31,7 @@
from sentry.utils.hashlib import md5_text
-def expose_version_info(info):
+def expose_version_info(info) -> VersionInfo | None:
if info is None:
return None
version = {"raw": info["version_raw"]}
@@ -616,7 +620,7 @@ def get_attrs(self, item_list, user, **kwargs):
result[item] = p
return result
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(self, obj, attrs, user, **kwargs) -> GroupEventReleaseSerializerResponse:
return {
"id": obj.id,
"commitCount": obj.commit_count,
diff --git a/src/sentry/api/serializers/models/tagvalue.py b/src/sentry/api/serializers/models/tagvalue.py
index 66b050ff4d71b..ff615926f58ee 100644
--- a/src/sentry/api/serializers/models/tagvalue.py
+++ b/src/sentry/api/serializers/models/tagvalue.py
@@ -1,3 +1,5 @@
+from typing import Any, cast
+
from sentry.api.serializers import Serializer
from sentry.search.utils import convert_user_tag_to_query
from sentry.utils.eventuser import EventUser
@@ -20,8 +22,9 @@ def get_attrs(self, item_list, user, **kwargs):
return result
def serialize(self, obj, attrs, user, **kwargs):
+ result: dict[str, Any] = {}
if isinstance(attrs["user"], EventUser):
- result = attrs["user"].serialize()
+ result = cast(dict[str, Any], attrs["user"].serialize())
else:
result = {"id": None}
diff --git a/src/sentry/api/serializers/models/userreport.py b/src/sentry/api/serializers/models/userreport.py
index 6ec62d5051a79..8b92841bef0fe 100644
--- a/src/sentry/api/serializers/models/userreport.py
+++ b/src/sentry/api/serializers/models/userreport.py
@@ -1,11 +1,36 @@
-from sentry import eventstore
+from datetime import timedelta
+from typing import Any, TypedDict
+
+from django.utils import timezone
+
+from sentry import eventstore, quotas
from sentry.api.serializers import Serializer, register, serialize
from sentry.eventstore.models import Event
from sentry.models.group import Group
from sentry.models.project import Project
from sentry.models.userreport import UserReport
from sentry.snuba.dataset import Dataset
-from sentry.utils.eventuser import EventUser
+from sentry.utils.eventuser import EventUser, SerializedEventUser
+
+
+class UserReportEvent(TypedDict):
+ id: str
+ eventID: str
+
+
+class UserReportSerializerResponse(TypedDict):
+ id: str
+ eventID: str
+ name: str | None
+ email: str | None
+ comments: str
+ dateCreated: str
+ user: SerializedEventUser | None
+ event: UserReportEvent
+
+
+class UserReportWithGroupSerializerResponse(UserReportSerializerResponse):
+ issue: dict[str, Any]
@register(UserReport)
@@ -14,11 +39,13 @@ def get_attrs(self, item_list, user, **kwargs):
attrs = {}
project = Project.objects.get(id=item_list[0].project_id)
+ retention = quotas.backend.get_event_retention(organization=project.organization)
events = eventstore.backend.get_events(
filter=eventstore.Filter(
event_ids=[item.event_id for item in item_list],
project_ids=[project.id],
+ start=timezone.now() - timedelta(days=retention) if retention else None,
),
referrer="UserReportSerializer.get_attrs",
dataset=Dataset.Events,
@@ -28,14 +55,16 @@ def get_attrs(self, item_list, user, **kwargs):
events_dict: dict[str, Event] = {event.event_id: event for event in events}
for item in item_list:
attrs[item] = {
- "event_user": EventUser.from_event(events_dict[item.event_id])
- if events_dict.get(item.event_id)
- else {}
+ "event_user": (
+ EventUser.from_event(events_dict[item.event_id])
+ if events_dict.get(item.event_id)
+ else {}
+ )
}
return attrs
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(self, obj, attrs, user, **kwargs) -> UserReportSerializerResponse:
# TODO(dcramer): add in various context from the event
# context == user / http / extra interfaces
@@ -86,7 +115,9 @@ def get_attrs(self, item_list, user, **kwargs):
)
return attrs
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(self, obj, attrs, user, **kwargs) -> UserReportWithGroupSerializerResponse:
context = super().serialize(obj, attrs, user)
- context["issue"] = attrs["group"]
- return context
+ return {
+ **context,
+ "issue": attrs["group"],
+ }
diff --git a/src/sentry/api/serializers/release_details_types.py b/src/sentry/api/serializers/release_details_types.py
index 7cacff2a0927c..b524d2671330a 100644
--- a/src/sentry/api/serializers/release_details_types.py
+++ b/src/sentry/api/serializers/release_details_types.py
@@ -41,7 +41,7 @@ class VersionInfoOptional(TypedDict, total=False):
class VersionInfo(VersionInfoOptional):
package: str | None
- version: dict[str, str]
+ version: dict[str, Any]
buildHash: str | None
@@ -51,7 +51,7 @@ class LastDeployOptional(TypedDict, total=False):
class LastDeploy(LastDeployOptional):
- id: int
+ id: str
environment: str
dateFinished: str
name: str
diff --git a/src/sentry/api/serializers/rest_framework/__init__.py b/src/sentry/api/serializers/rest_framework/__init__.py
index a53794edadd18..8125cd6632d14 100644
--- a/src/sentry/api/serializers/rest_framework/__init__.py
+++ b/src/sentry/api/serializers/rest_framework/__init__.py
@@ -11,6 +11,3 @@
from .project_key import * # noqa: F401,F403
from .release import * # noqa: F401,F403
from .rule import * # noqa: F401,F403
-from .sentry_app import * # noqa: F401,F403
-from .sentry_app_installation import * # noqa: F401,F403
-from .sentry_app_request import * # noqa: F401,F403
diff --git a/src/sentry/api/serializers/rest_framework/dashboard.py b/src/sentry/api/serializers/rest_framework/dashboard.py
index 71f9d0140b763..67d87c831afa9 100644
--- a/src/sentry/api/serializers/rest_framework/dashboard.py
+++ b/src/sentry/api/serializers/rest_framework/dashboard.py
@@ -160,6 +160,8 @@ class DashboardWidgetQuerySerializer(CamelSnakeSerializer[Dashboard]):
on_demand_extraction = DashboardWidgetQueryOnDemandSerializer(many=False, required=False)
on_demand_extraction_disabled = serializers.BooleanField(required=False)
+ selected_aggregate = serializers.IntegerField(required=False, allow_null=True)
+
required_for_create = {"fields", "conditions"}
validate_id = validate_id
@@ -341,7 +343,8 @@ def validate(self, data):
if (
ondemand_feature
- and data.get("widget_type") == DashboardWidgetTypes.DISCOVER
+ and data.get("widget_type")
+ in [DashboardWidgetTypes.DISCOVER, DashboardWidgetTypes.TRANSACTION_LIKE]
and not query.get("on_demand_extraction_disabled", False)
):
if query.get("columns"):
@@ -453,6 +456,12 @@ def validate(self, data):
return data
+class DashboardPermissionsSerializer(CamelSnakeSerializer[Dashboard]):
+ is_creator_only_editable = serializers.BooleanField(
+ help_text="Whether the dashboard is editable only by the creator.",
+ )
+
+
class DashboardDetailsSerializer(CamelSnakeSerializer[Dashboard]):
# Is a string because output serializers also make it a string.
id = serializers.CharField(required=False, help_text="A dashboard's unique id.")
@@ -491,6 +500,11 @@ class DashboardDetailsSerializer(CamelSnakeSerializer[Dashboard]):
help_text="Setting that lets you display saved time range for this dashboard in UTC.",
)
validate_id = validate_id
+ permissions = DashboardPermissionsSerializer(
+ required=False,
+ allow_null=True,
+ help_text="Permissions that restrict users from editing dashboards",
+ )
def validate_projects(self, projects):
from sentry.api.validators import validate_project_ids
@@ -646,12 +660,16 @@ def create_widget(self, dashboard, widget_data, order):
orderby=query.get("orderby", ""),
order=i,
is_hidden=query.get("is_hidden", False),
+ selected_aggregate=query.get("selected_aggregate"),
)
)
DashboardWidgetQuery.objects.bulk_create(new_queries)
- if widget.widget_type == DashboardWidgetTypes.DISCOVER:
+ if widget.widget_type in [
+ DashboardWidgetTypes.DISCOVER,
+ DashboardWidgetTypes.TRANSACTION_LIKE,
+ ]:
self._check_query_cardinality(new_queries)
def _check_query_cardinality(self, new_queries: Sequence[DashboardWidgetQuery]):
@@ -724,13 +742,17 @@ def update_widget_queries(self, widget, data):
is_hidden=query_data.get("is_hidden", False),
orderby=query_data.get("orderby", ""),
order=next_order + i,
+ selected_aggregate=query_data.get("selected_aggregate"),
)
)
else:
raise serializers.ValidationError("You cannot use a query not owned by this widget")
DashboardWidgetQuery.objects.bulk_create(new_queries)
- if widget.widget_type == DashboardWidgetTypes.DISCOVER:
+ if widget.widget_type in [
+ DashboardWidgetTypes.DISCOVER,
+ DashboardWidgetTypes.TRANSACTION_LIKE,
+ ]:
self._check_query_cardinality(new_queries + update_queries)
def update_widget_query(self, query, data, order):
@@ -742,6 +764,7 @@ def update_widget_query(self, query, data, order):
query.columns = data.get("columns", query.columns)
query.field_aliases = data.get("field_aliases", query.field_aliases)
query.is_hidden = data.get("is_hidden", query.is_hidden)
+ query.selected_aggregate = data.get("selected_aggregate", query.selected_aggregate)
query.order = order
query.save()
diff --git a/src/sentry/api/serializers/types.py b/src/sentry/api/serializers/types.py
index 460477b2a11b7..0d58ed915feb0 100644
--- a/src/sentry/api/serializers/types.py
+++ b/src/sentry/api/serializers/types.py
@@ -66,9 +66,26 @@ class ReleaseSerializerResponse(ReleaseSerializerResponseOptional):
newGroups: int
status: str
shortVersion: str
- versionInfo: VersionInfo
+ versionInfo: VersionInfo | None
data: dict[str, Any]
commitCount: int
deployCount: int
authors: list[Author]
projects: list[Project]
+
+
+class GroupEventReleaseSerializerResponse(TypedDict, total=False):
+ id: int
+ commitCount: int
+ data: dict[str, Any]
+ dateCreated: datetime
+ dateReleased: datetime | None
+ deployCount: int
+ ref: str | None
+ lastCommit: dict[str, Any] | None
+ lastDeploy: LastDeploy | None
+ status: str
+ url: str | None
+ userAgent: str | None
+ version: str | None
+ versionInfo: VersionInfo | None
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index b57570d5f0507..801ce25ee35cb 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -7,7 +7,6 @@
from sentry.api.endpoints.group_autofix_setup_check import GroupAutofixSetupCheck
from sentry.api.endpoints.group_integration_details import GroupIntegrationDetailsEndpoint
from sentry.api.endpoints.group_integrations import GroupIntegrationsEndpoint
-from sentry.api.endpoints.issues.related_issues import RelatedIssuesEndpoint
from sentry.api.endpoints.org_auth_token_details import OrgAuthTokenDetailsEndpoint
from sentry.api.endpoints.org_auth_tokens import OrgAuthTokensEndpoint
from sentry.api.endpoints.organization_events_anomalies import OrganizationEventsAnomaliesEndpoint
@@ -56,6 +55,7 @@
from sentry.api.endpoints.relocations.recover import RelocationRecoverEndpoint
from sentry.api.endpoints.relocations.retry import RelocationRetryEndpoint
from sentry.api.endpoints.relocations.unpause import RelocationUnpauseEndpoint
+from sentry.api.endpoints.secret_scanning.github import SecretScanningGitHubEndpoint
from sentry.api.endpoints.seer_rpc import SeerRpcServiceEndpoint
from sentry.api.endpoints.source_map_debug_blue_thunder_edition import (
SourceMapDebugBlueThunderEditionEndpoint,
@@ -74,6 +74,11 @@
DiscoverSavedQueryDetailEndpoint,
DiscoverSavedQueryVisitEndpoint,
)
+from sentry.flags.endpoints.hooks import OrganizationFlagsHooksEndpoint
+from sentry.flags.endpoints.logs import (
+ OrganizationFlagLogDetailsEndpoint,
+ OrganizationFlagLogIndexEndpoint,
+)
from sentry.incidents.endpoints.organization_alert_rule_activations import (
OrganizationAlertRuleActivationsEndpoint,
)
@@ -181,6 +186,8 @@
GroupParticipantsEndpoint,
GroupSimilarIssuesEmbeddingsEndpoint,
GroupSimilarIssuesEndpoint,
+ GroupTombstoneDetailsEndpoint,
+ GroupTombstoneEndpoint,
OrganizationGroupIndexEndpoint,
OrganizationGroupIndexStatsEndpoint,
OrganizationGroupSearchViewsEndpoint,
@@ -191,6 +198,7 @@
ProjectGroupIndexEndpoint,
ProjectGroupStatsEndpoint,
ProjectStacktraceLinkEndpoint,
+ RelatedIssuesEndpoint,
SharedGroupDetailsEndpoint,
ShortIdLookupEndpoint,
SourceMapDebugEndpoint,
@@ -242,10 +250,6 @@
from sentry.monitors.endpoints.project_processing_errors_index import (
ProjectProcessingErrorsIndexEndpoint,
)
-from sentry.remote_config.endpoints import (
- ProjectConfigurationEndpoint,
- ProjectConfigurationProxyEndpoint,
-)
from sentry.replays.endpoints.organization_replay_count import OrganizationReplayCountEndpoint
from sentry.replays.endpoints.organization_replay_details import OrganizationReplayDetailsEndpoint
from sentry.replays.endpoints.organization_replay_events_meta import (
@@ -255,9 +259,6 @@
from sentry.replays.endpoints.organization_replay_selector_index import (
OrganizationReplaySelectorIndexEndpoint,
)
-from sentry.replays.endpoints.project_replay_accessibility_issues import (
- ProjectReplayAccessibilityIssuesEndpoint,
-)
from sentry.replays.endpoints.project_replay_clicks_index import ProjectReplayClicksIndexEndpoint
from sentry.replays.endpoints.project_replay_details import ProjectReplayDetailsEndpoint
from sentry.replays.endpoints.project_replay_recording_segment_details import (
@@ -308,7 +309,15 @@
)
from sentry.sentry_apps.api.endpoints.sentry_app_requests import SentryAppRequestsEndpoint
from sentry.sentry_apps.api.endpoints.sentry_app_rotate_secret import SentryAppRotateSecretEndpoint
+from sentry.sentry_apps.api.endpoints.sentry_app_stats_details import SentryAppStatsEndpoint
from sentry.sentry_apps.api.endpoints.sentry_apps import SentryAppsEndpoint
+from sentry.sentry_apps.api.endpoints.sentry_apps_stats import SentryAppsStatsEndpoint
+from sentry.sentry_apps.api.endpoints.sentry_internal_app_token_details import (
+ SentryInternalAppTokenDetailsEndpoint,
+)
+from sentry.sentry_apps.api.endpoints.sentry_internal_app_tokens import (
+ SentryInternalAppTokensEndpoint,
+)
from sentry.uptime.endpoints.project_uptime_alert_details import ProjectUptimeAlertDetailsEndpoint
from sentry.uptime.endpoints.project_uptime_alert_index import ProjectUptimeAlertIndexEndpoint
from sentry.users.api.endpoints.authenticator_index import AuthenticatorIndexEndpoint
@@ -392,17 +401,9 @@
from .endpoints.group_tagkey_details import GroupTagKeyDetailsEndpoint
from .endpoints.group_tagkey_values import GroupTagKeyValuesEndpoint
from .endpoints.group_tags import GroupTagsEndpoint
-from .endpoints.group_tombstone import GroupTombstoneEndpoint
-from .endpoints.group_tombstone_details import GroupTombstoneDetailsEndpoint
from .endpoints.group_user_reports import GroupUserReportsEndpoint
from .endpoints.grouping_configs import GroupingConfigsEndpoint
from .endpoints.index import IndexEndpoint
-from .endpoints.integrations.sentry_apps import (
- SentryAppsStatsEndpoint,
- SentryAppStatsEndpoint,
- SentryInternalAppTokenDetailsEndpoint,
- SentryInternalAppTokensEndpoint,
-)
from .endpoints.internal import (
InternalBeaconEndpoint,
InternalEnvironmentEndpoint,
@@ -586,7 +587,6 @@
from .endpoints.project_create_sample import ProjectCreateSampleEndpoint
from .endpoints.project_create_sample_transaction import ProjectCreateSampleTransactionEndpoint
from .endpoints.project_details import ProjectDetailsEndpoint
-from .endpoints.project_docs_platform import ProjectDocsPlatformEndpoint
from .endpoints.project_environment_details import ProjectEnvironmentDetailsEndpoint
from .endpoints.project_environments import ProjectEnvironmentsEndpoint
from .endpoints.project_filter_details import ProjectFilterDetailsEndpoint
@@ -714,7 +714,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
name=f"{name_prefix}-group-events",
),
re_path(
- r"^(?P<issue_id>[^\/]+)/events/(?P<event_id>(?:latest|oldest|helpful|recommended|\d+|[A-Fa-f0-9-]{32,36}))/$",
+ r"^(?P<issue_id>[^\/]+)/events/(?P<event_id>(?:latest|oldest|recommended|\d+|[A-Fa-f0-9-]{32,36}))/$",
GroupEventDetailsEndpoint.as_view(),
name=f"{name_prefix}-group-event-details",
),
@@ -2031,6 +2031,23 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
OrganizationRelayUsage.as_view(),
name="sentry-api-0-organization-relay-usage",
),
+ # Flags
+ re_path(
+ r"^(?P<organization_id_or_slug>[^\/]+)/flags/logs/$",
+ OrganizationFlagLogIndexEndpoint.as_view(),
+ name="sentry-api-0-organization-flag-logs",
+ ),
+ re_path(
+ r"^(?P<organization_id_or_slug>[^\/]+)/flags/logs/(?P<flag_log_id>\d+)/$",
+ OrganizationFlagLogDetailsEndpoint.as_view(),
+ name="sentry-api-0-organization-flag-log",
+ ),
+ re_path(
+ r"^(?P<organization_id_or_slug>[^\/]+)/flags/hooks/provider/(?P<provider>[\w-]+)/$",
+ OrganizationFlagsHooksEndpoint.as_view(),
+ name="sentry-api-0-organization-flag-hooks",
+ ),
+ # Replays
re_path(
r"^(?P<organization_id_or_slug>[^\/]+)/replays/$",
OrganizationReplayIndexEndpoint.as_view(),
@@ -2249,11 +2266,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
ProjectCreateSampleTransactionEndpoint.as_view(),
name="sentry-api-0-project-create-sample-transaction",
),
- re_path(
- r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/docs/(?P<platform>[\w-]+)/$",
- ProjectDocsPlatformEndpoint.as_view(),
- name="sentry-api-0-project-docs-platform",
- ),
re_path(
r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/environments/$",
ProjectEnvironmentsEndpoint.as_view(),
@@ -2422,11 +2434,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/keys/(?P<key_id>[^\/]+)/stats/$",
ProjectKeyStatsEndpoint.as_view(),
),
- re_path(
- r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/configuration/$",
- ProjectConfigurationEndpoint.as_view(),
- name="sentry-api-0-project-key-configuration",
- ),
re_path(
r"^(?P<organization_id_or_slug>[^/]+)/(?P<project_id_or_slug>[^/]+)/members/$",
ProjectMemberIndexEndpoint.as_view(),
@@ -2532,11 +2539,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
ProjectReplayViewedByEndpoint.as_view(),
name="sentry-api-0-project-replay-viewed-by",
),
- re_path(
- r"^(?P<organization_id_or_slug>[^/]+)/(?P<project_id_or_slug>[^\/]+)/replays/(?P<replay_id>[\w-]+)/accessibility-issues/$",
- ProjectReplayAccessibilityIssuesEndpoint.as_view(),
- name="sentry-api-0-project-replay-accessibility-issues",
- ),
re_path(
r"^(?P<organization_id_or_slug>[^/]+)/(?P<project_id_or_slug>[^\/]+)/replays/(?P<replay_id>[\w-]+)/clicks/$",
ProjectReplayClicksIndexEndpoint.as_view(),
@@ -3282,11 +3284,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
SetupWizard.as_view(),
name="sentry-api-0-project-wizard",
),
- re_path(
- r"^remote-config/projects/(?P<project_id>[^\/]+)/$",
- ProjectConfigurationProxyEndpoint.as_view(),
- name="sentry-api-0-project-remote-configuration",
- ),
# Internal
re_path(
r"^internal/",
@@ -3302,6 +3299,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
RelocationPublicKeyEndpoint.as_view(),
name="sentry-api-0-relocations-public-key",
),
+ # Secret Scanning
+ re_path(
+ r"^secret-scanning/github/$",
+ SecretScanningGitHubEndpoint.as_view(),
+ name="sentry-api-0-secret-scanning-github",
+ ),
# Catch all
re_path(
r"^$",
diff --git a/src/sentry/api/validators/__init__.py b/src/sentry/api/validators/__init__.py
index 9836bf781fe22..455db4b7f387b 100644
--- a/src/sentry/api/validators/__init__.py
+++ b/src/sentry/api/validators/__init__.py
@@ -7,5 +7,4 @@
from .notifications import * # noqa: F401,F403
from .project import * # noqa: F401,F403
from .project_codeowners import * # noqa: F401,F403
-from .servicehook import * # noqa: F401,F403
from .userrole import * # noqa: F401,F403
diff --git a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py
index 94ba8c7342e7c..7f5ef84ac62f9 100644
--- a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py
+++ b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py
@@ -317,4 +317,5 @@
"/extensions/jira/uninstalled/",
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/filters/",
"/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/alerts-triggered/",
+ "/_warmup/",
]
diff --git a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py
index 553132be5907b..7201ea8861bd7 100644
--- a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py
+++ b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py
@@ -15,11 +15,10 @@
"/api/0/relays/{relay_id}/": {"DELETE"},
"/api/0/{var}/{issue_id}/": {"DELETE", "GET", "PUT"},
"/api/0/{var}/{issue_id}/activities/": {"GET"},
- "/api/0/{var}/{issue_id}/events/": {"GET"},
"/api/0/{var}/{issue_id}/events/{event_id}/": {"GET"},
"/api/0/{var}/{issue_id}/{var}/": {"GET", "POST"},
"/api/0/{var}/{issue_id}/{var}/{note_id}/": {"DELETE", "PUT"},
- "/api/0/{var}/{issue_id}/hashes/": {"GET", "DELETE"},
+ "/api/0/{var}/{issue_id}/hashes/": {"GET", "DELETE", "PUT"},
"/api/0/{var}/{issue_id}/reprocessing/": {"POST"},
"/api/0/{var}/{issue_id}/stats/": {"GET"},
"/api/0/{var}/{issue_id}/tags/": {"GET"},
@@ -83,6 +82,7 @@
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/hashes/": {
"GET",
"DELETE",
+ "PUT",
},
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/reprocessing/": {"POST"},
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/stats/": {"GET"},
diff --git a/src/sentry/apidocs/examples/dashboard_examples.py b/src/sentry/apidocs/examples/dashboard_examples.py
index 07d3d21c790ae..4ee6029151a05 100644
--- a/src/sentry/apidocs/examples/dashboard_examples.py
+++ b/src/sentry/apidocs/examples/dashboard_examples.py
@@ -56,6 +56,7 @@
}
],
"isHidden": False,
+ "selectedAggregate": None,
}
],
"limit": None,
@@ -66,6 +67,7 @@
"projects": [1],
"filters": {},
"period": "7d",
+ "permissions": {"is_creator_only_editable": False},
}
DASHBOARDS_OBJECT = [
diff --git a/src/sentry/apidocs/examples/environment_examples.py b/src/sentry/apidocs/examples/environment_examples.py
index 485ffc527ec02..c13738180beb4 100644
--- a/src/sentry/apidocs/examples/environment_examples.py
+++ b/src/sentry/apidocs/examples/environment_examples.py
@@ -7,11 +7,11 @@ class EnvironmentExamples:
"List an Organization's Environments",
value=[
{
- "id": 1,
+ "id": "1",
"name": "Production",
},
{
- "id": 2,
+ "id": "2",
"name": "Staging",
},
],
@@ -19,3 +19,15 @@ class EnvironmentExamples:
response_only=True,
)
]
+
+ GET_PROJECT_ENVIRONMENTS = [
+ OpenApiExample(
+ "List a Project's Environments",
+ value=[
+ {"id": "1", "name": "Production", "isHidden": False},
+ {"id": "2", "name": "Staging", "isHidden": True},
+ ],
+ status_codes=["200"],
+ response_only=True,
+ )
+ ]
diff --git a/src/sentry/apidocs/examples/event_examples.py b/src/sentry/apidocs/examples/event_examples.py
new file mode 100644
index 0000000000000..22797eafad053
--- /dev/null
+++ b/src/sentry/apidocs/examples/event_examples.py
@@ -0,0 +1,442 @@
+from datetime import datetime
+
+from drf_spectacular.utils import OpenApiExample
+
+from sentry.issues.endpoints.project_event_details import GroupEventDetailsResponse
+
+SIMPLE_EVENT = {
+ "eventID": "9fac2ceed9344f2bbfdd1fdacb0ed9b1",
+ "tags": [
+ {"key": "browser", "value": "Chrome 60.0"},
+ {"key": "device", "value": "Other"},
+ {"key": "environment", "value": "production"},
+ {"value": "fatal", "key": "level"},
+ {"key": "os", "value": "Mac OS X 10.12.6"},
+ {"value": "CPython 2.7.16", "key": "runtime"},
+ {"key": "release", "value": "17642328ead24b51867165985996d04b29310337"},
+ {"key": "server_name", "value": "web1.example.com"},
+ ],
+ "dateCreated": "2020-09-11T17:46:36Z",
+ "user": None,
+ "message": "",
+ "title": "This is an example Python exception",
+ "id": "dfb1a2d057194e76a4186cc8a5271553",
+ "platform": "python",
+ "event.type": "error",
+ "groupID": "1889724436",
+ "crashFile": None,
+ "location": "example.py:123",
+ "culprit": "/books/new/",
+ "projectID": "49271",
+}
+
+GROUP_EVENT: GroupEventDetailsResponse = {
+ "groupID": "1341191803",
+ "eventID": "9999aaaaca8b46d797c23c6077c6ff01",
+ "dist": None,
+ "userReport": None,
+ "previousEventID": None,
+ "message": "",
+ "title": "This is an example Python exception",
+ "id": "9999aaafcc8b46d797c23c6077c6ff01",
+ "size": 107762,
+ "errors": [
+ {
+ "data": {
+ "column": 8,
+ "source": "https://s1.sentry-cdn.com/_static/bloopbloop/sentry/dist/app.js.map",
+ "row": 15,
+ },
+ "message": "Invalid location in sourcemap",
+ "type": "js_invalid_sourcemap_location",
+ }
+ ],
+ "platform": "javascript",
+ "nextEventID": "99f9e199e9a74a14bfef6196ad741619",
+ "type": "error",
+ "metadata": {
+ "type": "ForbiddenError",
+ "value": "GET /organizations/hellboy-meowmeow/users/ 403",
+ },
+ "tags": [
+ {"value": "Chrome 83.0.4103", "key": "browser"},
+ {"value": "Chrome", "key": "browser.name"},
+ {"value": "prod", "key": "environment"},
+ {"value": "yes", "key": "handled"},
+ {"value": "error", "key": "level"},
+ {"value": "generic", "key": "mechanism"},
+ ],
+ "dateCreated": datetime.fromisoformat("2020-06-17T22:26:56.098086Z"),
+ "dateReceived": datetime.fromisoformat("2020-06-17T22:26:56.428721Z"),
+ "user": {
+ "username": None,
+ "name": "Hell Boy",
+ "ip_address": "192.168.1.1",
+ "email": "hell@boy.cat",
+ "data": {"isStaff": False},
+ "id": "550747",
+ },
+ "entries": [
+ {
+ "type": "exception",
+ "data": {
+ "values": [
+ {
+ "stacktrace": {
+ "frames": [
+ {
+ "function": "ignoreOnError",
+ "errors": None,
+ "colNo": 23,
+ "vars": None,
+ "package": None,
+ "absPath": "webpack:////usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers.js",
+ "inApp": False,
+ "lineNo": 71,
+ "module": "usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers",
+ "filename": "/usr/src/getsentry/src/sentry/node_modules/@sentry/browser/esm/helpers.js",
+ "platform": None,
+ "instructionAddr": None,
+ "context": [
+ [66, " }"],
+ [67, " // Attempt to invoke user-land function"],
+ [
+ 68,
+ " // NOTE: If you are a Sentry user, and you are seeing this stack frame, it",
+ ],
+ [
+ 69,
+ " // means the sentry.javascript SDK caught an error invoking your application code. This",
+ ],
+ [
+ 70,
+ " // is expected behavior and NOT indicative of a bug with sentry.javascript.",
+ ],
+ [
+ 71,
+ " return fn.apply(this, wrappedArguments);",
+ ],
+ [72, " // tslint:enable:no-unsafe-any"],
+ [73, " }"],
+ [74, " catch (ex) {"],
+ [75, " ignoreNextOnError();"],
+ [76, " withScope(function (scope) {"],
+ ],
+ "symbolAddr": None,
+ "trust": None,
+ "symbol": None,
+ },
+ {
+ "function": "apply",
+ "errors": None,
+ "colNo": 24,
+ "vars": None,
+ "package": None,
+ "absPath": "webpack:////usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods.js",
+ "inApp": False,
+ "lineNo": 74,
+ "module": "usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods",
+ "filename": "/usr/src/getsentry/src/sentry/node_modules/reflux-core/lib/PublisherMethods.js",
+ "platform": None,
+ "instructionAddr": None,
+ "context": [
+ [69, " */"],
+ [70, " triggerAsync: function triggerAsync() {"],
+ [71, " var args = arguments,"],
+ [72, " me = this;"],
+ [73, " _.nextTick(function () {"],
+ [74, " me.trigger.apply(me, args);"],
+ [75, " });"],
+ [76, " },"],
+ [77, ""],
+ [78, " /**"],
+ [
+ 79,
+ " * Wraps the trigger mechanism with a deferral function.",
+ ],
+ ],
+ "symbolAddr": None,
+ "trust": None,
+ "symbol": None,
+ },
+ ],
+ "framesOmitted": None,
+ "registers": None,
+ "hasSystemFrames": True,
+ },
+ "module": None,
+ "rawStacktrace": {
+ "frames": [
+ {
+ "function": "a",
+ "errors": None,
+ "colNo": 88800,
+ "vars": None,
+ "package": None,
+ "absPath": "https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
+ "inApp": False,
+ "lineNo": 81,
+ "module": None,
+ "filename": "/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
+ "platform": None,
+ "instructionAddr": None,
+ "context": [
+ [76, "/*!"],
+ [77, " Copyright (c) 2018 Jed Watson."],
+ [78, " Licensed under the MIT License (MIT), see"],
+ [79, " http://jedwatson.github.io/react-select"],
+ [80, "*/"],
+ [
+ 81,
+ "{snip} e,t)}));return e.handleEvent?e.handleEvent.apply(this,s):e.apply(this,s)}catch(e){throw c(),Object(o.m)((function(n){n.addEventProcessor((fu {snip}",
+ ],
+ [82, "/*!"],
+ [83, " * JavaScript Cookie v2.2.1"],
+ [84, " * https://github.com/js-cookie/js-cookie"],
+ [85, " *"],
+ [86, " * Copyright 2006, 2015 Klaus Hartl & Fagner Brack"],
+ ],
+ "symbolAddr": None,
+ "trust": None,
+ "symbol": None,
+ },
+ {
+ "function": None,
+ "errors": None,
+ "colNo": 149484,
+ "vars": None,
+ "package": None,
+ "absPath": "https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
+ "inApp": False,
+ "lineNo": 119,
+ "module": None,
+ "filename": "/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js",
+ "platform": None,
+ "instructionAddr": None,
+ "context": [
+ [114, "/* @license"],
+ [115, "Papa Parse"],
+ [116, "v5.2.0"],
+ [117, "https://github.com/mholt/PapaParse"],
+ [118, "License: MIT"],
+ [
+ 119,
+ "{snip} (){var e=arguments,t=this;r.nextTick((function(){t.trigger.apply(t,e)}))},deferWith:function(e){var t=this.trigger,n=this,r=function(){t.app {snip}",
+ ],
+ [120, "/**!"],
+ [
+ 121,
+ " * @fileOverview Kickass library to create and place poppers near their reference elements.",
+ ],
+ [122, " * @version 1.16.1"],
+ [123, " * @license"],
+ [
+ 124,
+ " * Copyright (c) 2016 Federico Zivolo and contributors",
+ ],
+ ],
+ "symbolAddr": None,
+ "trust": None,
+ "symbol": None,
+ },
+ ],
+ "framesOmitted": None,
+ "registers": None,
+ "hasSystemFrames": True,
+ },
+ "mechanism": {"type": "generic", "handled": True},
+ "threadId": None,
+ "value": "GET /organizations/hellboy-meowmeow/users/ 403",
+ "type": "ForbiddenError",
+ }
+ ],
+ "excOmitted": None,
+ "hasSystemFrames": True,
+ },
+ },
+ {
+ "type": "breadcrumbs",
+ "data": {
+ "values": [
+ {
+ "category": "tracing",
+ "level": "debug",
+ "event_id": None,
+ "timestamp": "2020-06-17T22:26:55.266586Z",
+ "data": None,
+ "message": "[Tracing] pushActivity: idleTransactionStarted#1",
+ "type": "debug",
+ },
+ {
+ "category": "xhr",
+ "level": "info",
+ "event_id": None,
+ "timestamp": "2020-06-17T22:26:55.619446Z",
+ "data": {
+ "url": "/api/0/internal/health/",
+ "status_code": 200,
+ "method": "GET",
+ },
+ "message": None,
+ "type": "http",
+ },
+ {
+ "category": "sentry.transaction",
+ "level": "info",
+ "event_id": None,
+ "timestamp": "2020-06-17T22:26:55.945016Z",
+ "data": None,
+ "message": "7787a027f3fb46c985aaa2287b3f4d09",
+ "type": "default",
+ },
+ ]
+ },
+ },
+ {
+ "type": "request",
+ "data": {
+ "fragment": None,
+ "cookies": [],
+ "inferredContentType": None,
+ "env": None,
+ "headers": [
+ [
+ "User-Agent",
+ "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36",
+ ]
+ ],
+ "url": "https://sentry.io/organizations/hellboy-meowmeow/issues/",
+ "query": [["project", "5236886"]],
+ "data": None,
+ "method": None,
+ },
+ },
+ ],
+ "packages": {},
+ "sdk": {"version": "5.17.0", "name": "sentry.javascript.browser"},
+ "_meta": {
+ "user": None,
+ "context": None,
+ "entries": {},
+ "contexts": None,
+ "message": None,
+ "packages": None,
+ "tags": {},
+ "sdk": None,
+ },
+ "contexts": {
+ "ForbiddenError": {
+ "status": 403,
+ "statusText": "Forbidden",
+ "responseJSON": {"detail": "You do not have permission to perform this action."},
+ "type": "default",
+ },
+ "browser": {"version": "83.0.4103", "type": "browser", "name": "Chrome"},
+ "os": {"version": "10", "type": "os", "name": "Windows"},
+ "trace": {
+ "span_id": "83db1ad17e67dfe7",
+ "type": "trace",
+ "trace_id": "da6caabcd90e45fdb81f6655824a5f88",
+ "op": "navigation",
+ },
+ "organization": {"type": "default", "id": "323938", "slug": "hellboy-meowmeow"},
+ },
+ "fingerprints": ["fbe908cc63d63ea9763fd84cb6bad177"],
+ "context": {
+ "resp": {
+ "status": 403,
+ "responseJSON": {"detail": "You do not have permission to perform this action."},
+ "name": "ForbiddenError",
+ "statusText": "Forbidden",
+ "message": "GET /organizations/hellboy-meowmeow/users/ 403",
+ "stack": "Error\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480441\n at u (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:51006)\n at Generator._invoke (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:50794)\n at Generator.A.forEach.e. [as next] (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:165:51429)\n at n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68684)\n at s (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68895)\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68954\n at new Promise ()\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:16:68835\n at v (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480924)\n at m (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:480152)\n at t.fetchMemberList (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:902983)\n at t.componentDidMount (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:900527)\n at t.componentDidMount (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:15597)\n at Pc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:101023)\n at t.unstable_runWithPriority (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:3462)\n at Ko (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45529)\n at Rc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:97371)\n at Oc 
(https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:87690)\n at https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45820\n at t.unstable_runWithPriority (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:3462)\n at Ko (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45529)\n at Zo (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45765)\n at Jo (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:45700)\n at gc (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:84256)\n at Object.enqueueSetState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:181:50481)\n at t.M.setState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:173:1439)\n at t.onUpdate (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:543076)\n at a.n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149090)\n at a.emit (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:6550)\n at p.trigger (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149379)\n at p.onInitializeUrlState (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/app.js:1:541711)\n at a.n (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149090)\n at a.emit (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:189:6550)\n at Function.trigger (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149379)\n at 
https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:119:149484\n at a (https://s1.sentry-cdn.com/_static/dde778f9f93a48e2b6e58ecb0c5eb8f2/sentry/dist/vendor.js:81:88800)",
+ }
+ },
+ "release": {
+ "dateReleased": datetime.fromisoformat("2020-06-17T19:21:02.186004Z"),
+ "commitCount": 11,
+ "url": "https://freight.getsentry.net/deploys/getsentry/production/8868/",
+ "data": {},
+ "lastDeploy": {
+ "name": "b65bc521378269d3eaefdc964f8ef56621414943 to prod",
+ "url": None,
+ "environment": "prod",
+ "dateStarted": None,
+ "dateFinished": "2020-06-17T19:20:55.641748Z",
+ "id": "6883490",
+ },
+ "deployCount": 1,
+ "dateCreated": datetime.fromisoformat("2020-06-17T18:45:31.042157Z"),
+ "version": "b65bc521378269d3eaefdc964f8ef56621414943",
+ "lastCommit": {
+ "repository": {
+ "status": "active",
+ "integrationId": "2933",
+ "externalSlug": "getsentry/getsentry",
+ "name": "getsentry/getsentry",
+ "provider": {"id": "integrations:github", "name": "GitHub"},
+ "url": "https://github.com/getsentry/getsentry",
+ "id": "2",
+ "dateCreated": "2016-10-10T21:36:45.373994Z",
+ },
+ "releases": [
+ {
+ "dateReleased": datetime.fromisoformat("2020-06-23T13:26:18.427090Z"),
+ "url": "https://freight.getsentry.net/deploys/getsentry/staging/2077/",
+ "dateCreated": "2020-06-23T13:22:50.420265Z",
+ "version": "f3783e5fe710758724f14267439fd46cc2bf5918",
+ "shortVersion": "f3783e5fe710758724f14267439fd46cc2bf5918",
+ "ref": "perf/source-maps-test",
+ },
+ {
+ "dateReleased": datetime.fromisoformat("2020-06-17T19:21:02.186004Z"),
+ "url": "https://freight.getsentry.net/deploys/getsentry/production/8868/",
+ "dateCreated": datetime.fromisoformat("2020-06-17T18:45:31.042157Z"),
+ "version": "b65bc521378269d3eaefdc964f8ef56621414943",
+ "shortVersion": "b65bc521378269d3eaefdc964f8ef56621414943",
+ "ref": "master",
+ },
+ ],
+ "dateCreated": datetime.fromisoformat("2020-06-17T18:43:37Z"),
+ "message": "feat(billing): Get a lot of money",
+ "id": "b65bc521378269d3eaefdc964f8ef56621414943",
+ },
+ "ref": "master",
+ },
+ "crashFile": None,
+ "location": "example.py:123",
+ "culprit": "/books/new/",
+ "groupingConfig": {"enhancements": "abc", "id": "2359823092345612392"},
+ "occurrence": None,
+ "projectID": "5236886",
+ "resolvedWith": [],
+ "sdkUpdates": [],
+ "userReport": None,
+}
+
+
+class EventExamples:
+ PROJECT_EVENTS_SIMPLE = [
+ OpenApiExample(
+ "Return a list of error events bound to a project",
+ value=[SIMPLE_EVENT],
+ response_only=True,
+ status_codes=["200"],
+ )
+ ]
+ GROUP_EVENTS_SIMPLE = [
+ OpenApiExample(
+ "Return a list of error events bound to an issue",
+ value=[SIMPLE_EVENT],
+ response_only=True,
+ status_codes=["200"],
+ )
+ ]
+ GROUP_EVENT_DETAILS = [
+ OpenApiExample(
+ "Return an issue event",
+ value=GROUP_EVENT,
+ response_only=True,
+ status_codes=["200"],
+ )
+ ]
diff --git a/src/sentry/apidocs/examples/integration_examples.py b/src/sentry/apidocs/examples/integration_examples.py
index 9691785cae31e..44ec0f5052db4 100644
--- a/src/sentry/apidocs/examples/integration_examples.py
+++ b/src/sentry/apidocs/examples/integration_examples.py
@@ -55,3 +55,34 @@ class IntegrationExamples:
response_only=True,
)
]
+
+ EXTERNAL_USER_CREATE = [
+ OpenApiExample(
+ "Create an external user",
+ value={
+ "externalName": "@Billybob",
+ "provider": "github",
+ "userId": "1",
+ "integrationId": "1",
+ "id": "1",
+ },
+ status_codes=["200", "201"],
+ response_only=True,
+ )
+ ]
+
+ EXTERNAL_TEAM_CREATE = [
+ OpenApiExample(
+ "Create an external team",
+ value={
+ "externalId": "asdf",
+ "externalName": "@team-foo",
+ "provider": "slack",
+ "integrationId": "1",
+ "id": "1",
+ "teamId": "2",
+ },
+ status_codes=["200", "201"],
+ response_only=True,
+ )
+ ]
diff --git a/src/sentry/apidocs/examples/organization_examples.py b/src/sentry/apidocs/examples/organization_examples.py
index 0542f224ee4b5..109ae5d434152 100644
--- a/src/sentry/apidocs/examples/organization_examples.py
+++ b/src/sentry/apidocs/examples/organization_examples.py
@@ -800,7 +800,7 @@ class OrganizationExamples:
},
"deployCount": 1,
"lastDeploy": {
- "id": 53070941,
+ "id": "53070941",
"environment": "canary-test-control",
"dateStarted": None,
"dateFinished": "2024-05-21T11:26:17.597793Z",
@@ -893,21 +893,6 @@ class OrganizationExamples:
)
]
- EXTERNAL_USER_CREATE = [
- OpenApiExample(
- "Create an external user",
- value={
- "id": 123,
- "provider": "github",
- "external_name": "@billy",
- "integration_id": 123,
- "user_id": 123,
- },
- status_codes=["200", "201"],
- response_only=True,
- )
- ]
-
GET_HISTORICAL_ANOMALIES = [
OpenApiExample(
"Identify anomalies in historical data",
diff --git a/src/sentry/apidocs/hooks.py b/src/sentry/apidocs/hooks.py
index 364ca5e144409..49e22c21e6767 100644
--- a/src/sentry/apidocs/hooks.py
+++ b/src/sentry/apidocs/hooks.py
@@ -224,6 +224,8 @@ def _validate_request_body(
def custom_postprocessing_hook(result: Any, generator: Any, **kwargs: Any) -> Any:
+ _fix_issue_paths(result)
+
# Fetch schema component references
schema_components = result["components"]["schemas"]
@@ -281,3 +283,40 @@ def _check_tag(method_info: Mapping[str, Any], endpoint_name: str) -> None:
def _check_description(json_body: Mapping[str, Any], err_str: str) -> None:
if json_body.get("description") is None:
raise SentryApiBuildError(err_str)
+
+
+def _fix_issue_paths(result: Any) -> Any:
+ """
+ The way we define `/issues/` paths causes some problems with drf-spectacular:
- The path may be defined twice, with `/organizations/{organization_id_or_slug}` prefix and without
+ - The `/issues/` part of the path is defined as `issues|groups` for compatibility reasons,
+ but we only want to use `issues` in the docs
+
+ This function removes duplicate paths, removes the `issues|groups` path parameter and
+ replaces it with `issues` in the path.
+ """
+ items = list(result["paths"].items())
+
+ modified_paths = []
+
+ for path, endpoint in items:
+ if "{var}/{issue_id}" in path:
+ modified_paths.append(path)
+
+ for path in modified_paths:
+ updated_path = path.replace("{var}/{issue_id}", "issues/{issue_id}")
+ if path.startswith("/api/0/organizations/{organization_id_or_slug}/"):
+ updated_path = updated_path.replace(
+ "/api/0/organizations/{organization_id_or_slug}/", "/api/0/"
+ )
+ endpoint = result["paths"][path]
+ for method in endpoint.keys():
+ endpoint[method]["parameters"] = [
+ param
+ for param in endpoint[method]["parameters"]
+ if not (
+ param["in"] == "path" and param["name"] in ("var", "organization_id_or_slug")
+ )
+ ]
+ result["paths"][updated_path] = endpoint
+ del result["paths"][path]
diff --git a/src/sentry/apidocs/parameters.py b/src/sentry/apidocs/parameters.py
index 53ca248c2b8c2..8bcc38d34c9bf 100644
--- a/src/sentry/apidocs/parameters.py
+++ b/src/sentry/apidocs/parameters.py
@@ -95,7 +95,8 @@ class EnvironmentParams:
location="query",
required=False,
type=str,
- description="""The visibility of the environments to filter by. The options are: `all`, `hidden`, `visible`. Defaults to `visible`.""",
+ description="""The visibility of the environments to filter by. Defaults to `visible`.""",
+ enum=["all", "hidden", "visible"],
)
@@ -179,6 +180,14 @@ class OrganizationParams:
description="The ID of the external user object. This is returned when creating an external user.",
)
+ EXTERNAL_TEAM_ID = OpenApiParameter(
+ name="external_team_id",
+ location="path",
+ required=True,
+ type=int,
+ description="The ID of the external team object. This is returned when creating an external team.",
+ )
+
class ReleaseParams:
VERSION = OpenApiParameter(
@@ -253,6 +262,23 @@ class SCIMParams:
)
+class IssueParams:
+ ISSUES_OR_GROUPS = OpenApiParameter(
+ name="var",
+ location="path",
+ required=False,
+ type=str,
+ description="Issue URLs may be accessed with either `issues` or `groups`. This parameter will be removed when building the API docs.",
+ )
+ ISSUE_ID = OpenApiParameter(
+ name="issue_id",
+ location="path",
+ required=True,
+ type=int,
+ description="The ID of the issue you'd like to query.",
+ )
+
+
class IssueAlertParams:
ISSUE_RULE_ID = OpenApiParameter(
name="rule_id",
diff --git a/src/sentry/audit_log/events.py b/src/sentry/audit_log/events.py
index f831b0754060a..6fdb3b3a6bebe 100644
--- a/src/sentry/audit_log/events.py
+++ b/src/sentry/audit_log/events.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from datetime import datetime
from typing import TYPE_CHECKING
from sentry.audit_log.manager import AuditLogEvent
@@ -351,11 +352,17 @@ def __init__(self):
def render(self, audit_log_entry: AuditLogEntry):
entry_data = audit_log_entry.data
- access_start = entry_data.get("access_start", None)
- access_end = entry_data.get("access_end", None)
+ access_start = entry_data.get("access_start")
+ access_end = entry_data.get("access_end")
rendered_text = "waived data secrecy"
if access_start is not None and access_end is not None:
- rendered_text += f" from {access_start} to {access_end}"
+ start_dt = datetime.fromisoformat(access_start)
+ end_dt = datetime.fromisoformat(access_end)
+
+ formatted_start = start_dt.strftime("%b %d, %Y %I:%M %p UTC")
+ formatted_end = end_dt.strftime("%b %d, %Y %I:%M %p UTC")
+
+ rendered_text += f" from {formatted_start} to {formatted_end}"
return rendered_text
diff --git a/src/sentry/backup/comparators.py b/src/sentry/backup/comparators.py
index 9155f6de851d3..445d5f3078108 100644
--- a/src/sentry/backup/comparators.py
+++ b/src/sentry/backup/comparators.py
@@ -865,13 +865,27 @@ def get_default_comparators() -> dict[str, list[JSONScrubbingComparator]]:
],
"sentry.userrole": [DateUpdatedComparator("date_updated")],
"sentry.userroleuser": [DateUpdatedComparator("date_updated")],
+ "workflow_engine.action": [DateUpdatedComparator("date_updated", "date_added")],
+ "workflow_engine.datacondition": [DateUpdatedComparator("date_updated", "date_added")],
+ "workflow_engine.dataconditiongroup": [
+ DateUpdatedComparator("date_updated", "date_added")
+ ],
+ "workflow_engine.dataconditiongroupaction": [
+ DateUpdatedComparator("date_updated", "date_added")
+ ],
"workflow_engine.datasource": [DateUpdatedComparator("date_updated", "date_added")],
"workflow_engine.datasourcedetector": [
DateUpdatedComparator("date_updated", "date_added")
],
"workflow_engine.detector": [DateUpdatedComparator("date_updated", "date_added")],
+ "workflow_engine.detectorstate": [DateUpdatedComparator("date_updated", "date_added")],
+ "workflow_engine.detectorworkflow": [
+ DateUpdatedComparator("date_updated", "date_added")
+ ],
"workflow_engine.workflow": [DateUpdatedComparator("date_updated", "date_added")],
- "workflow_engine.workflowaction": [DateUpdatedComparator("date_updated", "date_added")],
+ "workflow_engine.workflowdataconditiongroup": [
+ DateUpdatedComparator("date_updated", "date_added")
+ ],
},
)
diff --git a/src/sentry/buffer/base.py b/src/sentry/buffer/base.py
index 5cdb624d011e9..a9cde77448f9e 100644
--- a/src/sentry/buffer/base.py
+++ b/src/sentry/buffer/base.py
@@ -148,7 +148,6 @@ def process(
extra: dict[str, Any] | None = None,
signal_only: bool | None = None,
) -> None:
- from sentry.event_manager import ScoreClause
from sentry.models.group import Group
created = False
@@ -162,12 +161,6 @@ def process(
# HACK(dcramer): this is gross, but we don't have a good hook to compute this property today
# XXX(dcramer): remove once we can replace 'priority' with something reasonable via Snuba
if model is Group:
- if "last_seen" in update_kwargs and "times_seen" in update_kwargs:
- update_kwargs["score"] = ScoreClause(
- group=None,
- times_seen=update_kwargs["times_seen"],
- last_seen=update_kwargs["last_seen"],
- )
# XXX: create_or_update doesn't fire `post_save` signals, and so this update never
# ends up in the cache. This causes issues when handling issue alerts, and likely
# elsewhere. Use `update` here since we're already special casing, and we know that
diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py
index 3ae1a951421c5..5b0ffbd01aa25 100644
--- a/src/sentry/buffer/redis.py
+++ b/src/sentry/buffer/redis.py
@@ -34,7 +34,7 @@
# load everywhere
_last_validation_log: float | None = None
Pipeline = Any
-# TODO type Pipeline instead of using Any here
+# TODO: type Pipeline instead of using Any here
def _get_model_key(model: type[models.Model]) -> str:
@@ -123,7 +123,9 @@ def queue(self, model_key: str) -> str | None:
"""
Get the queue name for the given model_key.
"""
+ metrics.incr(f"pendingbuffer-router.queue.{model_key}")
if model_key in self.pending_buffer_router:
+ metrics.incr(f"pendingbuffer-router.queue-found.{model_key}")
generate_queue = self.pending_buffer_router[model_key].generate_queue
if generate_queue is not None:
return generate_queue(model_key)
@@ -158,6 +160,7 @@ def assign_queue(self, model: type[models.Model], generate_queue: ChooseQueueFun
A queue can be assigned to a model by passing in the generate_queue function.
"""
key = _get_model_key(model=model)
+ metrics.incr(f"redisbuffer-router.assign_queue.{key}")
self._routers[key] = generate_queue
def create_pending_buffers_router(self, incr_batch_size: int) -> PendingBufferRouter:
diff --git a/src/sentry/celery.py b/src/sentry/celery.py
index d3d723a08a315..d3ce67dcaeeec 100644
--- a/src/sentry/celery.py
+++ b/src/sentry/celery.py
@@ -14,8 +14,8 @@
[
# basic tasks that must be passed models still
"sentry.tasks.process_buffer.process_incr",
- "sentry.tasks.process_resource_change_bound",
- "sentry.tasks.sentry_apps.send_alert_event",
+ "sentry.sentry_apps.tasks.sentry_apps.process_resource_change_bound",
+ "sentry.sentry_apps.tasks.sentry_apps.send_alert_event",
"sentry.tasks.unmerge",
"src.sentry.notifications.utils.async_send_notification",
# basic tasks that can already deal with primary keys passed
diff --git a/src/sentry/charts/chartcuterie.py b/src/sentry/charts/chartcuterie.py
index 956d01afd64c8..263ef4e7df96a 100644
--- a/src/sentry/charts/chartcuterie.py
+++ b/src/sentry/charts/chartcuterie.py
@@ -67,7 +67,7 @@ def generate_chart(self, style: ChartType, data: Any, size: ChartSize | None = N
with sentry_sdk.start_span(
op="charts.chartcuterie.generate_chart",
- description=type(self).__name__,
+ name=type(self).__name__,
):
# Using sentry json formatter to handle datetime objects
@@ -90,7 +90,7 @@ def generate_chart(self, style: ChartType, data: Any, size: ChartSize | None = N
with sentry_sdk.start_span(
op="charts.chartcuterie.upload",
- description=type(self).__name__,
+ name=type(self).__name__,
):
storage = get_storage(self.storage_options)
storage.save(file_name, BytesIO(resp.content))
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index fd98edfc9b8a4..00df2f3c2f683 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -20,12 +20,13 @@
from sentry.conf.api_pagination_allowlist_do_not_modify import (
SENTRY_API_PAGINATION_ALLOWLIST_DO_NOT_MODIFY,
)
+from sentry.conf.types.celery import SplitQueueSize, SplitQueueTaskRoute
from sentry.conf.types.kafka_definition import ConsumerDefinition
from sentry.conf.types.logging_config import LoggingConfig
from sentry.conf.types.role_dict import RoleDict
from sentry.conf.types.sdk_config import ServerSdkConfig
from sentry.utils import json # NOQA (used in getsentry config)
-from sentry.utils.celery import crontab_with_minute_jitter
+from sentry.utils.celery import crontab_with_minute_jitter, make_split_task_queues
from sentry.utils.types import Type, type_from_value
@@ -345,6 +346,7 @@ def env(
"sentry.middleware.locale.SentryLocaleMiddleware",
"sentry.middleware.ratelimit.RatelimitMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
+ "sentry.middleware.devtoolbar.DevToolbarAnalyticsMiddleware",
)
ROOT_URLCONF = "sentry.conf.urls"
@@ -392,12 +394,14 @@ def env(
"sentry",
"sentry.analytics",
"sentry.incidents.apps.Config",
+ "sentry.deletions",
"sentry.discover",
"sentry.analytics.events",
"sentry.nodestore",
"sentry.users",
"sentry.sentry_apps",
"sentry.integrations",
+ "sentry.flags",
"sentry.monitors",
"sentry.uptime",
"sentry.replays",
@@ -750,6 +754,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
"sentry.integrations.github.tasks.pr_comment",
"sentry.integrations.jira.tasks",
"sentry.integrations.opsgenie.tasks",
+ "sentry.sentry_apps.tasks",
"sentry.snuba.tasks",
"sentry.replays.tasks",
"sentry.monitors.tasks.clock_pulse",
@@ -819,6 +824,34 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
"sentry.integrations.tasks",
)
+# tmp(michal): Default configuration for post_process* queues split
+SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER: dict[str, Callable[[], str]] = {}
+
+# Enable split queue routing
+CELERY_ROUTES = ("sentry.queue.routers.SplitQueueTaskRouter",)
+
+# Mapping from task names to split queues. This can be used when the
+# task does not have to specify the queue and can rely on Celery to
+# do the routing.
+# Each route has a task name as key and a tuple containing a list of queues
+# and a default one as destination. The default one is used when the
+# rollout option is not active.
+CELERY_SPLIT_QUEUE_TASK_ROUTES_REGION: Mapping[str, SplitQueueTaskRoute] = {
+ "sentry.tasks.store.save_event_transaction": {
+ "default_queue": "events.save_event_transaction",
+ "queues_config": {
+ "total": 3,
+ "in_use": 3,
+ },
+ }
+}
+CELERY_SPLIT_TASK_QUEUES_REGION = make_split_task_queues(CELERY_SPLIT_QUEUE_TASK_ROUTES_REGION)
+
+# Mapping from queue name to split queues to be used by SplitQueueRouter.
+# This is meant to be used in those cases where we have to specify the
+# queue name when issuing a task. Example: post process.
+CELERY_SPLIT_QUEUE_ROUTES: Mapping[str, SplitQueueSize] = {}
+
default_exchange = Exchange("default", type="direct")
control_exchange = default_exchange
@@ -1239,16 +1272,19 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
CELERYBEAT_SCHEDULE_FILENAME = os.path.join(tempfile.gettempdir(), "sentry-celerybeat-control")
CELERYBEAT_SCHEDULE = CELERYBEAT_SCHEDULE_CONTROL
CELERY_QUEUES = CELERY_QUEUES_CONTROL
+ CELERY_SPLIT_QUEUE_TASK_ROUTES: Mapping[str, SplitQueueTaskRoute] = {}
elif SILO_MODE == "REGION":
CELERYBEAT_SCHEDULE_FILENAME = os.path.join(tempfile.gettempdir(), "sentry-celerybeat-region")
CELERYBEAT_SCHEDULE = CELERYBEAT_SCHEDULE_REGION
- CELERY_QUEUES = CELERY_QUEUES_REGION
+ CELERY_QUEUES = CELERY_QUEUES_REGION + CELERY_SPLIT_TASK_QUEUES_REGION
+ CELERY_SPLIT_QUEUE_TASK_ROUTES = CELERY_SPLIT_QUEUE_TASK_ROUTES_REGION
else:
CELERYBEAT_SCHEDULE = {**CELERYBEAT_SCHEDULE_CONTROL, **CELERYBEAT_SCHEDULE_REGION}
CELERYBEAT_SCHEDULE_FILENAME = os.path.join(tempfile.gettempdir(), "sentry-celerybeat")
- CELERY_QUEUES = CELERY_QUEUES_REGION + CELERY_QUEUES_CONTROL
+ CELERY_QUEUES = CELERY_QUEUES_REGION + CELERY_QUEUES_CONTROL + CELERY_SPLIT_TASK_QUEUES_REGION
+ CELERY_SPLIT_QUEUE_TASK_ROUTES = CELERY_SPLIT_QUEUE_TASK_ROUTES_REGION
for queue in CELERY_QUEUES:
queue.durable = False
@@ -1730,7 +1766,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SENTRY_METRICS_INDEXER = "sentry.sentry_metrics.indexer.postgres.postgres_v2.PostgresIndexer"
SENTRY_METRICS_INDEXER_OPTIONS: dict[str, Any] = {}
SENTRY_METRICS_INDEXER_CACHE_TTL = 3600 * 2
-SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE = 0.1
+SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE = 0.1 # relative to SENTRY_BACKEND_APM_SAMPLING
SENTRY_METRICS_INDEXER_SPANNER_OPTIONS: dict[str, Any] = {}
@@ -2183,9 +2219,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
# This flag activates consuming GroupAttribute messages in the development environment
SENTRY_USE_GROUP_ATTRIBUTES = True
-# This flag activates replay analyzer service in the development environment
-SENTRY_USE_REPLAY_ANALYZER_SERVICE = False
-
# This flag activates Spotlight Sidecar in the development environment
SENTRY_USE_SPOTLIGHT = False
@@ -2435,14 +2468,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"only_if": settings.SENTRY_USE_PROFILING,
}
),
- "session-replay-analyzer": lambda settings, options: (
- {
- "image": "ghcr.io/getsentry/session-replay-analyzer:latest",
- "environment": {},
- "ports": {"3000/tcp": 3000},
- "only_if": settings.SENTRY_USE_REPLAY_ANALYZER_SERVICE,
- }
- ),
"spotlight-sidecar": lambda settings, options: (
{
"image": "ghcr.io/getsentry/spotlight:latest",
@@ -2467,7 +2492,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SENTRY_SELF_HOSTED_ERRORS_ONLY = False
# only referenced in getsentry to provide the stable beacon version
# updated with scripts/bump-version.sh
-SELF_HOSTED_STABLE_VERSION = "24.9.0"
+SELF_HOSTED_STABLE_VERSION = "24.10.0"
# Whether we should look at X-Forwarded-For header or not
# when checking REMOTE_ADDR ip addresses
@@ -3115,6 +3140,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE = True
ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT = None
ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT = None
+ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT_FORCE = False
if int(PG_VERSION.split(".", maxsplit=1)[0]) < 12:
# In v0.6 of django-pg-zero-downtime-migrations this settings is deprecated for PostreSQLv12+
@@ -3187,7 +3213,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
# We should not run access logging middleware on some endpoints as
# it is very noisy, and these views are hit by internal services.
-ACCESS_LOGS_EXCLUDE_PATHS = ("/api/0/internal/", "/api/0/relays/")
+ACCESS_LOGS_EXCLUDE_PATHS = ("/api/0/internal/", "/api/0/relays/", "/_warmup/")
VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON = True
DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL = False
@@ -3514,7 +3540,3 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SENTRY_WEB_PORT = int(bind[1])
CELERYBEAT_SCHEDULE_FILENAME = f"celerybeat-schedule-{SILO_MODE}"
-
-
-# tmp(michal): Default configuration for post_process* queueus split
-SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER: dict[str, Callable[[], str]] = {}
diff --git a/src/sentry/conf/types/celery.py b/src/sentry/conf/types/celery.py
new file mode 100644
index 0000000000000..809afab5d7af5
--- /dev/null
+++ b/src/sentry/conf/types/celery.py
@@ -0,0 +1,44 @@
+from __future__ import annotations
+
+from typing import NotRequired, TypedDict
+
+
+class SplitQueueSize(TypedDict):
+ # The total number of queues to create to split a single queue.
+ # This number triggers the creation of the queues themselves
+ # when the application starts.
+ total: int
+ # The number of queues to actually use. It has to be smaller or
+ # equal to `total`.
+ # This is the number of queues the router uses when the split
+ # is enabled on this queue.
+ # This number exists in order to be able to safely increase or
+ # decrease the number of queues as the queues have to be created
+ # first, then we have to start consuming from them, only then
+ # we can start producing.
+ in_use: int
+
+
+class SplitQueueTaskRoute(TypedDict):
+ """
+ This is used to provide the routes task invocations have to be
+ routed to when the Celery router is used.
+ """
+
+ # This represents both the name of the default queue in use when
+ # the router is not deployed and the prefix for all split queue
+ # names for this task.
+ #
+ # Example: my_queue, becomes my_queue_1, my_queue_2 if there are
+ # two split queues.
+ default_queue: str
+
+ # Configures the number of queues to create and to use.
+ #
+ # This can be None if we do not want to set up multiple queues in
+ # an environment. In order to use the SplitQueue router, the queue
+ # name has to be removed from the task definition, which means we
+ # must go through the router in all cases. Thus the router has
+ # to provide a default queue even if we do not want a split in an
+ # environment.
+ queues_config: NotRequired[SplitQueueSize]
diff --git a/src/sentry/conf/urls.py b/src/sentry/conf/urls.py
index 09bc7fe52a756..62f54146cd013 100644
--- a/src/sentry/conf/urls.py
+++ b/src/sentry/conf/urls.py
@@ -1,31 +1,11 @@
from __future__ import annotations
-from django.urls import URLPattern, URLResolver, re_path
-
-from sentry.web.frontend import csrf_failure
from sentry.web.frontend.error_404 import Error404View
from sentry.web.frontend.error_500 import Error500View
from sentry.web.urls import urlpatterns as web_urlpatterns
+# XXX: remove after getsentry does not reference these
handler404 = Error404View.as_view()
handler500 = Error500View.as_view()
-urlpatterns: list[URLResolver | URLPattern] = [
- re_path(
- r"^500/",
- handler500,
- name="error-500",
- ),
- re_path(
- r"^404/",
- handler404,
- name="error-404",
- ),
- re_path(
- r"^403-csrf-failure/",
- csrf_failure.view,
- name="error-403-csrf-failure",
- ),
-]
-
-urlpatterns += web_urlpatterns
+urlpatterns = web_urlpatterns
diff --git a/src/sentry/constants.py b/src/sentry/constants.py
index 9e76eb1690772..41bee5a4a9224 100644
--- a/src/sentry/constants.py
+++ b/src/sentry/constants.py
@@ -710,6 +710,7 @@ class InsightModules(Enum):
METRICS_ACTIVATE_LAST_FOR_GAUGES_DEFAULT = False
DATA_CONSENT_DEFAULT = False
UPTIME_AUTODETECTION = True
+TARGET_SAMPLE_RATE_DEFAULT = 1.0
# `sentry:events_member_admin` - controls whether the 'member' role gets the event:admin scope
EVENTS_MEMBER_ADMIN_DEFAULT = True
@@ -781,6 +782,7 @@ class InsightModules(Enum):
"*/ready",
"*/readyz",
"*/ping",
+ "*/up",
]
diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py
index 975da68126ea5..77231e1b1621c 100644
--- a/src/sentry/consumers/__init__.py
+++ b/src/sentry/consumers/__init__.py
@@ -162,6 +162,19 @@ def ingest_events_options() -> list[click.Option]:
return options
+def ingest_transactions_options() -> list[click.Option]:
+ options = ingest_events_options()
+ options.append(
+ click.Option(
+ ["--no-celery-mode", "no_celery_mode"],
+ default=False,
+ is_flag=True,
+ help="Save event directly in consumer without celery",
+ )
+ )
+ return options
+
+
_METRICS_INDEXER_OPTIONS = [
click.Option(["--input-block-size"], type=int, default=None),
click.Option(["--output-block-size"], type=int, default=None),
@@ -312,20 +325,8 @@ def ingest_events_options() -> list[click.Option]:
},
"ingest-transactions": {
"topic": Topic.INGEST_TRANSACTIONS,
- "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory",
- "click_options": ingest_events_options(),
- "static_args": {
- "consumer_type": ConsumerType.Transactions,
- },
- "dlq_topic": Topic.INGEST_TRANSACTIONS_DLQ,
- },
- "ingest-transactions-inc847": {
- "topic": Topic.INGEST_TRANSACTIONS,
- "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory",
- "click_options": ingest_events_options(),
- "static_args": {
- "consumer_type": ConsumerType.Transactions,
- },
+ "strategy_factory": "sentry.ingest.consumer.factory.IngestTransactionsStrategyFactory",
+ "click_options": ingest_transactions_options(),
"dlq_topic": Topic.INGEST_TRANSACTIONS_DLQ,
},
"ingest-metrics": {
diff --git a/src/sentry/coreapi.py b/src/sentry/coreapi.py
index ce544b3fe4e06..4e1f58c91d0a7 100644
--- a/src/sentry/coreapi.py
+++ b/src/sentry/coreapi.py
@@ -1,12 +1,6 @@
from __future__ import annotations
import logging
-from time import time
-
-from sentry.attachments import attachment_cache
-from sentry.eventstore.processing import event_processing_store
-from sentry.ingest.consumer.processors import CACHE_TIMEOUT
-from sentry.tasks.store import preprocess_event, preprocess_event_from_reprocessing
# TODO: We should make the API a class, and UDP/HTTP just inherit from it
# This will make it so we can more easily control logging with various
@@ -35,33 +29,3 @@ class APIUnauthorized(APIError):
class APIForbidden(APIError):
http_status = 403
-
-
-def insert_data_to_database_legacy(
- data, start_time=None, from_reprocessing=False, attachments=None
-):
- """
- Yet another "fast path" to ingest an event without making it go
- through Relay. Please consider using functions from the ingest consumer
- instead, or, if you're within tests, to use `TestCase.store_event`.
- """
-
- # XXX(markus): Delete this function and merge with ingest consumer logic.
-
- if start_time is None:
- start_time = time()
-
- # we might be passed some subclasses of dict that fail dumping
- if not isinstance(data, dict):
- data = dict(data.items())
-
- cache_key = event_processing_store.store(data)
-
- # Attachments will be empty or None if the "event-attachments" feature
- # is turned off. For native crash reports it will still contain the
- # crash dump (e.g. minidump) so we can load it during processing.
- if attachments is not None:
- attachment_cache.set(cache_key, attachments, cache_timeout=CACHE_TIMEOUT)
-
- task = from_reprocessing and preprocess_event_from_reprocessing or preprocess_event
- task.delay(cache_key=cache_key, start_time=start_time, event_id=data["event_id"])
diff --git a/src/sentry/data_secrecy/api/waive_data_secrecy.py b/src/sentry/data_secrecy/api/waive_data_secrecy.py
index 8af1d9bab7e48..5f59ba863423c 100644
--- a/src/sentry/data_secrecy/api/waive_data_secrecy.py
+++ b/src/sentry/data_secrecy/api/waive_data_secrecy.py
@@ -1,3 +1,4 @@
+import logging
from collections.abc import Mapping
from typing import Any
@@ -20,6 +21,8 @@
from sentry.data_secrecy.models import DataSecrecyWaiver
from sentry.models.organization import Organization
+logger = logging.getLogger("sentry.data_secrecy")
+
class WaiveDataSecrecyPermission(OrganizationPermission):
scope_map = {
@@ -119,25 +122,34 @@ def put(self, request: Request, organization: Organization):
serialize(ds, request.user, DataSecrecyWaiverSerializer()), status=status.HTTP_200_OK
)
- def delete(self, request: Request, organization):
+ def delete(self, request: Request, organization: Organization):
"""
Reinstates data secrecy for an organization.
"""
try:
- ds = get_object_or_404(DataSecrecyWaiver, organization=organization)
- ds.delete()
-
- self.create_audit_entry(
- request=request,
- organization=organization,
- event=audit_log.get_event_id("DATA_SECRECY_REINSTATED"),
+ logger.info("Reinstating data secrecy for organization %s", organization.id)
+ ds = DataSecrecyWaiver.objects.get(organization=organization)
+ logger.info(
+ "Data secrecy waiver found for organization %s",
+ organization.id,
+ extra={"ds": ds.id},
)
- return Response(
- {"detail": "Data secrecy has been reinstated."},
- status=status.HTTP_204_NO_CONTENT,
- )
- except Http404:
+ except DataSecrecyWaiver.DoesNotExist:
+ logger.info("No data secrecy waiver found for organization %s", organization.id)
return Response(
{"detail": "No data secrecy waiver found for this organization."},
status=status.HTTP_404_NOT_FOUND,
)
+
+ ds.delete()
+ logger.info("Data secrecy waiver deleted for organization %s", organization.id)
+
+ self.create_audit_entry(
+ request=request,
+ organization=organization,
+ event=audit_log.get_event_id("DATA_SECRECY_REINSTATED"),
+ )
+ return Response(
+ {"detail": "Data secrecy has been reinstated."},
+ status=status.HTTP_204_NO_CONTENT,
+ )
diff --git a/src/sentry/db/models/fields/node.py b/src/sentry/db/models/fields/node.py
index 7e3844319f67e..c58cad00fbb32 100644
--- a/src/sentry/db/models/fields/node.py
+++ b/src/sentry/db/models/fields/node.py
@@ -192,7 +192,7 @@ def to_python(self, value):
try:
value = pickle.loads(decompress(value))
except Exception as e:
- # TODO this is a bit dangerous as a failure to read/decode the
+ # TODO: this is a bit dangerous as a failure to read/decode the
# node_id will end up with this record being replaced with an
# empty value under a new key, potentially orphaning an
# original value in nodestore. OTOH if we can't decode the info
diff --git a/src/sentry/db/models/fields/slug.py b/src/sentry/db/models/fields/slug.py
index fa435e4a93066..ebb57ea2efbbc 100644
--- a/src/sentry/db/models/fields/slug.py
+++ b/src/sentry/db/models/fields/slug.py
@@ -3,6 +3,8 @@
from sentry.slug.validators import no_numeric_validator, org_slug_validator
+DEFAULT_SLUG_MAX_LENGTH = 50
+
class SentrySlugField(SlugField):
default_validators = [*SlugField.default_validators, no_numeric_validator]
diff --git a/src/sentry/db/postgres/schema.py b/src/sentry/db/postgres/schema.py
index ad5efc10188a9..65e500578163c 100644
--- a/src/sentry/db/postgres/schema.py
+++ b/src/sentry/db/postgres/schema.py
@@ -1,10 +1,17 @@
+from contextlib import contextmanager
+
+from django.conf import settings
+from django.db.backends.ddl_references import Statement
from django.db.backends.postgresql.schema import (
DatabaseSchemaEditor as PostgresDatabaseSchemaEditor,
)
from django.db.models import Field
from django.db.models.base import ModelBase
from django_zero_downtime_migrations.backends.postgres.schema import (
+ DUMMY_SQL,
DatabaseSchemaEditorMixin,
+ MultiStatementSQL,
+ PGLock,
Unsafe,
UnsafeOperationException,
)
@@ -69,6 +76,12 @@ class SafePostgresDatabaseSchemaEditor(DatabaseSchemaEditorMixin, PostgresDataba
PostgresDatabaseSchemaEditor.alter_db_tablespace
)
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.LOCK_TIMEOUT_FORCE = getattr(
+ settings, "ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT_FORCE", False
+ )
+
def alter_db_table(self, model, old_db_table, new_db_table):
"""
This didn't work correctly in django_zero_downtime_migrations, so implementing here. This
@@ -85,7 +98,7 @@ def delete_model(self, model):
"""
raise UnsafeOperationException(
f"Deleting the {model.__name__} model is unsafe.\n"
- "More info here: https://develop.sentry.dev/database-migrations/#tables"
+ "More info here: https://develop.sentry.dev/database-migrations/#deleting-tables"
)
def remove_field(self, model, field):
@@ -94,9 +107,74 @@ def remove_field(self, model, field):
"""
raise UnsafeOperationException(
f"Removing the {model.__name__}.{field.name} field is unsafe.\n"
- "More info here: https://develop.sentry.dev/database-migrations/#columns"
+ "More info here: https://develop.sentry.dev/database-migrations/#deleting-columns"
)
+ def execute(self, sql, params=()):
+ if sql is DUMMY_SQL:
+ return
+ statements = []
+ if isinstance(sql, MultiStatementSQL):
+ statements.extend(sql)
+ elif isinstance(sql, Statement) and isinstance(sql.template, MultiStatementSQL):
+ statements.extend(Statement(s, **sql.parts) for s in sql.template)
+ else:
+ statements.append(sql)
+ for statement in statements:
+ if isinstance(statement, PGLock):
+ use_timeouts = statement.use_timeouts
+ disable_statement_timeout = statement.disable_statement_timeout
+ statement = statement.sql
+ elif isinstance(statement, Statement) and isinstance(statement.template, PGLock):
+ use_timeouts = statement.template.use_timeouts
+ disable_statement_timeout = statement.template.disable_statement_timeout
+ statement = Statement(statement.template.sql, **statement.parts)
+ else:
+ use_timeouts = False
+ disable_statement_timeout = False
+
+ if use_timeouts:
+ with self._set_operation_timeout(self.STATEMENT_TIMEOUT, self.LOCK_TIMEOUT):
+ PostgresDatabaseSchemaEditor.execute(self, statement, params)
+ elif self.LOCK_TIMEOUT_FORCE:
+ with self._set_operation_timeout(lock_timeout=self.LOCK_TIMEOUT):
+ PostgresDatabaseSchemaEditor.execute(self, statement, params)
+ elif disable_statement_timeout and self.FLEXIBLE_STATEMENT_TIMEOUT:
+ with self._set_operation_timeout(self.ZERO_TIMEOUT):
+ PostgresDatabaseSchemaEditor.execute(self, statement, params)
+ else:
+ PostgresDatabaseSchemaEditor.execute(self, statement, params)
+
+ @contextmanager
+ def _set_operation_timeout(self, statement_timeout=None, lock_timeout=None):
+ if self.collect_sql:
+ previous_statement_timeout = self.ZERO_TIMEOUT
+ previous_lock_timeout = self.ZERO_TIMEOUT
+ else:
+ with self.connection.cursor() as cursor:
+ cursor.execute(self.sql_get_statement_timeout)
+ (previous_statement_timeout,) = cursor.fetchone()
+ cursor.execute(self.sql_get_lock_timeout)
+ (previous_lock_timeout,) = cursor.fetchone()
+ if statement_timeout is not None:
+ PostgresDatabaseSchemaEditor.execute(
+ self, self.sql_set_statement_timeout % {"statement_timeout": statement_timeout}
+ )
+ if lock_timeout is not None:
+ PostgresDatabaseSchemaEditor.execute(
+ self, self.sql_set_lock_timeout % {"lock_timeout": lock_timeout}
+ )
+ yield
+ if statement_timeout is not None:
+ PostgresDatabaseSchemaEditor.execute(
+ self,
+ self.sql_set_statement_timeout % {"statement_timeout": previous_statement_timeout},
+ )
+ if lock_timeout is not None:
+ PostgresDatabaseSchemaEditor.execute(
+ self, self.sql_set_lock_timeout % {"lock_timeout": previous_lock_timeout}
+ )
+
class DatabaseSchemaEditorProxy:
"""
diff --git a/src/sentry/deletions/__init__.py b/src/sentry/deletions/__init__.py
index e5e7e0a7ec59f..59e13f34e9a13 100644
--- a/src/sentry/deletions/__init__.py
+++ b/src/sentry/deletions/__init__.py
@@ -200,13 +200,6 @@ def get_manager() -> DeletionTaskManager:
return _default_manager
-def __getattr__(name: str) -> Any:
- # Shim for getsentry
- if name == "default_manager":
- return get_manager()
- raise AttributeError(f"module {__name__} has no attribute {name}")
-
-
def get(
task: type[BaseDeletionTask[Any]] | None = None,
**kwargs: Any,
diff --git a/src/sentry/deletions/base.py b/src/sentry/deletions/base.py
index 856d5ff2f5f49..3655e95ee8bdf 100644
--- a/src/sentry/deletions/base.py
+++ b/src/sentry/deletions/base.py
@@ -178,7 +178,7 @@ def mark_deletion_in_progress(self, instance_list: Sequence[ModelT]) -> None:
class ModelDeletionTask(BaseDeletionTask[ModelT]):
- DEFAULT_QUERY_LIMIT = None
+ DEFAULT_QUERY_LIMIT: int | None = None
manager_name = "objects"
def __init__(
diff --git a/src/sentry/deletions/defaults/group.py b/src/sentry/deletions/defaults/group.py
index c0da32c0bb6cc..41df5aa42270a 100644
--- a/src/sentry/deletions/defaults/group.py
+++ b/src/sentry/deletions/defaults/group.py
@@ -2,14 +2,20 @@
import os
from collections import defaultdict
-from collections.abc import Sequence
+from collections.abc import Mapping, Sequence
from typing import Any
-from sentry import eventstore, eventstream, models, nodestore
+from sentry_sdk import set_tag
+from snuba_sdk import DeleteQuery, Request
+
+from sentry import eventstore, eventstream, features, models, nodestore
from sentry.eventstore.models import Event
+from sentry.issues.grouptype import GroupCategory
from sentry.models.group import Group, GroupStatus
from sentry.models.rulefirehistory import RuleFireHistory
+from sentry.snuba.dataset import Dataset
from sentry.tasks.delete_seer_grouping_records import call_delete_seer_grouping_records_by_hash
+from sentry.utils.snuba import bulk_snuba_queries
from ..base import BaseDeletionTask, BaseRelation, ModelDeletionTask, ModelRelation
from ..manager import DeletionTaskManager
@@ -48,22 +54,35 @@
)
-class EventDataDeletionTask(BaseDeletionTask[Group]):
+class EventsBaseDeletionTask(BaseDeletionTask[Group]):
"""
- Deletes nodestore data, EventAttachment and UserReports for group
+ Base class to delete events associated to groups and its related models.
"""
# Number of events fetched from eventstore per chunk() call.
DEFAULT_CHUNK_SIZE = 10000
+ referrer = "deletions.group"
+ dataset: Dataset
def __init__(
self, manager: DeletionTaskManager, groups: Sequence[Group], **kwargs: Any
) -> None:
self.groups = groups
+ # Use self.last_event to keep track of the last event processed in the chunk method.
self.last_event: Event | None = None
+ self.set_group_and_project_ids()
super().__init__(manager, **kwargs)
- def chunk(self) -> bool:
+ def set_group_and_project_ids(self) -> None:
+ group_ids = []
+ self.project_groups = defaultdict(list)
+ for group in self.groups:
+ self.project_groups[group.project_id].append(group.id)
+ group_ids.append(group.id)
+ self.group_ids = group_ids
+ self.project_ids = list(self.project_groups.keys())
+
+ def get_unfetched_events(self) -> list[Event]:
conditions = []
if self.last_event is not None:
conditions.extend(
@@ -76,49 +95,124 @@ def chunk(self) -> bool:
]
)
- group_ids = []
- project_groups = defaultdict(list)
- for group in self.groups:
- project_groups[group.project_id].append(group.id)
- group_ids.append(group.id)
- project_ids = list(project_groups.keys())
-
events = eventstore.backend.get_unfetched_events(
filter=eventstore.Filter(
- conditions=conditions, project_ids=project_ids, group_ids=group_ids
+ conditions=conditions, project_ids=self.project_ids, group_ids=self.group_ids
),
limit=self.DEFAULT_CHUNK_SIZE,
- referrer="deletions.group",
+ referrer=self.referrer,
orderby=["-timestamp", "-event_id"],
- tenant_ids=(
- {"organization_id": self.groups[0].project.organization_id} if self.groups else None
- ),
+ tenant_ids=self.tenant_ids,
+ dataset=self.dataset,
)
- if not events:
- # Remove all group events now that their node data has been removed.
- for project_id, group_ids in project_groups.items():
- eventstream_state = eventstream.backend.start_delete_groups(project_id, group_ids)
- eventstream.backend.end_delete_groups(eventstream_state)
- return False
+ return events
+
+ @property
+ def tenant_ids(self) -> Mapping[str, Any]:
+ result = {"referrer": self.referrer}
+ if self.groups:
+ result["organization_id"] = self.groups[0].project.organization_id
+ return result
+
- self.last_event = events[-1]
+class ErrorEventsDeletionTask(EventsBaseDeletionTask):
+ """
+ Deletes nodestore data, EventAttachment and UserReports for requested groups.
+
+ This class uses the old Snuba deletion method.
+ """
+
+ dataset = Dataset.Events
+
+ def chunk(self) -> bool:
+ """This method is called to delete chunks of data. It returns a boolean to say
+ if the deletion has completed and if it needs to be called again."""
+ events = self.get_unfetched_events()
+ if events:
+ self.delete_events_from_nodestore(events)
+ self.delete_dangling_attachments_and_user_reports(events)
+ # This value will be used in the next call to chunk
+ self.last_event = events[-1]
+ # As long as it returns True the task will keep iterating
+ return True
+ else:
+ # Now that all events have been deleted from the eventstore, we can delete the events from snuba
+ self.delete_events_from_snuba()
+ return False
+ def delete_events_from_nodestore(self, events: Sequence[Event]) -> None:
# Remove from nodestore
node_ids = [Event.generate_node_id(event.project_id, event.event_id) for event in events]
nodestore.backend.delete_multi(node_ids)
+ def delete_dangling_attachments_and_user_reports(self, events: Sequence[Event]) -> None:
# Remove EventAttachment and UserReport *again* as those may not have a
# group ID, therefore there may be dangling ones after "regular" model
# deletion.
event_ids = [event.event_id for event in events]
models.EventAttachment.objects.filter(
- event_id__in=event_ids, project_id__in=project_ids
+ event_id__in=event_ids, project_id__in=self.project_ids
).delete()
models.UserReport.objects.filter(
- event_id__in=event_ids, project_id__in=project_ids
+ event_id__in=event_ids, project_id__in=self.project_ids
).delete()
- return True
+ def delete_events_from_snuba(self) -> None:
+ # Remove all group events now that their node data has been removed.
+ for project_id, group_ids in self.project_groups.items():
+ eventstream_state = eventstream.backend.start_delete_groups(project_id, group_ids)
+ eventstream.backend.end_delete_groups(eventstream_state)
+
+
+class IssuePlatformEventsDeletionTask(EventsBaseDeletionTask):
+ """
+ This class helps delete Issue Platform events which use the new Clickhouse light deletes.
+ """
+
+ dataset = Dataset.IssuePlatform
+
+ def chunk(self) -> bool:
+ """This method is called to delete chunks of data. It returns a boolean to say
+ if the deletion has completed and if it needs to be called again."""
+ events = self.get_unfetched_events()
+ if events:
+ # Ideally, in some cases, we should also delete the associated event from the Nodestore.
+ # In the occurrence_consumer [1] we sometimes create a new event but it's hard in post-ingestion to distinguish between
+ # a created event and an existing one.
+ # https://github.com/getsentry/sentry/blob/a86b9b672709bc9c4558cffb2c825965b8cee0d1/src/sentry/issues/occurrence_consumer.py#L324-L339
+ self.delete_events_from_nodestore(events)
+ # This value will be used in the next call to chunk
+ self.last_event = events[-1]
+ # As long as it returns True the task will keep iterating
+ return True
+ else:
+ # Now that all events have been deleted from the eventstore, we can delete the occurrences from Snuba
+ self.delete_events_from_snuba()
+ return False
+
+ def delete_events_from_nodestore(self, events: Sequence[Event]) -> None:
+ # We delete by the occurrence_id instead of the event_id
+ node_ids = [
+ Event.generate_node_id(event.project_id, event._snuba_data["occurrence_id"])
+ for event in events
+ ]
+ nodestore.backend.delete_multi(node_ids)
+
+ def delete_events_from_snuba(self) -> None:
+ requests = []
+ for project_id, group_ids in self.project_groups.items():
+ query = DeleteQuery(
+ self.dataset.value,
+ column_conditions={"project_id": [project_id], "group_id": list(group_ids)},
+ )
+ request = Request(
+ dataset=self.dataset.value,
+ app_id=self.referrer,
+ query=query,
+ tenant_ids=self.tenant_ids,
+ )
+ requests.append(request)
+ bulk_snuba_queries(requests)
class GroupDeletionTask(ModelDeletionTask[Group]):
@@ -131,30 +225,59 @@ def delete_bulk(self, instance_list: Sequence[Group]) -> bool:
Group deletion operates as a quasi-bulk operation so that we don't flood
snuba replacements with deletions per group.
"""
- self.mark_deletion_in_progress(instance_list)
+ if not instance_list:
+ return True
- group_ids = [group.id for group in instance_list]
+ self.mark_deletion_in_progress(instance_list)
+ error_group_ids = [
+ group.id for group in instance_list if group.issue_category == GroupCategory.ERROR
+ ]
# Tell seer to delete grouping records with these group hashes
- call_delete_seer_grouping_records_by_hash(group_ids)
+ call_delete_seer_grouping_records_by_hash(error_group_ids)
+
+ self._delete_children(instance_list)
+
+ # Remove group objects with children removed.
+ self.delete_instance_bulk(instance_list)
+
+ return False
+ def _delete_children(self, instance_list: Sequence[Group]) -> None:
+ group_ids = [group.id for group in instance_list]
# Remove child relations for all groups first.
child_relations: list[BaseRelation] = []
for model in _GROUP_RELATED_MODELS:
child_relations.append(ModelRelation(model, {"group_id__in": group_ids}))
+ org = instance_list[0].project.organization
+ issue_platform_deletion_allowed = features.has(
+ "organizations:issue-platform-deletion", org, actor=None
+ )
+ error_groups, issue_platform_groups = separate_by_group_category(instance_list)
+
# If this isn't a retention cleanup also remove event data.
if not os.environ.get("_SENTRY_CLEANUP"):
- child_relations.append(
- BaseRelation(params={"groups": instance_list}, task=EventDataDeletionTask)
- )
-
- self.delete_children(child_relations)
+ if not issue_platform_deletion_allowed:
+ params = {"groups": instance_list}
+ child_relations.append(BaseRelation(params=params, task=ErrorEventsDeletionTask))
+ else:
+ if error_groups:
+ params = {"groups": error_groups}
+ child_relations.append(
+ BaseRelation(params=params, task=ErrorEventsDeletionTask)
+ )
- # Remove group objects with children removed.
- self.delete_instance_bulk(instance_list)
+ if issue_platform_groups:
+ # This helps creating custom Sentry alerts;
+ # remove when #proj-snuba-lightweight_delets is done
+ set_tag("issue_platform_deletion", True)
+ params = {"groups": issue_platform_groups}
+ child_relations.append(
+ BaseRelation(params=params, task=IssuePlatformEventsDeletionTask)
+ )
- return False
+ self.delete_children(child_relations)
def delete_instance(self, instance: Group) -> None:
from sentry import similarity
@@ -168,3 +291,15 @@ def mark_deletion_in_progress(self, instance_list: Sequence[Group]) -> None:
Group.objects.filter(id__in=[i.id for i in instance_list]).exclude(
status=GroupStatus.DELETION_IN_PROGRESS
).update(status=GroupStatus.DELETION_IN_PROGRESS, substatus=None)
+
+
+def separate_by_group_category(instance_list: Sequence[Group]) -> tuple[list[Group], list[Group]]:
+ error_groups = []
+ issue_platform_groups = []
+ for group in instance_list:
+ (
+ error_groups.append(group)
+ if group.issue_category == GroupCategory.ERROR
+ else issue_platform_groups.append(group)
+ )
+ return error_groups, issue_platform_groups
diff --git a/src/sentry/deletions/defaults/platform_external_issue.py b/src/sentry/deletions/defaults/platform_external_issue.py
index ac8ecc3132829..d17e208dfb2db 100644
--- a/src/sentry/deletions/defaults/platform_external_issue.py
+++ b/src/sentry/deletions/defaults/platform_external_issue.py
@@ -1,7 +1,7 @@
from collections.abc import Sequence
from sentry.deletions.base import ModelDeletionTask
-from sentry.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
class PlatformExternalIssueDeletionTask(ModelDeletionTask[PlatformExternalIssue]):
diff --git a/src/sentry/deletions/defaults/repository.py b/src/sentry/deletions/defaults/repository.py
index 960befb3b93d0..41d2adedc1600 100644
--- a/src/sentry/deletions/defaults/repository.py
+++ b/src/sentry/deletions/defaults/repository.py
@@ -29,7 +29,7 @@ def get_child_relations(self, instance: Repository) -> list[BaseRelation]:
return _get_repository_child_relations(instance)
def delete_instance(self, instance: Repository) -> None:
- # TODO child_relations should also send pending_delete so we
+ # TODO: child_relations should also send pending_delete so we
# don't have to do this here.
pending_delete.send(sender=type(instance), instance=instance, actor=self.get_actor())
diff --git a/src/sentry/deletions/manager.py b/src/sentry/deletions/manager.py
index 7f4e3615fbbc4..c24a9cc98c901 100644
--- a/src/sentry/deletions/manager.py
+++ b/src/sentry/deletions/manager.py
@@ -1,8 +1,11 @@
+from __future__ import annotations
+
from collections.abc import MutableMapping
-from typing import Any
+from typing import TYPE_CHECKING, Any
-from sentry.db.models.base import Model
-from sentry.deletions.base import BaseDeletionTask
+if TYPE_CHECKING:
+ from sentry.db.models.base import Model
+ from sentry.deletions.base import BaseDeletionTask
__all__ = ["DeletionTaskManager"]
diff --git a/src/sentry/deletions/models/__init__.py b/src/sentry/deletions/models/__init__.py
new file mode 100644
index 0000000000000..6390b946de4da
--- /dev/null
+++ b/src/sentry/deletions/models/__init__.py
@@ -0,0 +1,3 @@
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
+
+__all__ = ("RegionScheduledDeletion",)
diff --git a/src/sentry/models/scheduledeletion.py b/src/sentry/deletions/models/scheduleddeletion.py
similarity index 99%
rename from src/sentry/models/scheduledeletion.py
rename to src/sentry/deletions/models/scheduleddeletion.py
index 318ba96a29491..ca2d460da00bf 100644
--- a/src/sentry/models/scheduledeletion.py
+++ b/src/sentry/deletions/models/scheduleddeletion.py
@@ -104,7 +104,7 @@ def schedule(
return record
@classmethod
- def cancel(cls, instance: Model):
+ def cancel(cls, instance: Model) -> None:
model_name = type(instance).__name__
try:
deletion = cls.objects.get(
diff --git a/src/sentry/deletions/tasks/scheduled.py b/src/sentry/deletions/tasks/scheduled.py
index e0ae8daa4f6f1..2f8cfab6a3f1a 100644
--- a/src/sentry/deletions/tasks/scheduled.py
+++ b/src/sentry/deletions/tasks/scheduled.py
@@ -8,12 +8,12 @@
from django.db import router, transaction
from django.utils import timezone
-from sentry.exceptions import DeleteAborted
-from sentry.models.scheduledeletion import (
+from sentry.deletions.models.scheduleddeletion import (
BaseScheduledDeletion,
RegionScheduledDeletion,
ScheduledDeletion,
)
+from sentry.exceptions import DeleteAborted
from sentry.signals import pending_delete
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task, retry
diff --git a/src/sentry/discover/dashboard_widget_split.py b/src/sentry/discover/dashboard_widget_split.py
index 15433740edac4..a5f4a61751566 100644
--- a/src/sentry/discover/dashboard_widget_split.py
+++ b/src/sentry/discover/dashboard_widget_split.py
@@ -5,7 +5,9 @@
from snuba_sdk.query_visitors import InvalidQueryError
from sentry import features
+from sentry.api.serializers.rest_framework.dashboard import is_aggregate
from sentry.constants import ObjectStatus
+from sentry.discover.arithmetic import ArithmeticParseError
from sentry.discover.dataset_split import (
SplitDataset,
_dataset_split_decision_inferred_from_query,
@@ -81,6 +83,30 @@ def _get_and_save_split_decision_for_dashboard_widget(
projects = dashboard.projects.all() or Project.objects.filter(
organization_id=dashboard.organization.id, status=ObjectStatus.ACTIVE
)
+
+ # Handle cases where the organization has no projects at all.
+ # No projects means a downstream check will fail and we can default
+ # to the errors dataset.
+ if not projects.exists():
+ if not dry_run:
+ sentry_sdk.set_context(
+ "dashboard",
+ {
+ "dashboard_id": dashboard.id,
+ "widget_id": widget.id,
+ "org_slug": dashboard.organization.slug,
+ },
+ )
+ sentry_sdk.capture_message(
+ "No projects found in organization for dashboard, defaulting to errors dataset"
+ )
+ _save_split_decision_for_widget(
+ widget,
+ DashboardWidgetTypes.ERROR_EVENTS,
+ DatasetSourcesTypes.FORCED,
+ )
+ return DashboardWidgetTypes.ERROR_EVENTS, False
+
snuba_dataclass = _get_snuba_dataclass_for_dashboard_widget(widget, list(projects))
selected_columns = _get_field_list(widget_query.fields or [])
@@ -137,7 +163,7 @@ def _get_and_save_split_decision_for_dashboard_widget(
_save_split_decision_for_widget(
widget,
widget_dataset,
- DatasetSourcesTypes.INFERRED,
+ DatasetSourcesTypes.SPLIT_VERSION_2,
)
return widget_dataset, False
@@ -146,7 +172,7 @@ def _get_and_save_split_decision_for_dashboard_widget(
and not equations
):
try:
- metrics_query(
+ metrics_query_result = metrics_query(
selected_columns,
query,
snuba_dataclass,
@@ -155,27 +181,42 @@ def _get_and_save_split_decision_for_dashboard_widget(
offset=None,
limit=1,
referrer="tasks.performance.split_discover_dataset",
+ transform_alias_to_input_format=True,
)
- if dry_run:
- logger.info(
- "Split decision for %s: %s (inferred from running metrics query)",
- widget.id,
- DashboardWidgetTypes.TRANSACTION_LIKE,
- )
- else:
- _save_split_decision_for_widget(
- widget,
- DashboardWidgetTypes.TRANSACTION_LIKE,
- DatasetSourcesTypes.INFERRED,
+ has_metrics_data = (
+ metrics_query_result.get("data")
+ # No results were returned at all
+ and len(metrics_query_result["data"]) > 0
+ and any(
+ metrics_query_result["data"][0][column] > 0
+ for column in selected_columns
+ if is_aggregate(column)
)
+ )
+ if has_metrics_data:
+ if dry_run:
+ logger.info(
+ "Split decision for %s: %s (inferred from running metrics query)",
+ widget.id,
+ DashboardWidgetTypes.TRANSACTION_LIKE,
+ )
+ else:
+ _save_split_decision_for_widget(
+ widget,
+ DashboardWidgetTypes.TRANSACTION_LIKE,
+ DatasetSourcesTypes.SPLIT_VERSION_2,
+ )
- return DashboardWidgetTypes.TRANSACTION_LIKE, True
+ return DashboardWidgetTypes.TRANSACTION_LIKE, True
except (
IncompatibleMetricsQuery,
snuba.QueryIllegalTypeOfArgument,
snuba.UnqualifiedQueryError,
InvalidQueryError,
+ snuba.QueryExecutionError,
+ snuba.SnubaError,
+ ArithmeticParseError,
):
pass
@@ -187,7 +228,14 @@ def _get_and_save_split_decision_for_dashboard_widget(
)
)
has_errors = len(error_results["data"]) > 0
- except (snuba.QueryIllegalTypeOfArgument, snuba.UnqualifiedQueryError, InvalidQueryError):
+ except (
+ snuba.QueryIllegalTypeOfArgument,
+ snuba.UnqualifiedQueryError,
+ InvalidQueryError,
+ snuba.QueryExecutionError,
+ snuba.SnubaError,
+ ArithmeticParseError,
+ ):
pass
if has_errors:
@@ -201,7 +249,7 @@ def _get_and_save_split_decision_for_dashboard_widget(
_save_split_decision_for_widget(
widget,
DashboardWidgetTypes.ERROR_EVENTS,
- DatasetSourcesTypes.INFERRED,
+ DatasetSourcesTypes.SPLIT_VERSION_2,
)
return DashboardWidgetTypes.ERROR_EVENTS, True
@@ -213,7 +261,14 @@ def _get_and_save_split_decision_for_dashboard_widget(
)
)
has_transactions = len(transaction_results["data"]) > 0
- except (snuba.QueryIllegalTypeOfArgument, snuba.UnqualifiedQueryError, InvalidQueryError):
+ except (
+ snuba.QueryIllegalTypeOfArgument,
+ snuba.UnqualifiedQueryError,
+ InvalidQueryError,
+ snuba.QueryExecutionError,
+ snuba.SnubaError,
+ ArithmeticParseError,
+ ):
pass
if has_transactions:
@@ -227,7 +282,7 @@ def _get_and_save_split_decision_for_dashboard_widget(
_save_split_decision_for_widget(
widget,
DashboardWidgetTypes.TRANSACTION_LIKE,
- DatasetSourcesTypes.INFERRED,
+ DatasetSourcesTypes.SPLIT_VERSION_2,
)
return DashboardWidgetTypes.TRANSACTION_LIKE, True
diff --git a/src/sentry/discover/dataset_split.py b/src/sentry/discover/dataset_split.py
index 78157d2a2b835..dbd9a8508e92f 100644
--- a/src/sentry/discover/dataset_split.py
+++ b/src/sentry/discover/dataset_split.py
@@ -19,7 +19,7 @@
from sentry.api.utils import get_date_range_from_stats_period
from sentry.constants import ObjectStatus
-from sentry.discover.arithmetic import is_equation, strip_equation
+from sentry.discover.arithmetic import ArithmeticParseError, is_equation, strip_equation
from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes
from sentry.exceptions import InvalidParams, InvalidSearchQuery
from sentry.models.environment import Environment
@@ -47,6 +47,22 @@ class SplitDataset(Enum):
SplitDataset.Transactions: DiscoverSavedQueryTypes.TRANSACTION_LIKE,
}
+TRANSACTION_ONLY_AGGREGATES = [
+ "failure_rate",
+ "failure_count",
+ "apdex",
+ "count_miserable",
+ "user_misery",
+ "count_web_vitals",
+ "percentile",
+ "p50",
+ "p75",
+ "p90",
+ "p95",
+ "p99",
+ "p100",
+]
+
TRANSACTION_ONLY_FIELDS = [
"duration",
"transaction_op",
@@ -77,6 +93,8 @@ class SplitDataset(Enum):
"span_op_breakdowns[ops.ui]",
]
+ERROR_ONLY_AGGREGATES = ["last_seen"]
+
ERROR_ONLY_FIELDS = [
"location",
"exception_stacks.type",
@@ -137,6 +155,21 @@ def _check_function_parameter_matches_dataset(
return False
+def _check_function_alias_matches_dataset(
+ function: Function | CurriedFunction,
+ dataset: Dataset,
+) -> bool:
+ aggregate_aliases = (
+ TRANSACTION_ONLY_AGGREGATES if dataset == Dataset.Transactions else ERROR_ONLY_AGGREGATES
+ )
+
+ for alias in aggregate_aliases:
+ if function.alias.startswith(alias):
+ return True
+
+ return False
+
+
def _check_aliased_expression_matches_dataset(
aliased_exp: AliasedExpression,
dataset: Dataset,
@@ -216,8 +249,11 @@ def _check_selected_columns_match_dataset(
return True
elif isinstance(select_col, Function) or isinstance(select_col, CurriedFunction):
+ # The parameter check is a stronger check if applicable, so we should keep that first
if _check_function_parameter_matches_dataset(select_col, dataset):
return True
+ if _check_function_alias_matches_dataset(select_col, dataset):
+ return True
return False
@@ -355,6 +391,29 @@ def _get_and_save_split_decision_for_query(
projects = saved_query.projects.all() or Project.objects.filter(
organization_id=saved_query.organization.id, status=ObjectStatus.ACTIVE
)
+
+ # Handle cases where the organization has no projects at all.
+ # No projects means a downstream check will fail and we can default
+ # to the errors dataset.
+ if not projects.exists():
+ if not dry_run:
+ sentry_sdk.set_context(
+ "query",
+ {
+ "saved_query_id": saved_query.id,
+ "org_slug": saved_query.organization.slug,
+ },
+ )
+ sentry_sdk.capture_message(
+ "No projects found in organization for saved query, defaulting to errors dataset"
+ )
+ _save_split_decision_for_query(
+ saved_query,
+ DiscoverSavedQueryTypes.ERROR_EVENTS,
+ DatasetSourcesTypes.FORCED,
+ )
+ return DiscoverSavedQueryTypes.ERROR_EVENTS, False
+
snuba_dataclass = _get_snuba_dataclass_for_saved_query(saved_query, list(projects))
selected_columns = _get_field_list(saved_query.query.get("fields", []))
equations = _get_equation_list(saved_query.query.get("fields", []))
@@ -422,7 +481,14 @@ def _get_and_save_split_decision_for_query(
)
)
has_errors = len(error_results["data"]) > 0
- except (snuba.QueryIllegalTypeOfArgument, snuba.UnqualifiedQueryError, InvalidQueryError):
+ except (
+ snuba.QueryIllegalTypeOfArgument,
+ snuba.UnqualifiedQueryError,
+ InvalidQueryError,
+ snuba.QueryExecutionError,
+ snuba.SnubaError,
+ ArithmeticParseError,
+ ):
pass
if has_errors:
@@ -448,7 +514,14 @@ def _get_and_save_split_decision_for_query(
)
)
has_transactions = len(transaction_results["data"]) > 0
- except (snuba.QueryIllegalTypeOfArgument, snuba.UnqualifiedQueryError, InvalidQueryError):
+ except (
+ snuba.QueryIllegalTypeOfArgument,
+ snuba.UnqualifiedQueryError,
+ InvalidQueryError,
+ snuba.QueryExecutionError,
+ snuba.SnubaError,
+ ArithmeticParseError,
+ ):
pass
if has_transactions:
diff --git a/src/sentry/discover/endpoints/bases.py b/src/sentry/discover/endpoints/bases.py
index 9eccf57ba9c99..9d067e9e0ee8d 100644
--- a/src/sentry/discover/endpoints/bases.py
+++ b/src/sentry/discover/endpoints/bases.py
@@ -17,8 +17,23 @@ def has_object_permission(self, request, view, obj):
return super().has_object_permission(request, view, obj)
if isinstance(obj, DiscoverSavedQuery):
- for project in obj.projects.all():
- if not request.access.has_project_access(project):
- return False
+ # 1. Saved Query contains certain projects
+ if obj.projects.exists():
+ return request.access.has_projects_access(obj.projects.all())
+ # 2. Saved Query covers all projects or all my projects
+
+ # allow when Open Membership
+ if obj.organization.flags.allow_joinleave:
+ return True
+
+ # allow for Managers and Owners
+ if request.access.has_scope("org:write"):
+ return True
+
+ # allow for creator
+ if request.user.id == obj.created_by_id:
+ return True
+
+ return False
return True
diff --git a/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py b/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py
index a42a7fcb38814..fb6d62c29c0af 100644
--- a/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py
+++ b/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py
@@ -15,7 +15,7 @@
class CustomRuleBias(Bias):
"""
- Boosts at 100% sample rate all the traces that have a replay_id.
+ Boosts to 100% sample rate all the traces matching an active custom rule.
"""
def generate_rules(self, project: Project, base_sample_rate: float) -> list[PolymorphicRule]:
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 8158b9c23dc25..86f4a882ba8ba 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -53,33 +53,25 @@
GroupingConfig,
get_grouping_config_dict_for_project,
)
-from sentry.grouping.ingest.config import (
- is_in_transition,
- project_uses_optimized_grouping,
- update_grouping_config_if_needed,
-)
+from sentry.grouping.ingest.config import is_in_transition, update_grouping_config_if_needed
from sentry.grouping.ingest.hashing import (
- find_existing_grouphash,
- get_hash_values,
+ find_grouphash_with_group,
get_or_create_grouphashes,
maybe_run_background_grouping,
maybe_run_secondary_grouping,
run_primary_grouping,
)
-from sentry.grouping.ingest.metrics import (
- record_calculation_metric_with_result,
- record_hash_calculation_metrics,
- record_new_group_metrics,
-)
+from sentry.grouping.ingest.metrics import record_hash_calculation_metrics, record_new_group_metrics
from sentry.grouping.ingest.seer import maybe_check_seer_for_matching_grouphash
from sentry.grouping.ingest.utils import (
add_group_id_to_grouphashes,
- check_for_category_mismatch,
check_for_group_creation_load_shed,
+ is_non_error_type_group,
)
+from sentry.grouping.variants import BaseVariant
from sentry.ingest.inbound_filters import FilterStatKeys
from sentry.integrations.tasks.kick_off_status_syncs import kick_off_status_syncs
-from sentry.issues.grouptype import ErrorGroupType, GroupCategory
+from sentry.issues.grouptype import ErrorGroupType
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka
from sentry.killswitches import killswitch_matches_context
@@ -139,7 +131,6 @@
from sentry.utils.safe import get_path, safe_execute, setdefault_path, trim
from sentry.utils.sdk import set_measurement
from sentry.utils.tag_normalization import normalized_sdk_tag_from_event
-from sentry.utils.types import NonNone
if TYPE_CHECKING:
from sentry.eventstore.models import BaseEvent, Event
@@ -328,7 +319,9 @@ def __init__(self, group=None, last_seen=None, times_seen=None, *args, **kwargs)
def __int__(self):
# Calculate the score manually when coercing to an int.
# This is used within create_or_update and friends
- return self.group.get_score() if self.group else 0
+
+ # XXX: Since removing the 'score' column from 'Group', this now always returns 0.
+ return 0
def as_sql(
self,
@@ -517,12 +510,10 @@ def save(
return jobs[0]["event"]
else:
project = job["event"].project
- job["optimized_grouping"] = project_uses_optimized_grouping(project)
job["in_grouping_transition"] = is_in_transition(project)
metric_tags = {
"platform": job["event"].platform or "unknown",
"sdk": normalized_sdk_tag_from_event(job["event"].data),
- "using_transition_optimization": job["optimized_grouping"],
"in_transition": job["in_grouping_transition"],
}
# This metric allows differentiating from all calls to the `event_manager.save` metric
@@ -887,26 +878,6 @@ def _materialize_metadata_many(jobs: Sequence[Job]) -> None:
job["culprit"] = data["culprit"]
-# TODO: This is only called in `_save_aggregate`, so when that goes, so can this (it's been
-# supplanted by `_get_group_processing_kwargs` below)
-def _get_group_creation_kwargs(job: Job | PerformanceJob) -> dict[str, Any]:
- kwargs = {
- "platform": job["platform"],
- "message": job["event"].search_message,
- "logger": job["logger_name"],
- "level": LOG_LEVELS_MAP.get(job["level"]),
- "last_seen": job["event"].datetime,
- "first_seen": job["event"].datetime,
- "active_at": job["event"].datetime,
- "culprit": job["culprit"],
- }
-
- if job["release"]:
- kwargs["first_release"] = job["release"]
-
- return kwargs
-
-
def _get_group_processing_kwargs(job: Job) -> dict[str, Any]:
"""
Pull together all the metadata used when creating a group or updating a group's metadata based
@@ -1322,272 +1293,7 @@ def get_culprit(data: Mapping[str, Any]) -> str:
@sentry_sdk.tracing.trace
-def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> GroupInfo | None:
- if job["optimized_grouping"]:
- group_info = _save_aggregate_new(
- event=event,
- job=job,
- metric_tags=metric_tags,
- )
- else:
- group_info = _save_aggregate(
- event=event,
- job=job,
- release=job["release"],
- received_timestamp=job["received_timestamp"],
- metric_tags=metric_tags,
- )
-
- if group_info:
- event.group = group_info.group
- job["groups"] = [group_info]
-
- return group_info
-
-
-def _save_aggregate(
- event: Event,
- job: Job,
- release: Release | None,
- received_timestamp: int | float,
- metric_tags: MutableTags,
-) -> GroupInfo | None:
- project = event.project
-
- primary_hashes, secondary_hashes = get_hash_values(project, job, metric_tags)
- hashes = primary_hashes + secondary_hashes
- has_secondary_hashes = len(secondary_hashes) > 0
-
- # Now that we've used the current and possibly secondary grouping config(s) to calculate the
- # hashes, we're free to perform a config update if permitted. Future events will use the new
- # config, but will also be grandfathered into the current config for a month, so as not to
- # erroneously create new groups.
- update_grouping_config_if_needed(project, "ingest")
-
- _materialize_metadata_many([job])
- metadata = dict(job["event_metadata"])
-
- group_creation_kwargs = _get_group_creation_kwargs(job)
-
- grouphashes = get_or_create_grouphashes(project, hashes)
-
- existing_grouphash = find_existing_grouphash(grouphashes)
-
- # In principle the group gets the same metadata as the event, so common
- # attributes can be defined in eventtypes.
- #
- # Additionally the `last_received` key is set for group metadata, later in
- # _save_aggregate
- group_creation_kwargs["data"] = materialize_metadata(
- event.data,
- get_event_type(event.data),
- metadata,
- )
- group_creation_kwargs["data"]["last_received"] = received_timestamp
-
- if existing_grouphash is None:
- if killswitch_matches_context(
- "store.load-shed-group-creation-projects",
- {
- "project_id": project.id,
- "platform": event.platform,
- },
- ):
- raise HashDiscarded("Load shedding group creation", reason="load_shed")
-
- with (
- sentry_sdk.start_span(op="event_manager.create_group_transaction") as span,
- metrics.timer("event_manager.create_group_transaction") as metric_tags,
- transaction.atomic(router.db_for_write(GroupHash)),
- ):
- # These values will get overridden with whatever happens inside the lock if we do manage
- # to acquire it, so it should only end up with `wait-for-lock` if we don't
- #
- # TODO: If we're using this `outome` value for anything more than a count in DD (in
- # other words, if we care about duration), we should probably update it so that when an
- # event does have to wait, we record whether during its wait the event which got the
- # lock first
- # a) created a new group without consulting Seer,
- # b) created a new group because Seer didn't find a close enough match, or
- # c) used an existing group found by Seer
- # because which of those things happened will have an effect on how long the event had to wait.
- span.set_tag("outcome", "wait_for_lock")
- metric_tags["outcome"] = "wait_for_lock"
-
- grouphash_ids = [h.id for h in grouphashes]
-
- # If we're in this branch, we checked our grouphashes and didn't find one with a group
- # attached. We thus want to either ask seer for a nearest neighbor group (and create a
- # new group if one isn't found) or just create a new group without consulting seer, but
- # either way we need to guard against another event with the same hash coming in before
- # we're done here and also thinking it needs to talk to seer and/or create a new group.
- # To prevent this, we're using double-checked locking
- # (https://en.wikipedia.org/wiki/Double-checked_locking).
-
- # First, try to lock the relevant rows in the `GroupHash` table. If another (identically
- # hashed) event is already in the process of talking to seer and/or creating a group and
- # has grabbed the lock before us, we'll block here until it's done. If not, we've now
- # got the lock and other identically-hashed events will have to wait for us.
- all_grouphashes = list(
- GroupHash.objects.filter(id__in=grouphash_ids).select_for_update()
- )
-
- grouphashes = [gh for gh in all_grouphashes if gh.hash in hashes]
-
- # Now check again to see if any of our grouphashes have a group. If we got the lock, the
- # result won't have changed and we still won't find anything. If we didn't get it, we'll
- # have blocked until whichever identically-hashed event *did* get the lock has either
- # created a new group for our hashes or assigned them to a neighboring group suggessted
- # by seer. If that happens, we'll skip this whole branch and jump down to the same one
- # we would have landed in had we found a group to begin with.
- existing_grouphash = find_existing_grouphash(grouphashes)
-
- # If we still haven't found a matching grouphash, we're now safe to go ahead and talk to
- # seer and/or create the group.
- if existing_grouphash is None:
- seer_matched_grouphash = maybe_check_seer_for_matching_grouphash(event)
- seer_matched_group = (
- Group.objects.filter(id=seer_matched_grouphash.group_id).first()
- if seer_matched_grouphash
- else None
- )
-
- group = seer_matched_group or _create_group(project, event, **group_creation_kwargs)
-
- new_hashes = list(grouphashes)
-
- GroupHash.objects.filter(id__in=[h.id for h in new_hashes]).exclude(
- state=GroupHash.State.LOCKED_IN_MIGRATION
- ).update(group=group)
-
- is_new = not seer_matched_group
- is_regression = (
- False
- if is_new
- else _process_existing_aggregate(
- # If `seer_matched_group` were `None`, `is_new` would be true and we
- # wouldn't be here
- group=NonNone(seer_matched_group),
- event=event,
- incoming_group_values=group_creation_kwargs,
- release=release,
- )
- )
-
- span.set_tag("outcome", "new_group" if is_new else "seer_match")
- metric_tags["outcome"] = "new_group" if is_new else "seer_match"
- record_calculation_metric_with_result(
- project=project,
- has_secondary_hashes=has_secondary_hashes,
- result="no_match",
- )
-
- if is_new:
- metrics.incr(
- "group.created",
- skip_internal=True,
- tags={
- "platform": event.platform or "unknown",
- "sdk": normalized_sdk_tag_from_event(event.data),
- },
- )
-
- # This only applies to events with stacktraces, and we only do this for new
- # groups, because we assume that if Seer puts an event in an existing group, it
- # and the existing group have the same frame mix
- frame_mix = event.get_event_metadata().get("in_app_frame_mix")
- if frame_mix:
- metrics.incr(
- "grouping.in_app_frame_mix",
- sample_rate=1.0,
- tags={
- "platform": event.platform or "unknown",
- "sdk": normalized_sdk_tag_from_event(event.data),
- "frame_mix": frame_mix,
- },
- )
-
- return GroupInfo(group, is_new, is_regression)
-
- # If we land here, it's because either:
- #
- # a) There's an existing group with one of our hashes and we found it the first time we looked.
- #
- # b) We didn't find a group the first time we looked, but another identically-hashed event beat
- # us to the lock and while we were waiting either created a new group or assigned our hashes to
- # a neighboring group suggested by seer - such that when we finally got the lock and looked
- # again, this time there was a group to find.
-
- group = Group.objects.get(id=existing_grouphash.group_id)
- if group.issue_category != GroupCategory.ERROR:
- logger.info(
- "event_manager.category_mismatch",
- extra={
- "issue_category": group.issue_category,
- "event_type": "error",
- },
- )
- return None
-
- is_new = False
-
- new_hashes = [h for h in grouphashes if h.group_id is None]
-
- primary_hash_values = set(primary_hashes)
- new_hash_values = {gh.hash for gh in new_hashes}
- all_primary_hashes_are_new = primary_hash_values.issubset(new_hash_values)
- record_calculation_metric_with_result(
- project=project,
- has_secondary_hashes=has_secondary_hashes,
- # If at least one primary hash value isn't new, then we'll definitely have found it, since
- # we check all of the primary hashes before any secondary ones. If the primary hash values
- # *are* all new, then we must have gotten here by finding a secondary hash (or we'd be in
- # the group-creation/seer-consultation branch).
- result="found_primary" if not all_primary_hashes_are_new else "found_secondary",
- )
-
- if new_hashes:
- # There may still be secondary hashes that we did not use to find an
- # existing group. A classic example is when grouping makes changes to
- # the app-hash (changes to in_app logic), but the system hash stays
- # stable and is used to find an existing group. Associate any new
- # hashes with the group such that event saving continues to be
- # resilient against grouping algorithm changes.
- #
- # There is a race condition here where two processes could "steal"
- # hashes from each other. In practice this should not be user-visible
- # as group creation is synchronized. Meaning the only way hashes could
- # jump between groups is if there were two processes that:
- #
- # 1) have BOTH found an existing group
- # (otherwise at least one of them would be in the group creation
- # codepath which has transaction isolation/acquires row locks)
- # 2) AND are looking at the same set, or an overlapping set of hashes
- # (otherwise they would not operate on the same rows)
- # 3) yet somehow also sort their event into two different groups each
- # (otherwise the update would not change anything)
- #
- # We think this is a very unlikely situation. A previous version of
- # _save_aggregate had races around group creation which made this race
- # more user visible. For more context, see 84c6f75a and d0e22787, as
- # well as GH-5085.
- GroupHash.objects.filter(id__in=[h.id for h in new_hashes]).exclude(
- state=GroupHash.State.LOCKED_IN_MIGRATION
- ).update(group=group)
-
- is_regression = _process_existing_aggregate(
- group=group,
- event=event,
- incoming_group_values=group_creation_kwargs,
- release=release,
- )
-
- return GroupInfo(group, is_new, is_regression)
-
-
-# TODO: None of the seer logic has been added to this version yet, so you can't simultaneously use
-# optimized transitions and seer
-def _save_aggregate_new(
+def assign_event_to_group(
event: Event,
job: Job,
metric_tags: MutableTags,
@@ -1602,7 +1308,8 @@ def _save_aggregate_new(
if primary.existing_grouphash:
group_info = handle_existing_grouphash(job, primary.existing_grouphash, primary.grouphashes)
result = "found_primary"
- # If we haven't, try again using the secondary config
+ # If we haven't, try again using the secondary config. (If there is no secondary config, or
+ # we're out of the transition period, we'll get back the empty `NULL_GROUPHASH_INFO`.)
else:
secondary = get_hashes_and_grouphashes(job, maybe_run_secondary_grouping, metric_tags)
all_grouphashes = primary.grouphashes + secondary.grouphashes
@@ -1614,7 +1321,9 @@ def _save_aggregate_new(
result = "found_secondary"
# If we still haven't found a group, ask Seer for a match (if enabled for the project)
else:
- seer_matched_grouphash = maybe_check_seer_for_matching_grouphash(event)
+ seer_matched_grouphash = maybe_check_seer_for_matching_grouphash(
+ event, primary.variants, all_grouphashes
+ )
if seer_matched_grouphash:
group_info = handle_existing_grouphash(job, seer_matched_grouphash, all_grouphashes)
@@ -1631,14 +1340,7 @@ def _save_aggregate_new(
maybe_run_background_grouping(project, job)
record_hash_calculation_metrics(
- primary.config, primary.hashes, secondary.config, secondary.hashes
- )
- # TODO: Once the legacy `_save_aggregate` goes away, the logic inside of
- # `record_calculation_metric_with_result` can be pulled into `record_hash_calculation_metrics`
- record_calculation_metric_with_result(
- project=project,
- has_secondary_hashes=len(secondary.hashes) > 0,
- result=result,
+ project, primary.config, primary.hashes, secondary.config, secondary.hashes, result
)
# Now that we've used the current and possibly secondary grouping config(s) to calculate the
@@ -1647,6 +1349,13 @@ def _save_aggregate_new(
# erroneously create new groups.
update_grouping_config_if_needed(project, "ingest")
+    # The only way there won't be group info is if we matched to a performance, cron, replay, or
+    # other non-error-type group because of a hash collision - exceedingly unlikely, and not
+    # something we've ever observed, but theoretically possible.
+ if group_info:
+ event.group = group_info.group
+ job["groups"] = [group_info]
+
return group_info
@@ -1654,7 +1363,7 @@ def get_hashes_and_grouphashes(
job: Job,
hash_calculation_function: Callable[
[Project, Job, MutableTags],
- tuple[GroupingConfig, list[str]],
+ tuple[GroupingConfig, list[str], dict[str, BaseVariant]],
],
metric_tags: MutableTags,
) -> GroupHashInfo:
@@ -1669,14 +1378,14 @@ def get_hashes_and_grouphashes(
project = job["event"].project
# These will come back as Nones if the calculation decides it doesn't need to run
- grouping_config, hashes = hash_calculation_function(project, job, metric_tags)
+ grouping_config, hashes, variants = hash_calculation_function(project, job, metric_tags)
if hashes:
- grouphashes = get_or_create_grouphashes(project, hashes)
+ grouphashes = get_or_create_grouphashes(project, hashes, grouping_config["id"])
- existing_grouphash = find_existing_grouphash(grouphashes)
+ existing_grouphash = find_grouphash_with_group(grouphashes)
- return GroupHashInfo(grouping_config, hashes, grouphashes, existing_grouphash)
+ return GroupHashInfo(grouping_config, variants, hashes, grouphashes, existing_grouphash)
else:
return NULL_GROUPHASH_INFO
@@ -1706,12 +1415,16 @@ def handle_existing_grouphash(
# (otherwise the update would not change anything)
#
# We think this is a very unlikely situation. A previous version of
- # _save_aggregate had races around group creation which made this race
+ # this function had races around group creation which made this race
# more user visible. For more context, see 84c6f75a and d0e22787, as
# well as GH-5085.
group = Group.objects.get(id=existing_grouphash.group_id)
- if check_for_category_mismatch(group):
+ # As far as we know this has never happened, but in theory at least, the error event hashing
+ # algorithm and other event hashing algorithms could come up with the same hash value in the
+ # same project and our hash could have matched to a non-error group. Just to be safe, we make
+ # sure that's not the case before proceeding.
+ if is_non_error_type_group(group):
return None
# There may still be hashes that we did not use to find an existing
@@ -1777,7 +1490,7 @@ def create_group_with_grouphashes(job: Job, grouphashes: list[GroupHash]) -> Gro
# condition scenario above, we'll have been blocked long enough for the other event to
# have created the group and updated our grouphashes with a group id, which means this
# time, we'll find something.
- existing_grouphash = find_existing_grouphash(grouphashes)
+ existing_grouphash = find_grouphash_with_group(grouphashes)
# If we still haven't found a matching grouphash, we're now safe to go ahead and create
# the group.
@@ -1806,16 +1519,6 @@ def _create_group(
first_release: Release | None = None,
**group_creation_kwargs: Any,
) -> Group:
- # Temporary log to debug events seeming to disappear after being sent to Seer
- if event.data.get("seer_similarity"):
- logger.info(
- "seer.similarity.pre_create_group",
- extra={
- "event_id": event.event_id,
- "hash": event.get_primary_hash(),
- "project": project.id,
- },
- )
short_id = _get_next_short_id(project)
@@ -1891,18 +1594,6 @@ def _create_group(
logger.exception("Error after unsticking project counter")
raise
- # Temporary log to debug events seeming to disappear after being sent to Seer
- if event.data.get("seer_similarity"):
- logger.info(
- "seer.similarity.post_create_group",
- extra={
- "event_id": event.event_id,
- "hash": event.get_primary_hash(),
- "project": project.id,
- "group_id": group.id,
- },
- )
-
return group
diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py
index 7f6c049de8bc1..6014a7f2ff088 100644
--- a/src/sentry/eventstore/models.py
+++ b/src/sentry/eventstore/models.py
@@ -18,7 +18,7 @@
from sentry import eventtypes
from sentry.db.models import NodeData
-from sentry.grouping.variants import BaseVariant, KeyedVariants
+from sentry.grouping.variants import BaseVariant
from sentry.interfaces.base import Interface, get_interfaces
from sentry.issues.grouptype import GroupCategory
from sentry.issues.issue_occurrence import IssueOccurrence
@@ -332,6 +332,29 @@ def get_grouping_config(self) -> GroupingConfig:
return get_grouping_config_dict_for_event_data(self.data, self.project)
+ def get_hashes_and_variants(
+ self, config: StrategyConfiguration | None = None
+ ) -> tuple[list[str], dict[str, BaseVariant]]:
+ """
+ Return the event's hash values, calculated using the given config, along with the
+ `variants` data used in grouping.
+ """
+
+ variants = self.get_grouping_variants(config)
+ # Sort the variants so that the system variant (if any) is always last, in order to resolve
+ # ambiguities when choosing primary_hash for Snuba
+ sorted_variants = sorted(
+ variants.items(),
+ key=lambda name_and_variant: 1 if name_and_variant[0] == "system" else 0,
+ )
+ # Get each variant's hash value in variant order, dropping duplicates and Nones
+ hashes = [h for h in dict.fromkeys(variant.get_hash() for _, variant in sorted_variants) if h is not None]
+
+ # Write to event before returning
+ self.data["hashes"] = hashes
+
+ return (hashes, variants)
+
def get_hashes(self, force_config: StrategyConfiguration | None = None) -> list[str]:
"""
Returns the calculated hashes for the event. This uses the stored
@@ -353,37 +376,7 @@ def get_hashes(self, force_config: StrategyConfiguration | None = None) -> list[
return hashes
# Create fresh hashes
- from sentry.grouping.api import sort_grouping_variants
-
- variants = self.get_grouping_variants(force_config)
- hashes = [
- hash_
- for _, hash_ in self._hashes_from_sorted_grouping_variants(
- sort_grouping_variants(variants)
- )
- ]
-
- # Write to event before returning
- self.data["hashes"] = hashes
- return hashes
-
- @staticmethod
- def _hashes_from_sorted_grouping_variants(
- variants: KeyedVariants,
- ) -> list[tuple[str, str]]:
- """Create hashes from variants and filter out duplicates and None values"""
-
- filtered_hashes = []
- seen_hashes = set()
- for name, variant in variants:
- hash_ = variant.get_hash()
- if hash_ is None or hash_ in seen_hashes:
- continue
-
- seen_hashes.add(hash_)
- filtered_hashes.append((name, hash_))
-
- return filtered_hashes
+ return self.get_hashes_and_variants(force_config)[0]
def normalize_stacktraces_for_grouping(self, grouping_config: StrategyConfiguration) -> None:
"""Normalize stacktraces and clear memoized interfaces
@@ -609,7 +602,7 @@ def group_id(self) -> int | None:
def group_id(self, value: int | None) -> None:
self._group_id = value
- # TODO We need a better way to cache these properties. functools
+ # TODO: We need a better way to cache these properties. functools
# doesn't quite do the trick as there is a reference bug with unsaved
# models. But the current _group_cache thing is also clunky because these
# properties need to be stripped out in __getstate__.
diff --git a/src/sentry/eventstream/base.py b/src/sentry/eventstream/base.py
index dc7a7dc411fb0..ac505a9a7a215 100644
--- a/src/sentry/eventstream/base.py
+++ b/src/sentry/eventstream/base.py
@@ -6,9 +6,8 @@
from enum import Enum
from typing import TYPE_CHECKING, Any, Optional, TypedDict, cast
-from django.conf import settings
-
from sentry.issues.issue_occurrence import IssueOccurrence
+from sentry.queue.routers import SplitQueueRouter
from sentry.tasks.post_process import post_process_group
from sentry.utils.cache import cache_key_for_event
from sentry.utils.services import Service
@@ -65,6 +64,9 @@ class EventStream(Service):
"_get_event_type",
)
+ def __init__(self, **options: Any) -> None:
+ self.__celery_router = SplitQueueRouter()
+
def _dispatch_post_process_group_task(
self,
event_id: str,
@@ -108,9 +110,7 @@ def _get_queue_for_post_process(self, event: Event | GroupEvent) -> str:
else:
default_queue = "post_process_errors"
- return settings.SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER.get(
- default_queue, lambda: default_queue
- )()
+ return self.__celery_router.route_for_queue(default_queue)
def _get_occurrence_data(self, event: Event | GroupEvent) -> MutableMapping[str, Any]:
occurrence = cast(Optional[IssueOccurrence], getattr(event, "occurrence", None))
diff --git a/src/sentry/eventstream/kafka/backend.py b/src/sentry/eventstream/kafka/backend.py
index 8dc599d10a457..f1dd4b5269f42 100644
--- a/src/sentry/eventstream/kafka/backend.py
+++ b/src/sentry/eventstream/kafka/backend.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import logging
+import time
from collections.abc import Mapping, MutableMapping, Sequence
from datetime import datetime
from typing import TYPE_CHECKING, Any
@@ -25,10 +26,12 @@
class KafkaEventStream(SnubaProtocolEventStream):
def __init__(self, **options: Any) -> None:
+ super().__init__(**options)
self.topic = Topic.EVENTS
self.transactions_topic = Topic.TRANSACTIONS
self.issue_platform_topic = Topic.EVENTSTREAM_GENERIC
self.__producers: MutableMapping[Topic, Producer] = {}
+ self.error_last_logged_time: int | None = None
def get_transactions_topic(self, project_id: int) -> Topic:
return self.transactions_topic
@@ -42,8 +45,11 @@ def get_producer(self, topic: Topic) -> Producer:
return self.__producers[topic]
def delivery_callback(self, error: KafkaError | None, message: KafkaMessage) -> None:
+ now = int(time.time())
if error is not None:
- logger.warning("Could not publish message (error: %s): %r", error, message)
+ if self.error_last_logged_time is None or now > self.error_last_logged_time + 60:
+ self.error_last_logged_time = now
+ logger.error("Could not publish message (error: %s): %r", error, message)
def _get_headers_for_insert(
self,
diff --git a/src/sentry/features/handler.py b/src/sentry/features/handler.py
index 846626c158a94..4239e49506a10 100644
--- a/src/sentry/features/handler.py
+++ b/src/sentry/features/handler.py
@@ -1,9 +1,7 @@
from __future__ import annotations
-__all__ = ["FeatureHandler", "BatchFeatureHandler"]
-
import abc
-from collections.abc import Mapping, MutableSet, Sequence
+from collections.abc import Sequence
from typing import TYPE_CHECKING
if TYPE_CHECKING:
@@ -17,6 +15,9 @@
from sentry.users.services.user import RpcUser
+__all__ = ["FeatureHandler", "BatchFeatureHandler"]
+
+
class FeatureHandler:
"""
Base class for defining custom logic for feature decisions.
@@ -28,7 +29,7 @@ class FeatureHandler:
as we don't programatically release features in self-hosted.
"""
- features: MutableSet[str] = set()
+ features: set[str] = set()
def __call__(self, feature: Feature, actor: User) -> bool | None:
if feature.name not in self.features:
@@ -45,7 +46,7 @@ def has(
) -> bool | None:
raise NotImplementedError
- def has_for_batch(self, batch: FeatureCheckBatch) -> Mapping[Project, bool | None]:
+ def has_for_batch(self, batch: FeatureCheckBatch) -> dict[Project, bool | None]:
# If not overridden, iterate over objects in the batch individually.
return {
obj: self.has(feature, batch.actor)
@@ -60,7 +61,7 @@ def batch_has(
projects: Sequence[Project] | None = None,
organization: Organization | None = None,
batch: bool = True,
- ) -> Mapping[str, Mapping[str, bool | None]] | None:
+ ) -> dict[str, dict[str, bool | None]] | None:
raise NotImplementedError
@@ -80,13 +81,21 @@ class BatchFeatureHandler(FeatureHandler):
@abc.abstractmethod
def _check_for_batch(
- self, feature_name: str, entity: Organization | User, actor: User
+ self,
+ feature_name: str,
+ entity: Organization | User | None,
+ actor: User | RpcUser | AnonymousUser | None,
) -> bool | None:
raise NotImplementedError
- def has(self, feature: Feature, actor: User, skip_entity: bool | None = False) -> bool | None:
+ def has(
+ self,
+ feature: Feature,
+ actor: User | RpcUser | AnonymousUser | None,
+ skip_entity: bool | None = False,
+ ) -> bool | None:
return self._check_for_batch(feature.name, feature.get_subject(), actor)
- def has_for_batch(self, batch: FeatureCheckBatch) -> Mapping[Project, bool | None]:
+ def has_for_batch(self, batch: FeatureCheckBatch) -> dict[Project, bool | None]:
flag = self._check_for_batch(batch.feature_name, batch.subject, batch.actor)
return {obj: flag for obj in batch.objects}
diff --git a/src/sentry/features/manager.py b/src/sentry/features/manager.py
index 4e045f4d8eaa9..98c3eb8b72d58 100644
--- a/src/sentry/features/manager.py
+++ b/src/sentry/features/manager.py
@@ -6,7 +6,7 @@
import abc
from collections import defaultdict
-from collections.abc import Iterable, Mapping, MutableMapping, MutableSet, Sequence
+from collections.abc import Iterable, Sequence
from typing import TYPE_CHECKING, Any
import sentry_sdk
@@ -44,7 +44,7 @@ class RegisteredFeatureManager:
"""
def __init__(self) -> None:
- self._handler_registry: MutableMapping[str, list[FeatureHandler]] = defaultdict(list)
+ self._handler_registry: dict[str, list[FeatureHandler]] = defaultdict(list)
def add_handler(self, handler: FeatureHandler) -> None:
"""
@@ -78,7 +78,7 @@ def has_for_batch(
organization: Organization,
objects: Sequence[Project],
actor: User | None = None,
- ) -> Mapping[Project, bool]:
+ ) -> dict[Project, bool | None]:
"""
Determine if a feature is enabled for a batch of objects.
@@ -100,7 +100,7 @@ def has_for_batch(
>>> FeatureManager.has_for_batch('projects:feature', organization, [project1, project2], actor=request.user)
"""
- result = dict()
+ result: dict[Project, bool | None] = {}
remaining = set(objects)
handlers = self._handler_registry[name]
@@ -111,7 +111,7 @@ def has_for_batch(
with sentry_sdk.start_span(
op="feature.has_for_batch.handler",
- description=f"{type(handler).__name__} ({name})",
+ name=f"{type(handler).__name__} ({name})",
) as span:
batch_size = len(remaining)
span.set_data("Batch Size", batch_size)
@@ -143,17 +143,17 @@ def has_for_batch(
class FeatureManager(RegisteredFeatureManager):
def __init__(self) -> None:
super().__init__()
- self._feature_registry: MutableMapping[str, type[Feature]] = {}
+ self._feature_registry: dict[str, type[Feature]] = {}
# Deprecated: Remove entity_features once flagr has been removed.
- self.entity_features: MutableSet[str] = set()
- self.exposed_features: MutableSet[str] = set()
- self.option_features: MutableSet[str] = set()
- self.flagpole_features: MutableSet[str] = set()
+ self.entity_features: set[str] = set()
+ self.exposed_features: set[str] = set()
+ self.option_features: set[str] = set()
+ self.flagpole_features: set[str] = set()
self._entity_handler: FeatureHandler | None = None
def all(
self, feature_type: type[Feature] = Feature, api_expose_only: bool = False
- ) -> Mapping[str, type[Feature]]:
+ ) -> dict[str, type[Feature]]:
"""
Get a mapping of feature name -> feature class, optionally specific to a
particular feature type.
@@ -328,7 +328,7 @@ def batch_has(
actor: User | RpcUser | AnonymousUser | None = None,
projects: Sequence[Project] | None = None,
organization: Organization | None = None,
- ) -> Mapping[str, Mapping[str, bool | None]] | None:
+ ) -> dict[str, dict[str, bool | None]] | None:
"""
Determine if multiple features are enabled. Unhandled flags will not be in
the results if they cannot be handled.
@@ -346,7 +346,7 @@ def batch_has(
# Fall back to default handler if no entity handler available.
project_features = [name for name in feature_names if name.startswith("projects:")]
if projects and project_features:
- results: MutableMapping[str, Mapping[str, bool]] = {}
+ results: dict[str, dict[str, bool | None]] = {}
for project in projects:
proj_results = results[f"project:{project.id}"] = {}
for feature_name in project_features:
@@ -357,7 +357,7 @@ def batch_has(
org_features = filter(lambda name: name.startswith("organizations:"), feature_names)
if organization and org_features:
- org_results = {}
+ org_results: dict[str, bool | None] = {}
for feature_name in org_features:
org_results[feature_name] = self.has(
feature_name, organization, actor=actor
@@ -370,7 +370,7 @@ def batch_has(
feature_names,
)
if unscoped_features:
- unscoped_results = {}
+ unscoped_results: dict[str, bool | None] = {}
for feature_name in unscoped_features:
unscoped_results[feature_name] = self.has(feature_name, actor=actor)
return {"unscoped": unscoped_results}
@@ -417,7 +417,7 @@ def __init__(
self.objects = objects
self.actor = actor
- def get_feature_objects(self) -> Mapping[Project, Feature]:
+ def get_feature_objects(self) -> dict[Project, Feature]:
"""
Iterate over individual Feature objects.
@@ -429,5 +429,5 @@ def get_feature_objects(self) -> Mapping[Project, Feature]:
return {obj: cls(self.feature_name, obj) for obj in self.objects}
@property
- def subject(self) -> Organization | User:
+ def subject(self) -> Organization | User | None:
return self.organization or self.actor
diff --git a/src/sentry/features/permanent.py b/src/sentry/features/permanent.py
index de5e42547a4bb..9158b2dfa3cc2 100644
--- a/src/sentry/features/permanent.py
+++ b/src/sentry/features/permanent.py
@@ -22,6 +22,8 @@ def register_permanent_features(manager: FeatureManager):
permanent_organization_features = {
# Enable advanced search features, like negation and wildcard matching.
"organizations:advanced-search": True,
+ # Enable anomaly detection alerts
+ "organizations:anomaly-detection-alerts": False,
# Enable multiple Apple app-store-connect sources per project.
"organizations:app-store-connect-multiple": False,
# Enable change alerts for an org
@@ -76,6 +78,8 @@ def register_permanent_features(manager: FeatureManager):
"organizations:integrations-stacktrace-link": True,
# Allow orgs to automatically create Tickets in Issue Alerts
"organizations:integrations-ticket-rules": True,
+ # Enable metric alert charts in email/slack
+ "organizations:metric-alert-chartcuterie": False,
# Enable Performance view
"organizations:performance-view": True,
# Enable profiling view
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index 5c96964c8b84c..c387b5ca24bd1 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -44,7 +44,7 @@ def register_temporary_features(manager: FeatureManager):
# Enables activated alert rules
manager.add("organizations:activated-alert-rules", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable AI Issue Summary feture on the Issue Details page.
+ # Enable AI Issue Summary feature on the Issue Details page.
manager.add("organizations:ai-summary", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables alert creation on indexed events in UI (use for PoC/testing only)
manager.add("organizations:alert-allow-indexed", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
@@ -53,10 +53,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:alert-filters", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enables the migration of alerts (checked in a migration script).
manager.add("organizations:alerts-migration-enabled", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable anomaly detection alerts
- manager.add("organizations:anomaly-detection-alerts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable anomaly detection alerts
- manager.add("organizations:fake-anomaly-detection", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ # Enables EAP alerts
+ manager.add("organizations:alerts-eap", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable anomaly detection feature for rollout
+ manager.add("organizations:anomaly-detection-rollout", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable anomaly detection charts
manager.add("organizations:anomaly-detection-alerts-charts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable anr frame analysis
@@ -82,24 +82,28 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:continuous-profiling-stats", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enable the continuous profiling compatible redesign
manager.add("organizations:continuous-profiling-compat", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Disables legacy cron ingest endpoints
- manager.add("organizations:crons-disable-ingest-endpoints", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
- # Disables legacy cron ingest endpoints
- manager.add("organizations:crons-write-user-feedback", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
# Delightful Developer Metrics (DDM):
# Enables experimental WIP custom metrics related features
manager.add("organizations:custom-metrics-experimental", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enables Info alert for custom metrics and alerts widgets removal
+ manager.add("organizations:custom-metrics-alerts-widgets-removal-info", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable daily summary
manager.add("organizations:daily-summary", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
+ # Enable events analytics platform data in dashboards
+ manager.add("organizations:dashboards-eap", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables import/export functionality for dashboards
manager.add("organizations:dashboards-import", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable metrics enhanced performance in dashboards
manager.add("organizations:dashboards-mep", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable metrics enhanced performance for AM2+ customers as they transition from AM2 to AM3
+ manager.add("organizations:dashboards-metrics-transition", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:dashboards-span-metrics", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
# Enable releases overlay on dashboard chart widgets
manager.add("organizations:dashboards-releases-on-charts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable equations for Big Number widgets
manager.add("organizations:dashboards-bignumber-equations", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable access protected editing of dashboards
+ manager.add("organizations:dashboards-edit-access", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable the dev toolbar PoC code for employees
# Data Secrecy
manager.add("organizations:data-secrecy", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
@@ -117,8 +121,12 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:discover", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enable the org recalibration
manager.add("organizations:ds-org-recalibration", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
+ # Enable custom dynamic sampling rates
+ manager.add("organizations:dynamic-sampling-custom", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables data secrecy mode
manager.add("organizations:enterprise-data-secrecy", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
+ # Enable issue platform deletion
+ manager.add("organizations:issue-platform-deletion", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable archive/escalating issue workflow features in v2
manager.add("organizations:escalating-issues-v2", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable emiting escalating data to the metrics backend
@@ -129,12 +137,8 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:feature-flag-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable disabling gitlab integrations when broken is detected
manager.add("organizations:gitlab-disable-on-broken", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable only calculating a secondary hash when needed
- manager.add("organizations:grouping-suppress-unnecessary-secondary-hash", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Allow creating `GroupHashMetadata` records
manager.add("organizations:grouphash-metadata-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Allows an org to have a larger set of project ownership rules per project
- manager.add("organizations:higher-ownership-limit", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable increased issue_owners rate limit for auto-assignment
manager.add("organizations:increased-issue-owners-rate-limit", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Starfish: extract metrics from the spans
@@ -156,6 +160,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:issue-details-always-show-trace", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables the UI for Autofix in issue details
manager.add("organizations:issue-details-autofix-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable Issue Platform deletion
+ manager.add("organizations:issue-platform-deletion", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable Issue Platform deletion UI
+ manager.add("organizations:issue-platform-deletion-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables a toggle for entering the new issue details UI
manager.add("organizations:issue-details-new-experience-toggle", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables access to the streamlined issue details UI
@@ -175,17 +183,17 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:issue-search-snuba", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable the new issue stream search bar UI
manager.add("organizations:issue-stream-search-query-builder", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable issue stream table layout changes
+ manager.add("organizations:issue-stream-table-layout", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:large-debug-files", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
- # Enabled latest adopted release filter for issue alerts
- manager.add("organizations:latest-adopted-release-filter", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable members to invite teammates to organizations
manager.add("organizations:members-invite-teammates", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:mep-rollout-flag", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:mep-use-default-tags", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable messaging integration onboarding when setting up alerts
manager.add("organizations:messaging-integration-onboarding", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable metric alert charts in email/slack
- manager.add("organizations:metric-alert-chartcuterie", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
+ # Enable messaging-integration onboarding when creating a new project
+ manager.add("organizations:messaging-integration-onboarding-project-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable threshold period in metric alert rule builder
manager.add("organizations:metric-alert-threshold-period", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables the search bar for metrics samples list
@@ -203,9 +211,6 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:more-slow-alerts", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
manager.add("organizations:navigation-sidebar-v2", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:new-page-filter", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, default=True, api_expose=True)
- manager.add("organizations:new-weekly-report", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Display warning banner for every event issue alerts
- manager.add("organizations:noisy-alert-warning", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Notify all project members when fallthrough is disabled, instead of just the auto-assignee
manager.add("organizations:notification-all-recipients", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Drop obsoleted status changes in occurence consumer
@@ -222,6 +227,8 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:on-demand-metrics-query-spec-version-two", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Display metrics components with a new design
manager.add("organizations:metrics-new-inputs", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Display new Source map uploads view in settings
+ manager.add("organizations:new-source-map-uploads-view", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Display on demand metrics related UI elements
manager.add("organizations:on-demand-metrics-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Display on demand metrics related UI elements, for dashboards and widgets. The other flag is for alerts.
@@ -230,6 +237,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:onboarding", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enable the SDK selection feature in the onboarding
manager.add("organizations:onboarding-sdk-selection", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable large ownership rule file size limit
+ manager.add("organizations:ownership-size-limit-large", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ # Enable xlarge ownership rule file size limit
+ manager.add("organizations:ownership-size-limit-xlarge", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable views for anomaly detection
manager.add("organizations:performance-anomaly-detection-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable mobile performance score calculation for transactions in relay
@@ -315,8 +326,6 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:performance-use-metrics", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enable showing INP web vital in default views
manager.add("organizations:performance-vitals-inp", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable the GA features for priority alerts
- manager.add("organizations:priority-ga-features", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, default=True, api_expose=True)
# Enable profiling
manager.add("organizations:profiling", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enabled for those orgs who participated in the profiling Beta program
@@ -336,8 +345,8 @@ def register_temporary_features(manager: FeatureManager):
# Limit project events endpoint to only query back a certain number of days
manager.add("organizations:project-event-date-limit", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
manager.add("organizations:project-templates", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
- # Enable react-router 6 in the UI
- manager.add("organizations:react-router-6", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ # Enable the new quick start guide
+ manager.add("organizations:quick-start-updates", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable the new Related Events feature
manager.add("organizations:related-events", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable related issues feature
@@ -378,10 +387,6 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:search-query-builder-project-details", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:search-query-builder-alerts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:search-query-builder-performance", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable the Replay Details > Accessibility tab
- manager.add("organizations:session-replay-a11y-tab", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable the accessibility issues endpoint
- manager.add("organizations:session-replay-accessibility-issues", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable combined envelope Kafka items in Relay
manager.add("organizations:session-replay-combined-envelope-items", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable canvas recording
@@ -423,6 +428,8 @@ def register_temporary_features(manager: FeatureManager):
# Add regression chart as image to slack message
manager.add("organizations:slack-endpoint-regression-image", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
manager.add("organizations:slack-function-regression-image", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
+ # Enable linking to Slack alerts from multiple teams to a single channel
+ manager.add("organizations:slack-multiple-team-single-channel-linking", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
manager.add("organizations:stacktrace-processing-caching", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable SAML2 Single-logout
manager.add("organizations:sso-saml2-slo", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
@@ -492,8 +499,6 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:transaction-name-sanitization", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enables creation and full updating of uptime monitors via the api
manager.add("organizations:uptime-api-create-update", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Displys the "Uptime Monitor" option in the alert creation wizard
- manager.add("organizations:uptime-display-wizard-create", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables automatic hostname detection in uptime
manager.add("organizations:uptime-automatic-hostname-detection", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enables automatic subscription creation in uptime
@@ -505,8 +510,6 @@ def register_temporary_features(manager: FeatureManager):
# Enables uptime related settings for projects and orgs
manager.add('organizations:uptime-settings', OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:use-metrics-layer", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable User Feedback v2 ingest
- manager.add("organizations:user-feedback-ingest", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Use ReplayClipPreview inside the User Feedback Details panel
manager.add("organizations:user-feedback-replay-clip", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable User Feedback spam auto filtering feature ingest
@@ -529,8 +532,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:widget-viewer-modal-minimap", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enabled unresolved issue webhook for organization
manager.add("organizations:webhooks-unresolved", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=True)
- # Enable new feature parsing code for Jira integrations
- manager.add("organizations:new-jira-transformers", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ # Enable EventUniqueUserFrequencyConditionWithConditions special alert condition
+ manager.add("organizations:event-unique-user-frequency-condition-with-conditions", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ # Use spans instead of transactions for dynamic sampling calculations. This will become the new default.
+ manager.add("organizations:dynamic-sampling-spans", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# NOTE: Don't add features down here! Add them to their specific group and sort
# them alphabetically! The order features are registered is not important.
diff --git a/src/sentry/feedback/usecases/create_feedback.py b/src/sentry/feedback/usecases/create_feedback.py
index 57c9aae5e4a03..f19ba419d7c8c 100644
--- a/src/sentry/feedback/usecases/create_feedback.py
+++ b/src/sentry/feedback/usecases/create_feedback.py
@@ -8,7 +8,7 @@
import jsonschema
-from sentry import features
+from sentry import features, options
from sentry.constants import DataCategory
from sentry.eventstore.models import Event, GroupEvent
from sentry.feedback.usecases.spam_detection import is_spam
@@ -232,7 +232,7 @@ def create_feedback_issue(event, project_id: int, source: FeedbackCreationSource
is_message_spam = is_spam(event["contexts"]["feedback"]["message"])
except Exception:
# until we have LLM error types ironed out, just catch all exceptions
- logger.exception("Error checking if message is spam")
+ logger.exception("Error checking if message is spam", extra={"project_id": project_id})
metrics.incr(
"feedback.create_feedback_issue.spam_detection",
tags={
@@ -353,6 +353,9 @@ def shim_to_feedback(
User feedbacks are an event type, so we try and grab as much from the
legacy user report and event to create the new feedback.
"""
+ if is_in_feedback_denylist(project.organization):
+ return
+
try:
feedback_event: dict[str, Any] = {
"contexts": {
@@ -399,3 +402,7 @@ def auto_ignore_spam_feedbacks(project, issue_fingerprint):
new_substatus=GroupSubStatus.FOREVER,
),
)
+
+
+def is_in_feedback_denylist(organization):
+ return organization.slug in options.get("feedback.organizations.slug-denylist")
diff --git a/src/sentry/filestore/gcs.py b/src/sentry/filestore/gcs.py
index 36be72b03e840..c02c38b055c27 100644
--- a/src/sentry/filestore/gcs.py
+++ b/src/sentry/filestore/gcs.py
@@ -23,11 +23,11 @@
from sentry.net.http import TimeoutAdapter
from sentry.utils import metrics
-from sentry.utils.retries import ConditionalRetryPolicy, exponential_delay
+from sentry.utils.retries import ConditionalRetryPolicy, sigmoid_delay
# how many times do we want to try if stuff goes wrong
GCS_RETRIES = 5
-REPLAY_GCS_RETRIES = GCS_RETRIES + 2
+REPLAY_GCS_RETRIES = 125
# Which errors are eligible for retry.
@@ -405,6 +405,7 @@ def should_retry(attempt: int, e: Exception) -> bool:
"""Retry gateway timeout exceptions up to the limit."""
return attempt <= REPLAY_GCS_RETRIES and isinstance(e, GCS_RETRYABLE_ERRORS)
- # Retry cadence: 0.025, 0.05, 0.1, 0.2, 0.4, 0.8, 1.6, 3.2 => ~6.5 seconds
- policy = ConditionalRetryPolicy(should_retry, exponential_delay(0.05))
+ # Retry cadence: After a brief period of fast retries the function will retry once
+ # per second for two minutes.
+ policy = ConditionalRetryPolicy(should_retry, sigmoid_delay())
policy(callable)
diff --git a/src/sentry/flags/README.md b/src/sentry/flags/README.md
new file mode 100644
index 0000000000000..6a4cab09d9f61
--- /dev/null
+++ b/src/sentry/flags/README.md
@@ -0,0 +1 @@
+flag log
diff --git a/src/sentry/api/validators/sentry_apps/__init__.py b/src/sentry/flags/__init__.py
similarity index 100%
rename from src/sentry/api/validators/sentry_apps/__init__.py
rename to src/sentry/flags/__init__.py
diff --git a/src/sentry/flags/docs/api.md b/src/sentry/flags/docs/api.md
new file mode 100644
index 0000000000000..6a42c59fe9ed9
--- /dev/null
+++ b/src/sentry/flags/docs/api.md
@@ -0,0 +1,114 @@
+# Flags API
+
+Host: https://sentry.io/api/0
+
+**Authors.**
+
+@cmanallen
+
+**How to read this document.**
+
+This document is structured by resource with each resource having actions that can be performed against it. Every action that either accepts a request or returns a response WILL document the full interchange format. Clients may opt to restrict response data or provide a subset of the request data.
+
+## Flag Logs [/organizations/<organization_id_or_slug>/flags/logs/]
+
+- Parameters
+ - flag (optional, string) - The flag name to filter the result by. Can be specified multiple times.
+ - start (optional, string) - ISO 8601 format (`YYYY-MM-DDTHH:mm:ss.sssZ`)
+ - end (optional, string) - ISO 8601 format. Required if `start` is set.
+ - statsPeriod (optional, string) - A positive integer suffixed with a unit type.
+  - cursor (optional, string)
+ - per_page (optional, number)
+ Default: 10
+ - offset (optional, number)
+ Default: 0
+
+### Browse Flag Logs [GET]
+
+Retrieve a collection of flag logs.
+
+**Attributes**
+
+| Column | Type | Description |
+| --------------- | ------ | ------------------------------------------------------------- |
+| action | string | Enum of `created`, `updated`, or `deleted`. |
+| created_at | string | ISO-8601 timestamp of when the flag was changed. |
+| created_by | string | The user responsible for the change. |
+| created_by_type | string | Enum of `email`, `id`, or `name`. |
+| flag | string | The name of the flag changed. Maps to flag_log_id in the URI. |
+| id | number | A unique identifier for the log entry. |
+| tags | object | A collection of provider-specified scoping metadata. |
+
+- Response 200
+
+ ```json
+ {
+ "data": [
+ {
+ "action": "created",
+ "created_at": "2024-01-01T05:12:33",
+ "created_by": "2552",
+ "created_by_type": "id",
+ "flag": "my-flag-name",
+ "id": 1,
+ "tags": {
+ "environment": "production"
+ }
+ }
+ ]
+ }
+ ```
+
+## Flag Log [/organizations/<organization_id_or_slug>/flags/logs/<flag_log_id>/]
+
+### Fetch Flag Log [GET]
+
+Retrieve a single flag log instance.
+
+- Response 200
+
+ ```json
+ {
+ "data": {
+ "action": "updated",
+ "created_at": "2024-11-19T19:12:55",
+ "created_by": "user@site.com",
+ "created_by_type": "email",
+ "flag": "new-flag-name",
+ "id": 1,
+ "tags": {
+ "environment": "development"
+ }
+ }
+ }
+ ```
+
+## Webhooks [/webhooks/flags/organization/<organization_id_or_slug>/provider/<provider>/]
+
+### Create Flag Log [POST]
+
+The shape of the request object varies by provider. The `<provider>` URI parameter informs the server of the shape of the request and it is on the server to handle the provider. The following providers are supported: Unleash, Split, and LaunchDarkly.
+
+**Flag Pole Example:**
+
+Flag pole is Sentry owned. It matches our audit-log resource because it is designed for that purpose.
+
+- Request (application/json)
+
+ ```json
+ {
+ "data": [
+ {
+ "action": "updated",
+ "created_at": "2024-11-19T19:12:55",
+ "created_by": "colton.allen@sentry.io",
+ "flag": "flag-name",
+ "tags": {
+ "commit_sha": "1f33a107d7cd060ab9c98e11c9e5a62dc1347861"
+ }
+ }
+ ]
+ }
+ ```
+
+- Response 201
diff --git a/src/sentry/remote_config/__init__.py b/src/sentry/flags/endpoints/__init__.py
similarity index 100%
rename from src/sentry/remote_config/__init__.py
rename to src/sentry/flags/endpoints/__init__.py
diff --git a/src/sentry/flags/endpoints/hooks.py b/src/sentry/flags/endpoints/hooks.py
new file mode 100644
index 0000000000000..0f71378062dbe
--- /dev/null
+++ b/src/sentry/flags/endpoints/hooks.py
@@ -0,0 +1,74 @@
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.authentication import OrgAuthTokenAuthentication
+from sentry.api.base import Endpoint, region_silo_endpoint
+from sentry.api.bases.organization import OrganizationPermission
+from sentry.api.exceptions import ResourceDoesNotExist
+from sentry.flags.providers import (
+ DeserializationError,
+ InvalidProvider,
+ handle_provider_event,
+ write,
+)
+from sentry.models.organization import Organization
+from sentry.utils.sdk import bind_organization_context
+
+"""HTTP endpoint.
+
+This endpoint accepts only organization authorization tokens. I've made the conscious
+decision to exclude all other forms of authentication. We don't want users accidentally
+writing logs or leaked DSNs generating invalid log entries. An organization token is
+secret and reasonably restricted and so makes sense for this use case where we have
+inter-provider communication.
+
+This endpoint allows writes if any write-level "org" permission was provided.
+"""
+
+
+class OrganizationFlagHookPermission(OrganizationPermission):
+ scope_map = {
+ "POST": ["org:ci"],
+ }
+
+
+@region_silo_endpoint
+class OrganizationFlagsHooksEndpoint(Endpoint):
+ authentication_classes = (OrgAuthTokenAuthentication,)
+ owner = ApiOwner.REPLAY
+ permission_classes = (OrganizationFlagHookPermission,)
+ publish_status = {
+ "POST": ApiPublishStatus.PRIVATE,
+ }
+
+ def convert_args(
+ self,
+ request: Request,
+ organization_id_or_slug: int | str,
+ *args,
+ **kwargs,
+ ):
+ try:
+ if isinstance(organization_id_or_slug, int):
+ organization = Organization.objects.get_from_cache(id=organization_id_or_slug)
+ else:
+ organization = Organization.objects.get_from_cache(slug=organization_id_or_slug)
+ except Organization.DoesNotExist:
+ raise ResourceDoesNotExist
+
+ self.check_object_permissions(request, organization)
+ bind_organization_context(organization)
+
+ kwargs["organization"] = organization
+ return args, kwargs
+
+ def post(self, request: Request, organization: Organization, provider: str) -> Response:
+ try:
+ write(handle_provider_event(provider, request.data, organization.id))
+ return Response(status=200)
+ except InvalidProvider:
+ raise ResourceDoesNotExist
+ except DeserializationError as exc:
+ return Response(exc.errors, status=400)
diff --git a/src/sentry/flags/endpoints/logs.py b/src/sentry/flags/endpoints/logs.py
new file mode 100644
index 0000000000000..45dfeea606ec2
--- /dev/null
+++ b/src/sentry/flags/endpoints/logs.py
@@ -0,0 +1,95 @@
+from datetime import datetime
+from typing import Any, TypedDict
+
+from rest_framework.exceptions import ParseError
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+# from sentry import features
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import region_silo_endpoint
+from sentry.api.bases.organization import OrganizationEndpoint
+from sentry.api.exceptions import ResourceDoesNotExist
+from sentry.api.paginator import OffsetPaginator
+from sentry.api.serializers import Serializer, register, serialize
+from sentry.api.utils import get_date_range_from_params
+from sentry.flags.models import ActionEnum, CreatedByTypeEnum, FlagAuditLogModel
+from sentry.models.organization import Organization
+
+
+class FlagAuditLogModelSerializerResponse(TypedDict):
+ id: int
+ action: str
+ created_at: datetime
+ created_by: str
+ created_by_type: str
+ flag: str
+ tags: dict[str, Any]
+
+
+@register(FlagAuditLogModel)
+class FlagAuditLogModelSerializer(Serializer):
+ def serialize(self, obj, attrs, user, **kwargs) -> FlagAuditLogModelSerializerResponse:
+ return {
+ "id": obj.id,
+ "action": ActionEnum.to_string(obj.action),
+ "created_at": obj.created_at.isoformat(),
+ "created_by": obj.created_by,
+ "created_by_type": CreatedByTypeEnum.to_string(obj.created_by_type),
+ "flag": obj.flag,
+ "tags": obj.tags,
+ }
+
+
+@region_silo_endpoint
+class OrganizationFlagLogIndexEndpoint(OrganizationEndpoint):
+ owner = ApiOwner.FLAG
+ publish_status = {"GET": ApiPublishStatus.PRIVATE}
+
+ def get(self, request: Request, organization: Organization) -> Response:
+ # if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
+ # raise ResourceDoesNotExist
+
+ start, end = get_date_range_from_params(request.GET)
+ if start is None or end is None:
+ raise ParseError(detail="Invalid date range")
+
+ queryset = FlagAuditLogModel.objects.filter(
+ created_at__gte=start,
+ created_at__lt=end,
+ organization_id=organization.id,
+ )
+
+ flags = request.GET.getlist("flag")
+ if flags:
+ queryset = queryset.filter(flag__in=flags)
+
+ return self.paginate(
+ request=request,
+ queryset=queryset,
+ on_results=lambda x: {
+ "data": serialize(x, request.user, FlagAuditLogModelSerializer())
+ },
+ paginator_cls=OffsetPaginator,
+ )
+
+
+@region_silo_endpoint
+class OrganizationFlagLogDetailsEndpoint(OrganizationEndpoint):
+ owner = ApiOwner.FLAG
+ publish_status = {"GET": ApiPublishStatus.PRIVATE}
+
+ def get(self, request: Request, organization: Organization, flag_log_id: int) -> Response:
+ # if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
+ # raise ResourceDoesNotExist
+
+ try:
+ model = FlagAuditLogModel.objects.filter(
+ id=flag_log_id,
+ organization_id=organization.id,
+ ).get()
+ except FlagAuditLogModel.DoesNotExist:
+ raise ResourceDoesNotExist
+
+ return self.respond({"data": serialize(model, request.user, FlagAuditLogModelSerializer())})
diff --git a/src/sentry/flags/migrations/0001_add_flag_audit_log.py b/src/sentry/flags/migrations/0001_add_flag_audit_log.py
new file mode 100644
index 0000000000000..58e9e223174c8
--- /dev/null
+++ b/src/sentry/flags/migrations/0001_add_flag_audit_log.py
@@ -0,0 +1,58 @@
+# Generated by Django 5.1.1 on 2024-09-25 15:31
+
+import django.utils.timezone
+from django.db import migrations, models
+
+import sentry.db.models.fields.bounded
+import sentry.db.models.fields.hybrid_cloud_foreign_key
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ initial = True
+
+ dependencies = []
+
+ operations = [
+ migrations.CreateModel(
+ name="FlagAuditLogModel",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("action", models.PositiveSmallIntegerField()),
+ ("created_at", models.DateTimeField(default=django.utils.timezone.now)),
+ ("created_by", models.CharField(max_length=100)),
+ ("created_by_type", models.PositiveSmallIntegerField()),
+ ("flag", models.CharField(max_length=100)),
+ (
+ "organization_id",
+ sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
+ "sentry.Organization", db_index=True, on_delete="CASCADE"
+ ),
+ ),
+ ("tags", models.JSONField()),
+ ],
+ options={
+ "db_table": "flags_audit_log",
+ "indexes": [models.Index(fields=["flag"], name="flags_audit_flag_455822_idx")],
+ },
+ ),
+ ]
diff --git a/tests/sentry/api/validators/__init__.py b/src/sentry/flags/migrations/__init__.py
similarity index 100%
rename from tests/sentry/api/validators/__init__.py
rename to src/sentry/flags/migrations/__init__.py
diff --git a/src/sentry/flags/models.py b/src/sentry/flags/models.py
new file mode 100644
index 0000000000000..80852d7cc89e5
--- /dev/null
+++ b/src/sentry/flags/models.py
@@ -0,0 +1,85 @@
+from enum import Enum
+
+from django.db import models
+from django.utils import timezone
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import Model, region_silo_model, sane_repr
+from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
+
+
+class ActionEnum(Enum):
+ CREATED = 0
+ DELETED = 1
+ UPDATED = 2
+
+ @classmethod
+ def to_string(cls, integer):
+ if integer == 0:
+ return "created"
+ if integer == 1:
+ return "deleted"
+ if integer == 2:
+ return "updated"
+ raise ValueError
+
+
+ACTION_MAP = {
+ "created": ActionEnum.CREATED.value,
+ "deleted": ActionEnum.DELETED.value,
+ "updated": ActionEnum.UPDATED.value,
+}
+
+
+class CreatedByTypeEnum(Enum):
+ EMAIL = 0
+ ID = 1
+ NAME = 2
+
+ @classmethod
+ def to_string(cls, integer):
+ if integer == 0:
+ return "email"
+ if integer == 1:
+ return "id"
+ if integer == 2:
+ return "name"
+ raise ValueError
+
+
+CREATED_BY_TYPE_MAP = {
+ "email": CreatedByTypeEnum.EMAIL.value,
+ "id": CreatedByTypeEnum.ID.value,
+ "name": CreatedByTypeEnum.NAME.value,
+}
+
+
+@region_silo_model
+class FlagAuditLogModel(Model):
+ __relocation_scope__ = RelocationScope.Excluded
+
+ ACTION_TYPES = (
+ (ActionEnum.CREATED, "created"),
+ (ActionEnum.UPDATED, "updated"),
+ (ActionEnum.DELETED, "deleted"),
+ )
+ CREATED_BY_TYPE_TYPES = (
+ (CreatedByTypeEnum.EMAIL, "email"),
+ (CreatedByTypeEnum.NAME, "name"),
+ (CreatedByTypeEnum.ID, "id"),
+ )
+
+ action = models.PositiveSmallIntegerField(choices=ACTION_TYPES)
+ created_at = models.DateTimeField(default=timezone.now)
+ created_by = models.CharField(max_length=100)
+ created_by_type = models.PositiveSmallIntegerField(choices=CREATED_BY_TYPE_TYPES)
+ flag = models.CharField(max_length=100)
+ organization_id = HybridCloudForeignKey("sentry.Organization", null=False, on_delete="CASCADE")
+ tags = models.JSONField()
+
+ class Meta:
+ app_label = "flags"
+ db_table = "flags_audit_log"
+ indexes = (models.Index(fields=("flag",)),)
+
+ __repr__ = sane_repr("organization_id", "flag")
diff --git a/src/sentry/flags/providers.py b/src/sentry/flags/providers.py
new file mode 100644
index 0000000000000..32a96b105038a
--- /dev/null
+++ b/src/sentry/flags/providers.py
@@ -0,0 +1,89 @@
+import datetime
+from typing import Any, TypedDict
+
+from sentry.flags.models import ACTION_MAP, CREATED_BY_TYPE_MAP, FlagAuditLogModel
+from sentry.silo.base import SiloLimit
+
+
+def write(rows: list["FlagAuditLogRow"]) -> None:
+ try:
+ FlagAuditLogModel.objects.bulk_create(FlagAuditLogModel(**row) for row in rows)
+ except SiloLimit.AvailabilityError:
+ pass
+
+
+"""Provider definitions.
+
+Provider definitions are pure functions. They accept data and return data. Providers do not
+initiate any IO operations. Instead they return commands in the form of the return type or
+an exception. These commands inform the caller (the endpoint defintion) what IO must be
+emitted to satisfy the request. This is done primarily to improve testability and test
+performance but secondarily to allow easy extension of the endpoint without knowledge of
+the underlying systems.
+"""
+
+
+class FlagAuditLogRow(TypedDict):
+ """A complete flag audit log row instance."""
+
+ action: int
+ created_at: datetime.datetime
+ created_by: str
+ created_by_type: int
+ flag: str
+ organization_id: int
+ tags: dict[str, Any]
+
+
+class DeserializationError(Exception):
+ """The request body could not be deserialized."""
+
+ def __init__(self, errors):
+ self.errors = errors
+
+
+class InvalidProvider(Exception):
+ """An unsupported provider type was specified."""
+
+ ...
+
+
+def handle_provider_event(
+ provider: str,
+ request_data: dict[str, Any],
+ organization_id: int,
+) -> list[FlagAuditLogRow]:
+ raise InvalidProvider(provider)
+
+
+"""Internal flag-pole provider.
+
+Allows us to skip the HTTP endpoint.
+"""
+
+
+class FlagAuditLogItem(TypedDict):
+ """A simplified type which is easier to work with than the row definition."""
+
+ action: str
+ flag: str
+ created_at: datetime.datetime
+ created_by: str
+ tags: dict[str, str]
+
+
+def handle_flag_pole_event_internal(items: list[FlagAuditLogItem], organization_id: int) -> None:
+ write(
+ [
+ {
+ "action": ACTION_MAP[item["action"]],
+ "created_at": item["created_at"],
+ "created_by": item["created_by"],
+ "created_by_type": CREATED_BY_TYPE_MAP["name"],
+ "flag": item["flag"],
+ "organization_id": organization_id,
+ "tags": item["tags"],
+ }
+ for item in items
+ ]
+ )
diff --git a/src/sentry/grouping/api.py b/src/sentry/grouping/api.py
index a7cab08a0b657..19982875b99f3 100644
--- a/src/sentry/grouping/api.py
+++ b/src/sentry/grouping/api.py
@@ -26,7 +26,6 @@
ComponentVariant,
CustomFingerprintVariant,
FallbackVariant,
- KeyedVariants,
SaltedComponentVariant,
)
from sentry.models.grouphash import GroupHash
@@ -43,13 +42,14 @@
@dataclass
class GroupHashInfo:
config: GroupingConfig
+ variants: dict[str, BaseVariant]
hashes: list[str]
grouphashes: list[GroupHash]
existing_grouphash: GroupHash | None
NULL_GROUPING_CONFIG: GroupingConfig = {"id": "", "enhancements": ""}
-NULL_GROUPHASH_INFO = GroupHashInfo(NULL_GROUPING_CONFIG, [], [], None)
+NULL_GROUPHASH_INFO = GroupHashInfo(NULL_GROUPING_CONFIG, {}, [], [], None)
class GroupingConfigNotFound(LookupError):
@@ -234,7 +234,15 @@ def get_fingerprinting_config_for_project(
def apply_server_fingerprinting(event, config, allow_custom_title=True):
- client_fingerprint = event.get("fingerprint")
+ fingerprint_info = {}
+
+ client_fingerprint = event.get("fingerprint", [])
+ client_fingerprint_is_default = len(client_fingerprint) == 1 and is_default_fingerprint_var(
+ client_fingerprint[0]
+ )
+ if client_fingerprint and not client_fingerprint_is_default:
+ fingerprint_info["client_fingerprint"] = client_fingerprint
+
rv = config.get_fingerprint_values_for_event(event)
if rv is not None:
rule, new_fingerprint, attributes = rv
@@ -247,13 +255,10 @@ def apply_server_fingerprinting(event, config, allow_custom_title=True):
# Persist the rule that matched with the fingerprint in the event
# dictionary for later debugging.
- event["_fingerprint_info"] = {
- "client_fingerprint": client_fingerprint,
- "matched_rule": rule.to_json(),
- }
+ fingerprint_info["matched_rule"] = rule.to_json()
- if rule.is_builtin:
- event["_fingerprint_info"]["is_builtin"] = True
+ if fingerprint_info:
+ event["_fingerprint_info"] = fingerprint_info
def _get_calculated_grouping_variants_for_event(
@@ -345,7 +350,7 @@ def get_grouping_variants_for_event(
rv[key] = ComponentVariant(component, context.config)
fingerprint = resolve_fingerprint_values(fingerprint, event.data)
- if fingerprint_info and fingerprint_info.get("is_builtin", False):
+ if (fingerprint_info or {}).get("matched_rule", {}).get("is_builtin") is True:
rv["built-in-fingerprint"] = BuiltInFingerprintVariant(fingerprint, fingerprint_info)
else:
rv["custom-fingerprint"] = CustomFingerprintVariant(fingerprint, fingerprint_info)
@@ -370,18 +375,3 @@ def get_grouping_variants_for_event(
rv["fallback"] = FallbackVariant()
return rv
-
-
-def sort_grouping_variants(variants: dict[str, BaseVariant]) -> KeyedVariants:
- """Sort a sequence of variants into flat variants"""
-
- flat_variants = []
-
- for name, variant in variants.items():
- flat_variants.append((name, variant))
-
- # Sort system variant to the back of the list to resolve ambiguities when
- # choosing primary_hash for Snuba
- flat_variants.sort(key=lambda name_and_variant: 1 if name_and_variant[0] == "system" else 0)
-
- return flat_variants
diff --git a/src/sentry/grouping/component.py b/src/sentry/grouping/component.py
index 61c89ba39c33e..4269c74be4dcf 100644
--- a/src/sentry/grouping/component.py
+++ b/src/sentry/grouping/component.py
@@ -19,6 +19,7 @@
"violation": "violation",
"uri": "URL",
"message": "message",
+ "template": "template",
}
diff --git a/src/sentry/grouping/fingerprinting/__init__.py b/src/sentry/grouping/fingerprinting/__init__.py
index 36ebefdfa2e51..8523ffa587cf6 100644
--- a/src/sentry/grouping/fingerprinting/__init__.py
+++ b/src/sentry/grouping/fingerprinting/__init__.py
@@ -245,7 +245,7 @@ def get_fingerprint_values_for_event(self, event: dict[str, object]) -> None | o
def _from_config_structure(
cls, data: dict[str, Any], bases: Sequence[str] | None = None
) -> Self:
- version = data["version"]
+ version = data.get("version", VERSION)
if version != VERSION:
raise ValueError("Unknown version")
return cls(
diff --git a/src/sentry/grouping/ingest/config.py b/src/sentry/grouping/ingest/config.py
index fc7a4741151c5..c620c8f559e63 100644
--- a/src/sentry/grouping/ingest/config.py
+++ b/src/sentry/grouping/ingest/config.py
@@ -8,7 +8,7 @@
from django.conf import settings
from django.core.cache import cache
-from sentry import features, options
+from sentry import options
from sentry.grouping.strategies.configurations import CONFIGURATIONS
from sentry.locks import locks
from sentry.models.project import Project
@@ -23,11 +23,6 @@
CONFIGS_TO_DEPRECATE = ()
-# Used by getsentry script. Remove it once the script has been updated to call update_grouping_config_if_needed
-def update_grouping_config_if_permitted(project: Project) -> None:
- update_grouping_config_if_needed(project, "script")
-
-
def update_grouping_config_if_needed(project: Project, source: str) -> None:
current_config = project.get_option("sentry:grouping_config")
new_config = DEFAULT_GROUPING_CONFIG
@@ -53,8 +48,8 @@ def update_grouping_config_if_needed(project: Project, source: str) -> None:
from sentry import audit_log
from sentry.utils.audit import create_system_audit_entry
- # This is when we will stop calculating both old hashes (which we do in an effort to
- # preserve group continuity).
+ # This is when we will stop calculating the old hash in cases where we don't find the new
+ # hash (which we do in an effort to preserve group continuity).
expiry = int(time.time()) + settings.SENTRY_GROUPING_UPDATE_MIGRATION_PHASE
changes: dict[str, str | int] = {"sentry:grouping_config": new_config}
@@ -88,18 +83,3 @@ def is_in_transition(project: Project) -> bool:
secondary_grouping_expiry = project.get_option("sentry:secondary_grouping_expiry")
return bool(secondary_grouping_config) and (secondary_grouping_expiry or 0) >= time.time()
-
-
-def project_uses_optimized_grouping(project: Project) -> bool:
- if options.get("grouping.config_transition.killswitch_enabled"):
- return False
-
- return (
- features.has(
- "organizations:grouping-suppress-unnecessary-secondary-hash",
- project.organization,
- )
- or (is_in_transition(project))
- # TODO: Yes, this is everyone - this check will soon be removed entirely
- or project.id % 5 < 5 # 100% of all non-transition projects
- )
diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py
index 681493d5151ed..3c7f977e9af89 100644
--- a/src/sentry/grouping/ingest/hashing.py
+++ b/src/sentry/grouping/ingest/hashing.py
@@ -14,7 +14,6 @@
NULL_GROUPING_CONFIG,
BackgroundGroupingConfigLoader,
GroupingConfig,
- GroupingConfigNotFound,
SecondaryGroupingConfigLoader,
apply_server_fingerprinting,
get_fingerprinting_config_for_project,
@@ -22,7 +21,7 @@
load_grouping_config,
)
from sentry.grouping.ingest.config import is_in_transition
-from sentry.grouping.ingest.metrics import record_hash_calculation_metrics
+from sentry.grouping.variants import BaseVariant
from sentry.models.grouphash import GroupHash
from sentry.models.grouphashmetadata import GroupHashMetadata
from sentry.models.project import Project
@@ -39,10 +38,10 @@
def _calculate_event_grouping(
project: Project, event: Event, grouping_config: GroupingConfig
-) -> list[str]:
+) -> tuple[list[str], dict[str, BaseVariant]]:
"""
- Main entrypoint for modifying/enhancing and grouping an event, writes
- hashes back into event payload.
+ Calculate hashes for the event using the given grouping config, add them to the event data, and
+ return them, along with the variants data upon which they're based.
"""
metric_tags: MutableTags = {
"grouping_config": grouping_config["id"],
@@ -61,7 +60,7 @@ def _calculate_event_grouping(
# The active grouping config was put into the event in the
# normalize step before. We now also make sure that the
# fingerprint was set to `'{{ default }}' just in case someone
- # removed it from the payload. The call to get_hashes will then
+ # removed it from the payload. The call to `get_hashes_and_variants` will then
# look at `grouping_config` to pick the right parameters.
event.data["fingerprint"] = event.data.data.get("fingerprint") or ["{{ default }}"]
apply_server_fingerprinting(
@@ -71,18 +70,9 @@ def _calculate_event_grouping(
)
with metrics.timer("event_manager.event.get_hashes", tags=metric_tags):
- # TODO: It's not clear we can even hit `GroupingConfigNotFound` here - this is leftover
- # from a time before we started separately retrieving the grouping config and passing it
- # directly to `get_hashes`. Now that we do that, a bogus config will get replaced by the
- # default long before we get here. Should we consolidate bogus config handling into the
- # code actually getting the config?
- try:
- hashes = event.get_hashes(loaded_grouping_config)
- except GroupingConfigNotFound:
- event.data["grouping_config"] = get_grouping_config_dict_for_project(project)
- hashes = event.get_hashes()
+ hashes, variants = event.get_hashes_and_variants(loaded_grouping_config)
- return hashes
+ return (hashes, variants)
def maybe_run_background_grouping(project: Project, job: Job) -> None:
@@ -111,12 +101,12 @@ def _calculate_background_grouping(
"sdk": normalized_sdk_tag_from_event(event.data),
}
with metrics.timer("event_manager.background_grouping", tags=metric_tags):
- return _calculate_event_grouping(project, event, config)
+ return _calculate_event_grouping(project, event, config)[0]
def maybe_run_secondary_grouping(
project: Project, job: Job, metric_tags: MutableTags
-) -> tuple[GroupingConfig, list[str]]:
+) -> tuple[GroupingConfig, list[str], dict[str, BaseVariant]]:
"""
    If the project is in a grouping config transition phase, calculate a set of secondary hashes for
the job's event.
@@ -130,27 +120,29 @@ def maybe_run_secondary_grouping(
secondary_grouping_config = SecondaryGroupingConfigLoader().get_config_dict(project)
secondary_hashes = _calculate_secondary_hashes(project, job, secondary_grouping_config)
- return (secondary_grouping_config, secondary_hashes)
+ # Return an empty variants dictionary because we need the signature of this function to match
+ # that of `run_primary_grouping` (so we have to return something), but we don't ever actually
+ # need the variant information
+ return (secondary_grouping_config, secondary_hashes, {})
def _calculate_secondary_hashes(
project: Project, job: Job, secondary_grouping_config: GroupingConfig
) -> list[str]:
- """Calculate secondary hash for event using a fallback grouping config for a period of time.
- This happens when we upgrade all projects that have not opted-out to automatic upgrades plus
- when the customer changes the grouping config.
- This causes extra load in save_event processing.
"""
- secondary_hashes = []
+ Calculate hashes based on an older grouping config, so that unknown hashes calculated by the
+ current config can be matched to an existing group if there is one.
+ """
+ secondary_hashes: list[str] = []
try:
with sentry_sdk.start_span(
op="event_manager",
- description="event_manager.save.secondary_calculate_event_grouping",
+ name="event_manager.save.secondary_calculate_event_grouping",
):
# create a copy since `_calculate_event_grouping` modifies the event to add all sorts
- # of grouping info and we don't want the backup grouping data in there
+ # of grouping info and we don't want the secondary grouping data in there
event_copy = copy.deepcopy(job["event"])
- secondary_hashes = _calculate_event_grouping(
+ secondary_hashes, _ = _calculate_event_grouping(
project, event_copy, secondary_grouping_config
)
except Exception as err:
@@ -161,9 +153,9 @@ def _calculate_secondary_hashes(
def run_primary_grouping(
project: Project, job: Job, metric_tags: MutableTags
-) -> tuple[GroupingConfig, list[str]]:
+) -> tuple[GroupingConfig, list[str], dict[str, BaseVariant]]:
"""
- Get the primary grouping config and primary hashes for the event.
+ Get the primary grouping config, primary hashes, and variants for the event.
"""
with metrics.timer("event_manager.load_grouping_config"):
grouping_config = get_grouping_config_dict_for_project(project)
@@ -172,29 +164,33 @@ def run_primary_grouping(
with (
sentry_sdk.start_span(
op="event_manager",
- description="event_manager.save.calculate_event_grouping",
+ name="event_manager.save.calculate_event_grouping",
),
metrics.timer("event_manager.calculate_event_grouping", tags=metric_tags),
):
- hashes = _calculate_primary_hashes(project, job, grouping_config)
+ hashes, variants = _calculate_primary_hashes_and_variants(project, job, grouping_config)
- return (grouping_config, hashes)
+ return (grouping_config, hashes, variants)
-def _calculate_primary_hashes(
+def _calculate_primary_hashes_and_variants(
project: Project, job: Job, grouping_config: GroupingConfig
-) -> list[str]:
+) -> tuple[list[str], dict[str, BaseVariant]]:
"""
- Get the primary hash for the event.
+ Get the primary hash and variants for the event.
This is pulled out into a separate function mostly in order to make testing easier.
"""
return _calculate_event_grouping(project, job["event"], grouping_config)
-def find_existing_grouphash(
+def find_grouphash_with_group(
grouphashes: Sequence[GroupHash],
) -> GroupHash | None:
+ """
+ Search in the list of given `GroupHash` records for one which has a group assigned to it, and
+ return the first one found. (Assumes grouphashes have already been sorted in priority order.)
+ """
for group_hash in grouphashes:
if group_hash.group_id is not None:
return group_hash
@@ -212,47 +208,38 @@ def find_existing_grouphash(
return None
-def get_hash_values(
- project: Project,
- job: Job,
- metric_tags: MutableTags,
-) -> tuple[list[str], list[str]]:
- # Background grouping is a way for us to get performance metrics for a new
- # config without having it actually affect on how events are grouped. It runs
- # either before or after the main grouping logic, depending on the option value.
- maybe_run_background_grouping(project, job)
-
- secondary_grouping_config, secondary_hashes = maybe_run_secondary_grouping(
- project, job, metric_tags
- )
-
- primary_grouping_config, primary_hashes = run_primary_grouping(project, job, metric_tags)
-
- record_hash_calculation_metrics(
- primary_grouping_config,
- primary_hashes,
- secondary_grouping_config,
- secondary_hashes,
- )
-
- return (primary_hashes, secondary_hashes)
-
+def get_or_create_grouphashes(
+ project: Project, hashes: Sequence[str], grouping_config: str
+) -> list[GroupHash]:
+ is_secondary = grouping_config != project.get_option("sentry:grouping_config")
+ grouphashes: list[GroupHash] = []
-def get_or_create_grouphashes(project: Project, hashes: Sequence[str]) -> list[GroupHash]:
- grouphashes = []
+ # The only utility of secondary hashes is to link new primary hashes to an existing group.
+ # Secondary hashes which are also new are therefore of no value, so there's no need to store or
+ # annotate them and we can bail now.
+ if is_secondary and not GroupHash.objects.filter(project=project, hash__in=hashes).exists():
+ return grouphashes
for hash_value in hashes:
grouphash, created = GroupHash.objects.get_or_create(project=project, hash=hash_value)
# TODO: Do we want to expand this to backfill metadata for existing grouphashes? If we do,
# we'll have to override the metadata creation date for them.
- if (
- created
- and options.get("grouping.grouphash_metadata.ingestion_writes_enabled")
- and features.has("organizations:grouphash-metadata-creation", project.organization)
+ if options.get("grouping.grouphash_metadata.ingestion_writes_enabled") and features.has(
+ "organizations:grouphash-metadata-creation", project.organization
):
- # For now, this just creates a record with a creation timestamp
- GroupHashMetadata.objects.create(grouphash=grouphash)
+ if created:
+ GroupHashMetadata.objects.create(
+ grouphash=grouphash,
+ latest_grouping_config=grouping_config,
+ )
+ elif (
+ grouphash.metadata and grouphash.metadata.latest_grouping_config != grouping_config
+ ):
+ # Keep track of the most recent config which computed this hash, so that once a
+ # config is deprecated, we can clear out the GroupHash records which are no longer
+ # being produced
+ grouphash.metadata.update(latest_grouping_config=grouping_config)
grouphashes.append(grouphash)
diff --git a/src/sentry/grouping/ingest/metrics.py b/src/sentry/grouping/ingest/metrics.py
index deabbb9184980..4f76adf9340fa 100644
--- a/src/sentry/grouping/ingest/metrics.py
+++ b/src/sentry/grouping/ingest/metrics.py
@@ -6,7 +6,7 @@
from sentry import options
from sentry.grouping.api import GroupingConfig
-from sentry.grouping.ingest.config import is_in_transition, project_uses_optimized_grouping
+from sentry.grouping.ingest.config import is_in_transition
from sentry.models.project import Project
from sentry.utils import metrics
from sentry.utils.tag_normalization import normalized_sdk_tag_from_event
@@ -20,15 +20,19 @@
def record_hash_calculation_metrics(
+ project: Project,
primary_config: GroupingConfig,
primary_hashes: list[str],
secondary_config: GroupingConfig,
secondary_hashes: list[str],
+ existing_hash_search_result: str,
) -> None:
has_secondary_hashes = len(secondary_hashes) > 0
+ # In cases where we've computed both primary and secondary hashes, track how often the config
+ # change has changed the resulting hashes
if has_secondary_hashes:
- tags = {
+ hash_comparison_tags = {
"primary_config": primary_config["id"],
"secondary_config": secondary_config["id"],
}
@@ -37,47 +41,36 @@ def record_hash_calculation_metrics(
hashes_match = current_values == secondary_values
if hashes_match:
- tags["result"] = "no change"
+ hash_comparison_tags["result"] = "no change"
else:
shared_hashes = set(current_values) & set(secondary_values)
if len(shared_hashes) > 0:
- tags["result"] = "partial change"
+ hash_comparison_tags["result"] = "partial change"
else:
- tags["result"] = "full change"
+ hash_comparison_tags["result"] = "full change"
metrics.incr(
"grouping.hash_comparison",
sample_rate=options.get("grouping.config_transition.metrics_sample_rate"),
- tags=tags,
+ tags=hash_comparison_tags,
)
-
-# TODO: Once the legacy `_save_aggregate` goes away, this logic can be pulled into
-# `record_hash_calculation_metrics`. Right now it's split up because we don't know the value for
-# `result` at the time the legacy `_save_aggregate` (indirectly) calls `record_hash_calculation_metrics`
-def record_calculation_metric_with_result(
- project: Project,
- has_secondary_hashes: bool,
- result: str,
-) -> None:
-
# Track the total number of grouping calculations done overall, so we can divide by the
# count to get an average number of calculations per event
- tags = {
+ num_calculations_tags = {
"in_transition": str(is_in_transition(project)),
- "using_transition_optimization": str(project_uses_optimized_grouping(project)),
- "result": result,
+ "result": existing_hash_search_result,
}
metrics.incr(
"grouping.event_hashes_calculated",
sample_rate=options.get("grouping.config_transition.metrics_sample_rate"),
- tags=tags,
+ tags=num_calculations_tags,
)
metrics.incr(
"grouping.total_calculations",
amount=2 if has_secondary_hashes else 1,
sample_rate=options.get("grouping.config_transition.metrics_sample_rate"),
- tags=tags,
+ tags=num_calculations_tags,
)
diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py
index b03e93f34545b..7887d893fece7 100644
--- a/src/sentry/grouping/ingest/seer.py
+++ b/src/sentry/grouping/ingest/seer.py
@@ -10,6 +10,7 @@
from sentry.conf.server import SEER_SIMILARITY_MODEL_VERSION
from sentry.eventstore.models import Event
from sentry.grouping.grouping_info import get_grouping_info_from_variants
+from sentry.grouping.variants import BaseVariant
from sentry.models.grouphash import GroupHash
from sentry.models.project import Project
from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer
@@ -27,7 +28,7 @@
logger = logging.getLogger("sentry.events.grouping")
-def should_call_seer_for_grouping(event: Event) -> bool:
+def should_call_seer_for_grouping(event: Event, variants: dict[str, BaseVariant]) -> bool:
"""
Use event content, feature flags, rate limits, killswitches, seer health, etc. to determine
whether a call to Seer should be made.
@@ -42,7 +43,7 @@ def should_call_seer_for_grouping(event: Event) -> bool:
return False
if (
- _has_customized_fingerprint(event)
+ _has_customized_fingerprint(event, variants)
or killswitch_enabled(project.id, event)
or _circuit_breaker_broken(event, project)
# **Do not add any new checks after this.** The rate limit check MUST remain the last of all
@@ -79,7 +80,7 @@ def _project_has_similarity_grouping_enabled(project: Project) -> bool:
# combined with some other value). To the extent to which we're then using this function to decide
# whether or not to call Seer, this means that the calculations giving rise to the default part of
# the value never involve Seer input. In the long run, we probably want to change that.
-def _has_customized_fingerprint(event: Event) -> bool:
+def _has_customized_fingerprint(event: Event, variants: dict[str, BaseVariant]) -> bool:
fingerprint = event.data.get("fingerprint", [])
if "{{ default }}" in fingerprint:
@@ -97,7 +98,6 @@ def _has_customized_fingerprint(event: Event) -> bool:
return True
# Fully customized fingerprint (from either us or the user)
- variants = event.get_grouping_variants()
fingerprint_variant = variants.get("custom-fingerprint") or variants.get("built-in-fingerprint")
if fingerprint_variant:
@@ -178,6 +178,7 @@ def _circuit_breaker_broken(event: Event, project: Project) -> bool:
def get_seer_similar_issues(
event: Event,
+ variants: dict[str, BaseVariant],
num_neighbors: int = 1,
) -> tuple[dict[str, Any], GroupHash | None]:
"""
@@ -186,9 +187,7 @@ def get_seer_similar_issues(
should go in (if any), or None if no neighbor was near enough.
"""
event_hash = event.get_primary_hash()
- stacktrace_string = get_stacktrace_string(
- get_grouping_info_from_variants(event.get_grouping_variants())
- )
+ stacktrace_string = get_stacktrace_string(get_grouping_info_from_variants(variants))
exception_type = get_path(event.data, "exception", "values", -1, "type")
request_data: SimilarIssuesEmbeddingsRequest = {
@@ -231,25 +230,60 @@ def get_seer_similar_issues(
return (similar_issues_metadata, parent_grouphash)
-def maybe_check_seer_for_matching_grouphash(event: Event) -> GroupHash | None:
+def maybe_check_seer_for_matching_grouphash(
+ event: Event, variants: dict[str, BaseVariant], all_grouphashes: list[GroupHash]
+) -> GroupHash | None:
seer_matched_grouphash = None
- if should_call_seer_for_grouping(event):
+ if should_call_seer_for_grouping(event, variants):
metrics.incr(
"grouping.similarity.did_call_seer",
sample_rate=options.get("seer.similarity.metrics_sample_rate"),
tags={"call_made": True, "blocker": "none"},
)
+
try:
# If no matching group is found in Seer, we'll still get back result
# metadata, but `seer_matched_grouphash` will be None
- seer_response_data, seer_matched_grouphash = get_seer_similar_issues(event)
- event.data["seer_similarity"] = seer_response_data
-
- # Insurance - in theory we shouldn't ever land here
- except Exception as e:
+ seer_response_data, seer_matched_grouphash = get_seer_similar_issues(event, variants)
+ except Exception as e: # Insurance - in theory we shouldn't ever land here
sentry_sdk.capture_exception(
e, tags={"event": event.event_id, "project": event.project.id}
)
+ return None
+
+ # Find the GroupHash corresponding to the hash value sent to Seer
+ #
+ # TODO: There shouldn't actually be more than one hash in `all_grouphashes`, but
+    #    a) there's a bug in our precedence logic which leads to both in-app and system stacktrace
+ # hashes being marked as contributing and making it through to this point, and
+ # b) because of how we used to compute secondary and primary hashes, we keep secondary
+ # hashes even when we don't need them.
+ # Once those two problems are fixed, there will only be one hash passed to this function
+ # and we won't have to do this search to find the right one to update.
+ primary_hash = event.get_primary_hash()
+ grouphash_sent = list(
+ filter(lambda grouphash: grouphash.hash == primary_hash, all_grouphashes)
+ )[0]
+
+ # Update the relevant GroupHash with Seer results
+ gh_metadata = grouphash_sent.metadata
+ if gh_metadata:
+ gh_metadata.update(
+ # Technically the time of the metadata record creation and the time of the Seer
+ # request will be some milliseconds apart, but a) the difference isn't meaningful
+ # for us, and b) forcing them to be the same (rather than just close) lets us use
+ # their equality as a signal that the Seer call happened during ingest rather than
+ # during a backfill, without having to store that information separately.
+ seer_date_sent=gh_metadata.date_added,
+ seer_event_sent=event.event_id,
+ seer_model=seer_response_data["similarity_model_version"],
+ seer_matched_grouphash=seer_matched_grouphash,
+ seer_match_distance=(
+ seer_response_data["results"][0]["stacktrace_distance"]
+ if seer_matched_grouphash
+ else None
+ ),
+ )
return seer_matched_grouphash
diff --git a/src/sentry/grouping/ingest/utils.py b/src/sentry/grouping/ingest/utils.py
index f18049ca8ce46..5e7c3261650f7 100644
--- a/src/sentry/grouping/ingest/utils.py
+++ b/src/sentry/grouping/ingest/utils.py
@@ -48,7 +48,7 @@ def check_for_group_creation_load_shed(project: Project, event: Event) -> None:
raise HashDiscarded("Load shedding group creation", reason="load_shed")
-def check_for_category_mismatch(group: Group) -> bool:
+def is_non_error_type_group(group: Group) -> bool:
"""
Make sure an error event hasn't hashed to a value assigned to a non-error-type group
"""
diff --git a/src/sentry/grouping/variants.py b/src/sentry/grouping/variants.py
index 261d3ba8fdf56..402735122b97a 100644
--- a/src/sentry/grouping/variants.py
+++ b/src/sentry/grouping/variants.py
@@ -58,7 +58,7 @@ def get_hash(self) -> str | None:
class FallbackVariant(BaseVariant):
- id = "fallback"
+ type = "fallback"
contributes = True
def get_hash(self) -> str | None:
diff --git a/src/sentry/hybridcloud/outbox/base.py b/src/sentry/hybridcloud/outbox/base.py
index addc0f8f859ee..abc4b3144bf9c 100644
--- a/src/sentry/hybridcloud/outbox/base.py
+++ b/src/sentry/hybridcloud/outbox/base.py
@@ -2,7 +2,7 @@
import contextlib
import logging
-from collections.abc import Collection, Generator, Iterable, Mapping, Sequence
+from collections.abc import Collection, Generator, Iterable, Mapping
from typing import TYPE_CHECKING, Any, Protocol, TypeVar
from django.db import connections, router, transaction
@@ -113,7 +113,7 @@ def bulk_create(self, objs: Iterable[_RM], *args: Any, **kwds: Any) -> list[_RM]
return super().bulk_create(tuple_of_objs, *args, **kwds)
def bulk_update(
- self, objs: Iterable[_RM], fields: Sequence[str], *args: Any, **kwds: Any
+ self, objs: Iterable[_RM], fields: Iterable[str], *args: Any, **kwds: Any
) -> Any:
from sentry.hybridcloud.models.outbox import outbox_context
@@ -297,7 +297,7 @@ def bulk_create(self, objs: Iterable[_CM], *args: Any, **kwds: Any) -> list[_CM]
return super().bulk_create(tuple_of_objs, *args, **kwds)
def bulk_update(
- self, objs: Iterable[_CM], fields: Sequence[str], *args: Any, **kwds: Any
+ self, objs: Iterable[_CM], fields: Iterable[str], *args: Any, **kwds: Any
) -> Any:
from sentry.hybridcloud.models.outbox import outbox_context
diff --git a/src/sentry/hybridcloud/rpc/pagination.py b/src/sentry/hybridcloud/rpc/pagination.py
index 5674ef6356053..9579b950a7c57 100644
--- a/src/sentry/hybridcloud/rpc/pagination.py
+++ b/src/sentry/hybridcloud/rpc/pagination.py
@@ -46,7 +46,7 @@ def do_hybrid_cloud_pagination(
cursor = get_cursor(self.encoded_cursor, cursor_cls)
with sentry_sdk.start_span(
op="hybrid_cloud.paginate.get_result",
- description=description,
+ name=description,
) as span:
annotate_span_with_pagination_args(span, self.per_page)
paginator = get_paginator(
diff --git a/src/sentry/hybridcloud/rpc/service.py b/src/sentry/hybridcloud/rpc/service.py
index 3b85e7f332c28..d630c0ca9862e 100644
--- a/src/sentry/hybridcloud/rpc/service.py
+++ b/src/sentry/hybridcloud/rpc/service.py
@@ -586,7 +586,7 @@ def _open_request_context(self) -> Generator[None]:
timer = metrics.timer("hybrid_cloud.dispatch_rpc.duration", tags=self._metrics_tags())
span = sentry_sdk.start_span(
op="hybrid_cloud.dispatch_rpc",
- description=f"rpc to {self.service_name}.{self.method_name}",
+ name=f"rpc to {self.service_name}.{self.method_name}",
)
with span, timer:
yield
diff --git a/src/sentry/identity/bitbucket/provider.py b/src/sentry/identity/bitbucket/provider.py
index 608c410fa8d67..ff01318f8c701 100644
--- a/src/sentry/identity/bitbucket/provider.py
+++ b/src/sentry/identity/bitbucket/provider.py
@@ -18,10 +18,21 @@ def get_pipeline_views(self):
class BitbucketLoginView(PipelineView):
def dispatch(self, request: Request, pipeline) -> HttpResponse:
- jwt = request.GET.get("jwt")
- if jwt is None:
- return self.redirect(
- "https://bitbucket.org/site/addons/authorize?descriptor_uri=%s"
- % (absolute_uri("/extensions/bitbucket/descriptor/"),)
- )
- return pipeline.next_step()
+ from sentry.integrations.base import IntegrationDomain
+ from sentry.integrations.utils.metrics import (
+ IntegrationPipelineViewEvent,
+ IntegrationPipelineViewType,
+ )
+
+ with IntegrationPipelineViewEvent(
+ IntegrationPipelineViewType.IDENTITY_LINK,
+ IntegrationDomain.SOURCE_CODE_MANAGEMENT,
+ pipeline.provider.key,
+ ).capture():
+ jwt = request.GET.get("jwt")
+ if jwt is None:
+ return self.redirect(
+ "https://bitbucket.org/site/addons/authorize?descriptor_uri=%s"
+ % (absolute_uri("/extensions/bitbucket/descriptor/"),)
+ )
+ return pipeline.next_step()
diff --git a/src/sentry/identity/pipeline.py b/src/sentry/identity/pipeline.py
index c4c577ed32295..ae651bba12ac4 100644
--- a/src/sentry/identity/pipeline.py
+++ b/src/sentry/identity/pipeline.py
@@ -5,7 +5,12 @@
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
-from sentry import features
+from sentry import features, options
+from sentry.integrations.base import IntegrationDomain
+from sentry.integrations.utils.metrics import (
+ IntegrationPipelineViewEvent,
+ IntegrationPipelineViewType,
+)
from sentry.models.organization import Organization
from sentry.organizations.services.organization.model import RpcOrganization
from sentry.pipeline import Pipeline, PipelineProvider
@@ -46,41 +51,49 @@ def get_provider(self, provider_key: str, **kwargs) -> PipelineProvider:
"organizations:migrate-azure-devops-integration", organization
):
provider_key = "vsts_new"
+ # TODO(iamrajjoshi): Delete this after Azure DevOps migration is complete
+ if provider_key == "vsts_login" and options.get("vsts.social-auth-migration"):
+ provider_key = "vsts_login_new"
return super().get_provider(provider_key)
def finish_pipeline(self):
- # NOTE: only reached in the case of linking a new identity
- # via Social Auth pipelines
- identity = self.provider.build_identity(self.state.data)
-
- Identity.objects.link_identity(
- user=self.request.user,
- idp=self.provider_model,
- external_id=identity["id"],
- should_reattach=False,
- defaults={
- "scopes": identity.get("scopes", []),
- "data": identity.get("data", {}),
- },
- )
-
- messages.add_message(
- self.request,
- messages.SUCCESS,
- IDENTITY_LINKED.format(identity_provider=self.provider.name),
- )
- metrics.incr(
- "identity_provider_pipeline.finish_pipeline",
- tags={
- "provider": self.provider.key,
- },
- skip_internal=False,
- )
-
- self.state.clear()
-
- # TODO(epurkhiser): When we have more identities and have built out an
- # identity management page that supports these new identities (not
- # social-auth ones), redirect to the identities page.
- return HttpResponseRedirect(reverse("sentry-account-settings"))
+ with IntegrationPipelineViewEvent(
+ IntegrationPipelineViewType.IDENTITY_LINK,
+ IntegrationDomain.IDENTITY,
+ self.provider.key,
+ ).capture():
+ # NOTE: only reached in the case of linking a new identity
+ # via Social Auth pipelines
+ identity = self.provider.build_identity(self.state.data)
+
+ Identity.objects.link_identity(
+ user=self.request.user,
+ idp=self.provider_model,
+ external_id=identity["id"],
+ should_reattach=False,
+ defaults={
+ "scopes": identity.get("scopes", []),
+ "data": identity.get("data", {}),
+ },
+ )
+
+ messages.add_message(
+ self.request,
+ messages.SUCCESS,
+ IDENTITY_LINKED.format(identity_provider=self.provider.name),
+ )
+ metrics.incr(
+ "identity_provider_pipeline.finish_pipeline",
+ tags={
+ "provider": self.provider.key,
+ },
+ skip_internal=False,
+ )
+
+ self.state.clear()
+
+ # TODO(epurkhiser): When we have more identities and have built out an
+ # identity management page that supports these new identities (not
+ # social-auth ones), redirect to the identities page.
+ return HttpResponseRedirect(reverse("sentry-account-settings"))
diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py
index 30b6260a3a5a1..190ffdaebea09 100644
--- a/src/sentry/incidents/action_handlers.py
+++ b/src/sentry/incidents/action_handlers.py
@@ -370,7 +370,7 @@ def generate_incident_trigger_email_context(
threshold: None | str | float = None
if alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC:
threshold_prefix_string = alert_rule.detection_type.title()
- threshold = f"({alert_rule.sensitivity} sensitivity)"
+ threshold = f"({alert_rule.sensitivity} responsiveness)"
alert_link_params["type"] = "anomaly_detection"
else:
threshold_prefix_string = ">" if show_greater_than_string else "<"
diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_anomalies.py b/src/sentry/incidents/endpoints/organization_alert_rule_anomalies.py
index 57f754ba10273..f3d48c9d779ae 100644
--- a/src/sentry/incidents/endpoints/organization_alert_rule_anomalies.py
+++ b/src/sentry/incidents/endpoints/organization_alert_rule_anomalies.py
@@ -52,7 +52,9 @@ def get(self, request: Request, organization: Organization, alert_rule: AlertRul
"""
Return a list of anomalies for a metric alert rule.
"""
- if not features.has("organizations:anomaly-detection-alerts", organization):
+ if not features.has(
+ "organizations:anomaly-detection-alerts", organization
+ ) and not features.has("organizations:anomaly-detection-rollout", organization):
raise ResourceDoesNotExist("Your organization does not have access to this feature.")
# NOTE: this will break if we ever do more than one project per alert rule
diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_index.py
index 91178a4eeed08..e7bae6fd35570 100644
--- a/src/sentry/incidents/endpoints/organization_alert_rule_index.py
+++ b/src/sentry/incidents/endpoints/organization_alert_rule_index.py
@@ -54,6 +54,7 @@
from sentry.sentry_apps.services.app import app_service
from sentry.signals import alert_rule_created
from sentry.snuba.dataset import Dataset
+from sentry.snuba.models import SnubaQuery
from sentry.uptime.models import (
ProjectUptimeSubscription,
ProjectUptimeSubscriptionMode,
@@ -121,6 +122,13 @@ def create_metric_alert(
if not serializer.is_valid():
raise ValidationError(serializer.errors)
+        # Note: the serializer will already have raised an error above if there were no
+        # triggers at all, so it's safe to iterate over them here
+ for trigger in data["triggers"]:
+ if not trigger.get("actions", []):
+ raise ValidationError(
+ "Each trigger must have an associated action for this alert to fire."
+ )
+
trigger_sentry_app_action_creators_for_incidents(serializer.validated_data)
if get_slack_actions_with_async_lookups(organization, request.user, request.data):
# need to kick off an async job for Slack
@@ -152,9 +160,16 @@ def create_metric_alert(
is_api_token=request.auth is not None,
duplicate_rule=duplicate_rule,
wizard_v3=wizard_v3,
+ query_type=self.get_query_type_description(data.get("queryType", None)),
)
return Response(serialize(alert_rule, request.user), status=status.HTTP_201_CREATED)
+ def get_query_type_description(self, value):
+ try:
+ return SnubaQuery.Type(value).name
+ except ValueError:
+ return "Unknown"
+
@region_silo_endpoint
class OrganizationCombinedRuleIndexEndpoint(OrganizationEndpoint):
@@ -217,9 +232,6 @@ def get(self, request: Request, organization) -> Response:
),
)
- if not features.has("organizations:uptime-rule-api", organization):
- uptime_rules = ProjectUptimeSubscription.objects.none()
-
if not features.has("organizations:performance-view", organization):
# Filter to only error alert rules
alert_rules = alert_rules.filter(snuba_query__dataset=Dataset.Events.value)
diff --git a/src/sentry/incidents/grouptype.py b/src/sentry/incidents/grouptype.py
new file mode 100644
index 0000000000000..7a7d0dc6900d5
--- /dev/null
+++ b/src/sentry/incidents/grouptype.py
@@ -0,0 +1,32 @@
+from dataclasses import dataclass
+
+from sentry.incidents.utils.types import QuerySubscriptionUpdate
+from sentry.issues.grouptype import GroupCategory, GroupType
+from sentry.ratelimits.sliding_windows import Quota
+from sentry.types.group import PriorityLevel
+from sentry.workflow_engine.models import DataPacket
+from sentry.workflow_engine.models.detector import DetectorEvaluationResult, DetectorHandler
+
+
+# TODO: This will be a stateful detector when we build that abstraction
+class MetricAlertDetectorHandler(DetectorHandler[QuerySubscriptionUpdate]):
+ def evaluate(
+ self, data_packet: DataPacket[QuerySubscriptionUpdate]
+ ) -> list[DetectorEvaluationResult]:
+ # TODO: Implement
+ return []
+
+
+# Example GroupType and detector handler for metric alerts. We don't create these issues yet, but we'll use something
+# like these when we're sending issues as alerts
+@dataclass(frozen=True)
+class MetricAlertFire(GroupType):
+ type_id = 8001
+ slug = "metric_alert_fire"
+ description = "Metric alert fired"
+ category = GroupCategory.METRIC_ALERT.value
+ creation_quota = Quota(3600, 60, 100)
+ default_priority = PriorityLevel.HIGH
+ enable_auto_resolve = False
+ enable_escalation_detection = False
+ detector_handler = MetricAlertDetectorHandler
diff --git a/src/sentry/incidents/logic.py b/src/sentry/incidents/logic.py
index f09b8fbdb1145..35b68b4342243 100644
--- a/src/sentry/incidents/logic.py
+++ b/src/sentry/incidents/logic.py
@@ -15,15 +15,14 @@
from django.db.models.signals import post_save
from django.forms import ValidationError
from django.utils import timezone as django_timezone
-from parsimonious.exceptions import ParseError
from snuba_sdk import Column, Condition, Limit, Op
-from urllib3.exceptions import MaxRetryError, TimeoutError
from sentry import analytics, audit_log, features, quotas
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.auth.access import SystemAccess
from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, ObjectStatus
from sentry.db.models import Model
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.incidents import tasks
from sentry.incidents.models.alert_rule import (
AlertRule,
@@ -62,7 +61,6 @@
from sentry.models.notificationaction import ActionService, ActionTarget
from sentry.models.organization import Organization
from sentry.models.project import Project
-from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.relay.config.metric_extraction import on_demand_metrics_feature_flags
from sentry.search.events.builder.base import BaseQueryBuilder
from sentry.search.events.constants import (
@@ -71,7 +69,7 @@
)
from sentry.search.events.fields import is_function, resolve_field
from sentry.seer.anomaly_detection.delete_rule import delete_rule_in_seer
-from sentry.seer.anomaly_detection.store_data import send_historical_data_to_seer
+from sentry.seer.anomaly_detection.store_data import send_new_rule_data, update_rule_data
from sentry.sentry_apps.services.app import RpcSentryAppInstallation, app_service
from sentry.shared_integrations.exceptions import (
ApiTimeoutError,
@@ -481,6 +479,7 @@ class AlertRuleNameAlreadyUsedError(Exception):
Dataset.Transactions: SnubaQuery.Type.PERFORMANCE,
Dataset.PerformanceMetrics: SnubaQuery.Type.PERFORMANCE,
Dataset.Metrics: SnubaQuery.Type.CRASH_RATE,
+ Dataset.EventsAnalyticsPlatform: SnubaQuery.Type.PERFORMANCE,
}
@@ -566,22 +565,28 @@ def create_alert_rule(
:return: The created `AlertRule`
"""
+ has_anomaly_detection = features.has(
+ "organizations:anomaly-detection-alerts", organization
+ ) and features.has("organizations:anomaly-detection-rollout", organization)
+
+ if detection_type == AlertRuleDetectionType.DYNAMIC.value and not has_anomaly_detection:
+ raise ResourceDoesNotExist("Your organization does not have access to this feature.")
+
if monitor_type == AlertRuleMonitorTypeInt.ACTIVATED and not activation_condition:
raise ValidationError("Activation condition required for activated alert rule")
- if detection_type == AlertRuleDetectionType.DYNAMIC:
- resolution = time_window
- else:
- resolution = get_alert_resolution(time_window, organization)
if detection_type == AlertRuleDetectionType.DYNAMIC:
+ resolution = time_window
# NOTE: we hardcode seasonality for EA
seasonality = AlertRuleSeasonality.AUTO
- if not (sensitivity):
+ if not sensitivity:
raise ValidationError("Dynamic alerts require a sensitivity level")
if time_window not in DYNAMIC_TIME_WINDOWS:
raise ValidationError(INVALID_TIME_WINDOW)
+ if "is:unresolved" in query:
+ raise ValidationError("Dynamic alerts do not support 'is:unresolved' queries")
else:
- # NOTE: we hardcode seasonality for EA
+ resolution = get_alert_resolution(time_window, organization)
seasonality = None
if sensitivity:
raise ValidationError("Sensitivity is not a valid field for this alert type")
@@ -652,31 +657,8 @@ def create_alert_rule(
AlertRuleExcludedProjects.objects.bulk_create(exclusions)
if alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC.value:
- if not features.has("organizations:anomaly-detection-alerts", organization):
- alert_rule.delete()
- raise ResourceDoesNotExist(
- "Your organization does not have access to this feature."
- )
-
- try:
- # NOTE: if adding a new metric alert type, take care to check that it's handled here
- rule_status = send_historical_data_to_seer(
- alert_rule=alert_rule, project=projects[0]
- )
- if rule_status == AlertRuleStatus.NOT_ENOUGH_DATA:
- # if we don't have at least seven days worth of data, then the dynamic alert won't fire
- alert_rule.update(status=AlertRuleStatus.NOT_ENOUGH_DATA.value)
- except (TimeoutError, MaxRetryError):
- alert_rule.delete()
- raise TimeoutError("Failed to send data to Seer - cannot create alert rule.")
- except ParseError:
- alert_rule.delete()
- raise ParseError("Failed to parse Seer store data response")
- except (ValidationError, Exception):
- alert_rule.delete()
- raise
- else:
- metrics.incr("anomaly_detection_alert.created")
+ # NOTE: if adding a new metric alert type, take care to check that it's handled here
+ send_new_rule_data(alert_rule, projects[0], snuba_query)
if user:
create_audit_entry_from_user(
@@ -932,35 +914,17 @@ def update_alert_rule(
updated_fields["team_id"] = alert_rule.team_id
if detection_type == AlertRuleDetectionType.DYNAMIC:
- if not features.has("organizations:anomaly-detection-alerts", organization):
+ if not features.has(
+ "organizations:anomaly-detection-alerts", organization
+ ) and not features.has("organizations:anomaly-detection-rollout", organization):
raise ResourceDoesNotExist(
"Your organization does not have access to this feature."
)
-
- if updated_fields.get("detection_type") == AlertRuleDetectionType.DYNAMIC and (
- alert_rule.detection_type != AlertRuleDetectionType.DYNAMIC or query or aggregate
- ):
- for k, v in updated_fields.items():
- setattr(alert_rule, k, v)
-
- try:
- # NOTE: if adding a new metric alert type, take care to check that it's handled here
- rule_status = send_historical_data_to_seer(
- alert_rule=alert_rule,
- project=projects[0] if projects else alert_rule.projects.get(),
- )
- if rule_status == AlertRuleStatus.NOT_ENOUGH_DATA:
- # if we don't have at least seven days worth of data, then the dynamic alert won't fire
- alert_rule.update(status=AlertRuleStatus.NOT_ENOUGH_DATA.value)
- except (TimeoutError, MaxRetryError):
- raise TimeoutError("Failed to send data to Seer - cannot update alert rule.")
- except ParseError:
- raise ParseError(
- "Failed to parse Seer store data response - cannot update alert rule."
- )
- except (ValidationError, Exception):
- # If there's no historical data available—something went wrong when querying snuba
- raise ValidationError("Failed to send data to Seer - cannot update alert rule.")
+ if query and "is:unresolved" in query:
+ raise ValidationError("Dynamic alerts do not support 'is:unresolved' queries")
+ # NOTE: if adding a new metric alert type, take care to check that it's handled here
+ project = projects[0] if projects else alert_rule.projects.get()
+ update_rule_data(alert_rule, project, snuba_query, updated_fields, updated_query_fields)
else:
# if this was a dynamic rule, delete the data in Seer
if alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC:
@@ -994,7 +958,15 @@ def update_alert_rule(
"time_window", timedelta(seconds=snuba_query.time_window)
)
updated_query_fields.setdefault("event_types", None)
- updated_query_fields.setdefault("resolution", timedelta(seconds=snuba_query.resolution))
+ if (
+ detection_type == AlertRuleDetectionType.DYNAMIC
+ and alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC
+ ):
+ updated_query_fields.setdefault("resolution", snuba_query.resolution)
+ else:
+ updated_query_fields.setdefault(
+ "resolution", timedelta(seconds=snuba_query.resolution)
+ )
update_snuba_query(snuba_query, environment=environment, **updated_query_fields)
existing_subs: Iterable[QuerySubscription] = ()
@@ -1136,6 +1108,18 @@ def delete_alert_rule(
incidents = Incident.objects.filter(alert_rule=alert_rule)
if incidents.exists():
+ # if this was a dynamic rule, delete the data in Seer
+ if alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC:
+ success = delete_rule_in_seer(
+ alert_rule=alert_rule,
+ )
+ if not success:
+ logger.error(
+ "Call to delete rule data in Seer failed",
+ extra={
+ "rule_id": alert_rule.id,
+ },
+ )
AlertRuleActivity.objects.create(
alert_rule=alert_rule,
user_id=user.id if user else None,
@@ -1658,7 +1642,9 @@ def _get_alert_rule_trigger_action_slack_channel_id(
except StopIteration:
integration = None
else:
- integration = integration_service.get_integration(integration_id=integration_id)
+ integration = integration_service.get_integration(
+ integration_id=integration_id, status=ObjectStatus.ACTIVE
+ )
if integration is None:
raise InvalidTriggerActionError("Slack workspace is a required field.")
@@ -1689,7 +1675,9 @@ def _get_alert_rule_trigger_action_slack_channel_id(
def _get_alert_rule_trigger_action_discord_channel_id(name: str, integration_id: int) -> str | None:
from sentry.integrations.discord.utils.channel import validate_channel_id
- integration = integration_service.get_integration(integration_id=integration_id)
+ integration = integration_service.get_integration(
+ integration_id=integration_id, status=ObjectStatus.ACTIVE
+ )
if integration is None:
raise InvalidTriggerActionError("Discord integration not found.")
try:
@@ -1858,6 +1846,22 @@ def get_opsgenie_teams(organization_id: int, integration_id: int) -> list[tuple[
"measurements.score.total",
],
}
+EAP_COLUMNS = [
+ "span.duration",
+ "span.self_time",
+]
+EAP_FUNCTIONS = [
+ "count",
+ "avg",
+ "p50",
+ "p75",
+ "p90",
+ "p95",
+ "p99",
+ "p100",
+ "max",
+ "min",
+]
def get_column_from_aggregate(aggregate: str, allow_mri: bool) -> str | None:
@@ -1870,6 +1874,11 @@ def get_column_from_aggregate(aggregate: str, allow_mri: bool) -> str | None:
or match.group("function") in METRICS_LAYER_UNSUPPORTED_TRANSACTION_METRICS_FUNCTIONS
):
return None if match.group("columns") == "" else match.group("columns")
+
+ # Skip additional validation for EAP queries. They don't exist in the old logic.
+ if match and match.group("function") in EAP_FUNCTIONS and match.group("columns") in EAP_COLUMNS:
+ return match.group("columns")
+
if allow_mri:
mri_column = _get_column_from_aggregate_with_mri(aggregate)
# Only if the column was allowed, we return it, otherwise we fallback to the old logic.
@@ -1902,7 +1911,9 @@ def _get_column_from_aggregate_with_mri(aggregate: str) -> str | None:
return columns
-def check_aggregate_column_support(aggregate: str, allow_mri: bool = False) -> bool:
+def check_aggregate_column_support(
+ aggregate: str, allow_mri: bool = False, allow_eap: bool = False
+) -> bool:
# TODO(ddm): remove `allow_mri` once the experimental feature flag is removed.
column = get_column_from_aggregate(aggregate, allow_mri)
match = is_function(aggregate)
@@ -1917,6 +1928,7 @@ def check_aggregate_column_support(aggregate: str, allow_mri: bool = False) -> b
isinstance(function, str)
and column in INSIGHTS_FUNCTION_VALID_ARGS_MAP.get(function, [])
)
+ or (column in EAP_COLUMNS and allow_eap)
)
diff --git a/src/sentry/incidents/serializers/__init__.py b/src/sentry/incidents/serializers/__init__.py
index 061c29461acd3..58a4bd86171ef 100644
--- a/src/sentry/incidents/serializers/__init__.py
+++ b/src/sentry/incidents/serializers/__init__.py
@@ -26,7 +26,11 @@
}
QUERY_TYPE_VALID_DATASETS = {
SnubaQuery.Type.ERROR: {Dataset.Events},
- SnubaQuery.Type.PERFORMANCE: {Dataset.Transactions, Dataset.PerformanceMetrics},
+ SnubaQuery.Type.PERFORMANCE: {
+ Dataset.Transactions,
+ Dataset.PerformanceMetrics,
+ Dataset.EventsAnalyticsPlatform,
+ },
SnubaQuery.Type.CRASH_RATE: {Dataset.Metrics},
}
diff --git a/src/sentry/incidents/serializers/alert_rule.py b/src/sentry/incidents/serializers/alert_rule.py
index f68911f9f0851..756d74ef08c1d 100644
--- a/src/sentry/incidents/serializers/alert_rule.py
+++ b/src/sentry/incidents/serializers/alert_rule.py
@@ -165,11 +165,17 @@ def validate_aggregate(self, aggregate):
self.context["organization"],
actor=self.context.get("user", None),
)
+ allow_eap = features.has(
+ "organizations:alerts-eap",
+ self.context["organization"],
+ actor=self.context.get("user", None),
+ )
try:
if not check_aggregate_column_support(
aggregate,
allow_mri=allow_mri,
+ allow_eap=allow_eap,
):
raise serializers.ValidationError(
"Invalid Metric: We do not currently support this field."
diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py
index ce77e69f07bb8..26d97f6b821e1 100644
--- a/src/sentry/incidents/subscription_processor.py
+++ b/src/sentry/incidents/subscription_processor.py
@@ -12,11 +12,8 @@
from django.utils import timezone
from sentry_redis_tools.retrying_cluster import RetryingRedisCluster
from snuba_sdk import Column, Condition, Limit, Op
-from urllib3.exceptions import MaxRetryError, TimeoutError
from sentry import features
-from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL
-from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, CRASH_RATE_ALERT_SESSION_COUNT_ALIAS
from sentry.incidents.logic import (
CRITICAL_TRIGGER_LABEL,
WARNING_TRIGGER_LABEL,
@@ -47,29 +44,18 @@
from sentry.incidents.tasks import handle_trigger_action
from sentry.incidents.utils.types import QuerySubscriptionUpdate
from sentry.models.project import Project
-from sentry.net.http import connection_from_url
-from sentry.seer.anomaly_detection.types import (
- AlertInSeer,
- AnomalyDetectionConfig,
- AnomalyType,
- DetectAnomaliesRequest,
- DetectAnomaliesResponse,
- TimeSeriesPoint,
-)
-from sentry.seer.anomaly_detection.utils import translate_direction
-from sentry.seer.signed_seer_api import make_signed_seer_api_request
+from sentry.seer.anomaly_detection.get_anomaly_data import get_anomaly_data_from_seer
+from sentry.seer.anomaly_detection.utils import anomaly_has_confidence, has_anomaly
from sentry.snuba.dataset import Dataset
from sentry.snuba.entity_subscription import (
ENTITY_TIME_COLUMNS,
- BaseCrashRateMetricsEntitySubscription,
get_entity_key_from_query_builder,
get_entity_subscription_from_snuba_query,
)
from sentry.snuba.models import QuerySubscription
from sentry.snuba.subscriptions import delete_snuba_subscription
-from sentry.utils import json, metrics, redis
+from sentry.utils import metrics, redis
from sentry.utils.dates import to_datetime
-from sentry.utils.json import JSONDecodeError
logger = logging.getLogger(__name__)
REDIS_TTL = int(timedelta(days=7).total_seconds())
@@ -106,11 +92,6 @@ class SubscriptionProcessor:
AlertRuleThresholdType.BELOW: (operator.lt, operator.gt),
}
- seer_anomaly_detection_connection_pool = connection_from_url(
- settings.SEER_ANOMALY_DETECTION_URL,
- timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT,
- )
-
def __init__(self, subscription: QuerySubscription) -> None:
self.subscription = subscription
try:
@@ -282,132 +263,8 @@ def get_comparison_aggregation_value(
result: float = (aggregation_value / comparison_aggregate) * 100
return result
- def get_crash_rate_alert_aggregation_value(
- self, subscription_update: QuerySubscriptionUpdate
- ) -> float | None:
- """
- Handles validation and extraction of Crash Rate Alerts subscription updates values.
- The subscription update looks like
- {
- '_crash_rate_alert_aggregate': 0.5,
- '_total_count': 34
- }
- - `_crash_rate_alert_aggregate` represents sessions_crashed/sessions or
- users_crashed/users, and so we need to subtract that number from 1 and then multiply by
- 100 to get the crash free percentage
- - `_total_count` represents the total sessions or user counts. This is used when
- CRASH_RATE_ALERT_MINIMUM_THRESHOLD is set in the sense that if the minimum threshold is
- greater than the session count, then the update is dropped. If the minimum threshold is
- not set then the total sessions count is just ignored
- """
- aggregation_value = subscription_update["values"]["data"][0][
- CRASH_RATE_ALERT_AGGREGATE_ALIAS
- ]
- if aggregation_value is None:
- self.reset_trigger_counts()
- metrics.incr("incidents.alert_rules.ignore_update_no_session_data")
- return None
-
- try:
- total_count = subscription_update["values"]["data"][0][
- CRASH_RATE_ALERT_SESSION_COUNT_ALIAS
- ]
- if CRASH_RATE_ALERT_MINIMUM_THRESHOLD is not None:
- min_threshold = int(CRASH_RATE_ALERT_MINIMUM_THRESHOLD)
- if total_count < min_threshold:
- self.reset_trigger_counts()
- metrics.incr(
- "incidents.alert_rules.ignore_update_count_lower_than_min_threshold"
- )
- return None
- except KeyError:
- # If for whatever reason total session count was not sent in the update,
- # ignore the minimum threshold comparison and continue along with processing the
- # update. However, this should not happen.
- logger.exception(
- "Received an update for a crash rate alert subscription, but no total "
- "sessions count was sent"
- )
- # The subscription aggregation for crash rate alerts uses the Discover percentage
- # function, which would technically return a ratio of sessions_crashed/sessions and
- # so we need to calculate the crash free percentage out of that returned value
- aggregation_value_result: int = round((1 - aggregation_value) * 100, 3)
- return aggregation_value_result
-
def get_crash_rate_alert_metrics_aggregation_value(
self, subscription_update: QuerySubscriptionUpdate
- ) -> float | None:
- """
- Handle both update formats.
- Once all subscriptions have been updated to v2,
- we can remove v1 and replace this function with current v2.
- """
- rows = subscription_update["values"]["data"]
- if BaseCrashRateMetricsEntitySubscription.is_crash_rate_format_v2(rows):
- version = "v2"
- result = self._get_crash_rate_alert_metrics_aggregation_value_v2(subscription_update)
- else:
- version = "v1"
- result = self._get_crash_rate_alert_metrics_aggregation_value_v1(subscription_update)
-
- metrics.incr(
- "incidents.alert_rules.get_crash_rate_alert_metrics_aggregation_value",
- tags={"format": version},
- sample_rate=1.0,
- )
- return result
-
- def _get_crash_rate_alert_metrics_aggregation_value_v1(
- self, subscription_update: QuerySubscriptionUpdate
- ) -> float | None:
- """
- Handles validation and extraction of Crash Rate Alerts subscription updates values over
- metrics dataset.
- The subscription update looks like
- [
- {'project_id': 8, 'tags[5]': 6, 'value': 2.0},
- {'project_id': 8, 'tags[5]': 13,'value': 1.0}
- ]
- where each entry represents a session status and the count of that specific session status.
- As an example, `tags[5]` represents string `session.status`, while `tags[5]: 6` could
- mean something like there are 2 sessions of status `crashed`. Likewise the other entry
- represents the number of sessions started. In this method, we need to reverse match these
- strings to end up with something that looks like
- {"init": 2, "crashed": 4}
- - `init` represents sessions or users sessions that were started, hence to get the crash
- free percentage, we would need to divide number of crashed sessions by that number,
- and subtract that value from 1. This is also used when CRASH_RATE_ALERT_MINIMUM_THRESHOLD is
- set in the sense that if the minimum threshold is greater than the session count,
- then the update is dropped. If the minimum threshold is not set then the total sessions
- count is just ignored
- - `crashed` represents the total sessions or user counts that crashed.
- """
- (
- total_session_count,
- crash_count,
- ) = BaseCrashRateMetricsEntitySubscription.translate_sessions_tag_keys_and_values(
- data=subscription_update["values"]["data"],
- org_id=self.subscription.project.organization.id,
- )
-
- if total_session_count == 0:
- self.reset_trigger_counts()
- metrics.incr("incidents.alert_rules.ignore_update_no_session_data")
- return None
-
- if CRASH_RATE_ALERT_MINIMUM_THRESHOLD is not None:
- min_threshold = int(CRASH_RATE_ALERT_MINIMUM_THRESHOLD)
- if total_session_count < min_threshold:
- self.reset_trigger_counts()
- metrics.incr("incidents.alert_rules.ignore_update_count_lower_than_min_threshold")
- return None
-
- aggregation_value = round((1 - crash_count / total_session_count) * 100, 3)
-
- return aggregation_value
-
- def _get_crash_rate_alert_metrics_aggregation_value_v2(
- self, subscription_update: QuerySubscriptionUpdate
) -> float | None:
"""
Handles validation and extraction of Crash Rate Alerts subscription updates values over
@@ -425,8 +282,8 @@ def _get_crash_rate_alert_metrics_aggregation_value_v2(
- `crashed` represents the total sessions or user counts that crashed.
"""
row = subscription_update["values"]["data"][0]
- total_session_count = row["count"]
- crash_count = row["crashed"]
+ total_session_count = row.get("count", 0)
+ crash_count = row.get("crashed", 0)
if total_session_count == 0:
self.reset_trigger_counts()
@@ -530,20 +387,34 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
},
)
- self.has_anomaly_detection = features.has(
+ has_anomaly_detection = features.has(
"organizations:anomaly-detection-alerts", self.subscription.project.organization
- )
- has_fake_anomalies = features.has(
- "organizations:fake-anomaly-detection", self.subscription.project.organization
+ ) and features.has(
+ "organizations:anomaly-detection-rollout", self.subscription.project.organization
)
potential_anomalies = None
if (
- self.has_anomaly_detection
+ has_anomaly_detection
and self.alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC
):
- potential_anomalies = self.get_anomaly_data_from_seer(aggregation_value)
+ potential_anomalies = get_anomaly_data_from_seer(
+ alert_rule=self.alert_rule,
+ subscription=self.subscription,
+ last_update=self.last_update.timestamp(),
+ aggregation_value=aggregation_value,
+ )
if potential_anomalies is None:
+ logger.info(
+ "No potential anomalies found",
+ extra={
+ "subscription_id": self.subscription.id,
+ "dataset": self.alert_rule.snuba_query.dataset,
+ "organization_id": self.subscription.project.organization.id,
+ "project_id": self.subscription.project_id,
+ "alert_rule_id": self.alert_rule.id,
+ },
+ )
return []
# Trigger callbacks for any AlertRules that may need to know about the subscription update
@@ -576,7 +447,7 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
for potential_anomaly in potential_anomalies:
# check to see if we have enough data for the dynamic alert rule now
if self.alert_rule.status == AlertRuleStatus.NOT_ENOUGH_DATA.value:
- if self.anomaly_has_confidence(potential_anomaly):
+ if anomaly_has_confidence(potential_anomaly):
# NOTE: this means "enabled," and it's the default alert rule status.
# TODO: change these status labels to be less confusing
self.alert_rule.status = AlertRuleStatus.PENDING.value
@@ -585,8 +456,8 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
# we don't need to check if the alert should fire if the alert can't fire yet
continue
- if self.has_anomaly(
- potential_anomaly, trigger.label, has_fake_anomalies
+ if has_anomaly(
+ potential_anomaly, trigger.label
) and not self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE):
metrics.incr(
"incidents.alert_rules.threshold.alert",
@@ -601,9 +472,7 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
self.trigger_alert_counts[trigger.id] = 0
if (
- not self.has_anomaly(
- potential_anomaly, trigger.label, has_fake_anomalies
- )
+ not has_anomaly(potential_anomaly, trigger.label)
and self.active_incident
and self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE)
):
@@ -670,130 +539,6 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
# before the next one then we might alert twice.
self.update_alert_rule_stats()
- def has_anomaly(self, anomaly: TimeSeriesPoint, label: str, has_fake_anomalies: bool) -> bool:
- """
- Helper function to determine whether we care about an anomaly based on the
- anomaly type and trigger type.
- """
- if has_fake_anomalies:
- return True
-
- anomaly_type = anomaly.get("anomaly", {}).get("anomaly_type")
-
- if anomaly_type == AnomalyType.HIGH_CONFIDENCE.value or (
- label == WARNING_TRIGGER_LABEL and anomaly_type == AnomalyType.LOW_CONFIDENCE.value
- ):
- return True
- return False
-
- def anomaly_has_confidence(self, anomaly: TimeSeriesPoint) -> bool:
- """
- Helper function to determine whether we have the 7+ days of data necessary
- to detect anomalies/send alerts for dynamic alert rules.
- """
- anomaly_type = anomaly.get("anomaly", {}).get("anomaly_type")
- return anomaly_type != AnomalyType.NO_DATA.value
-
- def get_anomaly_data_from_seer(
- self, aggregation_value: float | None
- ) -> list[TimeSeriesPoint] | None:
- anomaly_detection_config = AnomalyDetectionConfig(
- time_period=int(self.alert_rule.snuba_query.time_window / 60),
- sensitivity=self.alert_rule.sensitivity,
- direction=translate_direction(self.alert_rule.threshold_type),
- expected_seasonality=self.alert_rule.seasonality,
- )
- context = AlertInSeer(
- id=self.alert_rule.id,
- cur_window=TimeSeriesPoint(
- timestamp=self.last_update.timestamp(), value=aggregation_value
- ),
- )
- detect_anomalies_request = DetectAnomaliesRequest(
- organization_id=self.subscription.project.organization.id,
- project_id=self.subscription.project_id,
- config=anomaly_detection_config,
- context=context,
- )
- extra_data = {
- "subscription_id": self.subscription.id,
- "dataset": self.subscription.snuba_query.dataset,
- "organization_id": self.subscription.project.organization.id,
- "project_id": self.subscription.project_id,
- "alert_rule_id": self.alert_rule.id,
- }
- try:
- response = make_signed_seer_api_request(
- self.seer_anomaly_detection_connection_pool,
- SEER_ANOMALY_DETECTION_ENDPOINT_URL,
- json.dumps(detect_anomalies_request).encode("utf-8"),
- )
- except (TimeoutError, MaxRetryError):
- logger.warning(
- "Timeout error when hitting anomaly detection endpoint", extra=extra_data
- )
- return None
-
- if response.status > 400:
- logger.error(
- "Error when hitting Seer detect anomalies endpoint",
- extra={
- "response_data": response.data,
- **extra_data,
- },
- )
- return None
- try:
- decoded_data = response.data.decode("utf-8")
- except AttributeError:
- logger.exception(
- "Failed to parse Seer anomaly detection response",
- extra={
- "ad_config": anomaly_detection_config,
- "context": context,
- "response_data": response.data,
- "response_code": response.status,
- },
- )
- return None
-
- try:
- results: DetectAnomaliesResponse = json.loads(decoded_data)
- except JSONDecodeError:
- logger.exception(
- "Failed to parse Seer anomaly detection response",
- extra={
- "ad_config": anomaly_detection_config,
- "context": context,
- "response_data": decoded_data,
- "response_code": response.status,
- },
- )
- return None
-
- if not results.get("success"):
- logger.error(
- "Error when hitting Seer detect anomalies endpoint",
- extra={
- "error_message": results.get("message", ""),
- **extra_data,
- },
- )
- return None
-
- ts = results.get("timeseries")
- if not ts:
- logger.warning(
- "Seer anomaly detection response returned no potential anomalies",
- extra={
- "ad_config": anomaly_detection_config,
- "context": context,
- "response_data": results.get("message"),
- },
- )
- return None
- return ts
-
def calculate_event_date_from_update_date(self, update_date: datetime) -> datetime:
"""
Calculates the date that an event actually happened based on the date that we
diff --git a/src/sentry/incidents/tasks.py b/src/sentry/incidents/tasks.py
index e9e96cf97780e..5fe389e68082b 100644
--- a/src/sentry/incidents/tasks.py
+++ b/src/sentry/incidents/tasks.py
@@ -31,6 +31,7 @@
from sentry.snuba.models import QuerySubscription
from sentry.snuba.query_subscriptions.consumer import register_subscriber
from sentry.tasks.base import instrumented_task
+from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
from sentry.users.services.user.service import user_service
from sentry.utils import metrics
@@ -84,7 +85,7 @@ def send_subscriber_notifications(activity_id: int) -> None:
def generate_incident_activity_email(
- activity: IncidentActivity, user: RpcUser, activity_user: RpcUser | None = None
+ activity: IncidentActivity, user: RpcUser | User, activity_user: RpcUser | User | None = None
) -> MessageBuilder:
incident = activity.incident
return MessageBuilder(
diff --git a/src/sentry/ingest/consumer/factory.py b/src/sentry/ingest/consumer/factory.py
index 65c0436624368..9691f022592e4 100644
--- a/src/sentry/ingest/consumer/factory.py
+++ b/src/sentry/ingest/consumer/factory.py
@@ -159,3 +159,58 @@ def shutdown(self) -> None:
self._pool.close()
if self._attachments_pool:
self._attachments_pool.close()
+
+
+class IngestTransactionsStrategyFactory(ProcessingStrategyFactory[KafkaPayload]):
+ """
+ Processes transactions in either celery or no-celery mode.
+ Transactions are either dispatched to `save_transaction_event` or stored directly in the
+ consumer depending on the mode.
+ """
+
+ def __init__(
+ self,
+ reprocess_only_stuck_events: bool,
+ stop_at_timestamp: int | None,
+ num_processes: int,
+ max_batch_size: int,
+ max_batch_time: int,
+ input_block_size: int | None,
+ output_block_size: int | None,
+ no_celery_mode: bool = False,
+ ):
+ self.consumer_type = ConsumerType.Transactions
+ self.reprocess_only_stuck_events = reprocess_only_stuck_events
+ self.stop_at_timestamp = stop_at_timestamp
+
+ self.multi_process = None
+ self._pool = MultiprocessingPool(num_processes)
+
+ if num_processes > 1:
+ self.multi_process = MultiProcessConfig(
+ num_processes, max_batch_size, max_batch_time, input_block_size, output_block_size
+ )
+
+ self.health_checker = HealthChecker("ingest")
+ self.no_celery_mode = no_celery_mode
+
+ def create_with_partitions(
+ self,
+ commit: Commit,
+ partitions: Mapping[Partition, int],
+ ) -> ProcessingStrategy[KafkaPayload]:
+ mp = self.multi_process
+
+ final_step = CommitOffsets(commit)
+
+ event_function = partial(
+ process_simple_event_message,
+ consumer_type=self.consumer_type,
+ reprocess_only_stuck_events=self.reprocess_only_stuck_events,
+ no_celery_mode=self.no_celery_mode,
+ )
+ next_step = maybe_multiprocess_step(mp, event_function, final_step, self._pool)
+ return create_backpressure_step(health_checker=self.health_checker, next_step=next_step)
+
+ def shutdown(self) -> None:
+ self._pool.close()
diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py
index 3f067f0d8dbfb..7ea9fbf72ce03 100644
--- a/src/sentry/ingest/consumer/processors.py
+++ b/src/sentry/ingest/consumer/processors.py
@@ -11,9 +11,9 @@
from sentry import eventstore, features
from sentry.attachments import CachedAttachment, attachment_cache
-from sentry.event_manager import save_attachment
+from sentry.event_manager import EventManager, save_attachment
from sentry.eventstore.processing import event_processing_store
-from sentry.feedback.usecases.create_feedback import FeedbackCreationSource
+from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, is_in_feedback_denylist
from sentry.ingest.userreport import Conflict, save_userreport
from sentry.killswitches import killswitch_matches_context
from sentry.models.project import Project
@@ -23,6 +23,7 @@
from sentry.utils import metrics
from sentry.utils.cache import cache_key_for_event
from sentry.utils.dates import to_datetime
+from sentry.utils.sdk import set_current_event_project
from sentry.utils.snuba import RateLimitExceeded
logger = logging.getLogger(__name__)
@@ -53,10 +54,36 @@ def inner(*args, **kwargs):
return wrapper
+def process_transaction_no_celery(
+ data: MutableMapping[str, Any], project_id: int, start_time: float
+) -> None:
+
+ set_current_event_project(project_id)
+
+ manager = EventManager(data)
+ # event.project.organization is populated after this statement.
+ manager.save(
+ project_id,
+ assume_normalized=True,
+ start_time=start_time,
+ )
+ # Put the updated event back into the cache so that post_process
+ # has the most recent data.
+ data = manager.get_data()
+ if not isinstance(data, dict):
+ data = dict(data.items())
+
+ with sentry_sdk.start_span(op="event_processing_store.store"):
+ event_processing_store.store(data)
+
+
@trace_func(name="ingest_consumer.process_event")
@metrics.wraps("ingest_consumer.process_event")
def process_event(
- message: IngestMessage, project: Project, reprocess_only_stuck_events: bool = False
+ message: IngestMessage,
+ project: Project,
+ reprocess_only_stuck_events: bool = False,
+ no_celery_mode: bool = False,
) -> None:
"""
Perform some initial filtering and deserialize the message payload.
@@ -88,37 +115,44 @@ def process_event(
# This code has been ripped from the old python store endpoint. We're
# keeping it around because it does provide some protection against
# reprocessing good events if a single consumer is in a restart loop.
- deduplication_key = f"ev:{project_id}:{event_id}"
-
- try:
- cached_value = cache.get(deduplication_key)
- except Exception as exc:
- raise Retriable(exc)
+ with sentry_sdk.start_span(op="deduplication_check"):
+ deduplication_key = f"ev:{project_id}:{event_id}"
- if cached_value is not None:
- logger.warning(
- "pre-process-forwarder detected a duplicated event" " with id:%s for project:%s.",
- event_id,
- project_id,
- )
- return # message already processed do not reprocess
+ try:
+ cached_value = cache.get(deduplication_key)
+ except Exception as exc:
+ raise Retriable(exc)
+
+ if cached_value is not None:
+ logger.warning(
+ "pre-process-forwarder detected a duplicated event" " with id:%s for project:%s.",
+ event_id,
+ project_id,
+ )
+ return # message already processed do not reprocess
- if killswitch_matches_context(
- "store.load-shed-pipeline-projects",
- {
- "project_id": project_id,
- "event_id": event_id,
- "has_attachments": bool(attachments),
- },
+ with sentry_sdk.start_span(
+ op="killswitch_matches_context", name="store.load-shed-pipeline-projects"
):
- # This killswitch is for the worst of scenarios and should probably not
- # cause additional load on our logging infrastructure
- return
+ if killswitch_matches_context(
+ "store.load-shed-pipeline-projects",
+ {
+ "project_id": project_id,
+ "event_id": event_id,
+ "has_attachments": bool(attachments),
+ },
+ ):
+ # This killswitch is for the worst of scenarios and should probably not
+ # cause additional load on our logging infrastructure
+ return
# Parse the JSON payload. This is required to compute the cache key and
# call process_event. The payload will be put into Kafka raw, to avoid
# serializing it again.
- data = orjson.loads(payload)
+ with sentry_sdk.start_span(op="orjson.loads"):
+ data = orjson.loads(payload)
+
+ sentry_sdk.set_extra("event_type", data.get("type"))
if project_id == settings.SENTRY_PROJECT:
metrics.incr(
@@ -126,17 +160,20 @@ def process_event(
tags={"event_type": data.get("type") or "null"},
)
- if killswitch_matches_context(
- "store.load-shed-parsed-pipeline-projects",
- {
- "organization_id": project.organization_id,
- "project_id": project.id,
- "event_type": data.get("type") or "null",
- "has_attachments": bool(attachments),
- "event_id": event_id,
- },
+ with sentry_sdk.start_span(
+ op="killswitch_matches_context", name="store.load-shed-parsed-pipeline-projects"
):
- return
+ if killswitch_matches_context(
+ "store.load-shed-parsed-pipeline-projects",
+ {
+ "organization_id": project.organization_id,
+ "project_id": project.id,
+ "event_type": data.get("type") or "null",
+ "has_attachments": bool(attachments),
+ "event_id": event_id,
+ },
+ ):
+ return
# Raise the retriable exception and skip DLQ if anything below this point fails as it may be caused by
# intermittent network issue
@@ -144,11 +181,19 @@ def process_event(
# If we only want to reprocess "stuck" events, we check if this event is already in the
# `processing_store`. We only continue here if the event *is* present, as that will eventually
# process and consume the event from the `processing_store`, whereby getting it "unstuck".
- if reprocess_only_stuck_events and not event_processing_store.exists(data):
- return
-
- with metrics.timer("ingest_consumer._store_event"):
- cache_key = event_processing_store.store(data)
+ if reprocess_only_stuck_events:
+ with sentry_sdk.start_span(op="event_processing_store.exists"):
+ if not event_processing_store.exists(data):
+ return
+
+ # The no_celery_mode version of the transactions consumer skips one trip to rc-processing
+ # Otherwise, we have to store the event in processing store here for the save_event task to
+ # fetch later
+ if no_celery_mode and not attachments:
+ cache_key = None
+ else:
+ with metrics.timer("ingest_consumer._store_event"):
+ cache_key = event_processing_store.store(data)
try:
# Records rc-processing usage broken down by
@@ -172,28 +217,38 @@ def process_event(
CachedAttachment(type=attachment.pop("attachment_type"), **attachment)
for attachment in attachments
]
-
+ assert cache_key is not None
attachment_cache.set(
cache_key, attachments=attachment_objects, timeout=CACHE_TIMEOUT
)
if data.get("type") == "transaction":
- # No need for preprocess/process for transactions thus submit
- # directly transaction specific save_event task.
- save_event_transaction.delay(
- cache_key=cache_key,
- data=None,
- start_time=start_time,
- event_id=event_id,
- project_id=project_id,
- )
+ if no_celery_mode:
+ with sentry_sdk.start_span(op="ingest_consumer.process_transaction_no_celery"):
+ transaction = sentry_sdk.get_current_scope().transaction
+
+ if transaction is not None:
+ transaction.set_tag("no_celery_mode", True)
+
+ process_transaction_no_celery(data, project_id, start_time)
+ else:
+ assert cache_key is not None
+ # No need for preprocess/process for transactions thus submit
+ # directly transaction specific save_event task.
+ save_event_transaction.delay(
+ cache_key=cache_key,
+ data=None,
+ start_time=start_time,
+ event_id=event_id,
+ project_id=project_id,
+ )
try:
collect_span_metrics(project, data)
except Exception:
pass
elif data.get("type") == "feedback":
- if features.has("organizations:user-feedback-ingest", project.organization, actor=None):
+ if not is_in_feedback_denylist(project.organization):
save_event_feedback.delay(
cache_key=None, # no need to cache as volume is low
data=data,
@@ -201,6 +256,8 @@ def process_event(
event_id=event_id,
project_id=project_id,
)
+ else:
+ metrics.incr("feedback.ingest.filtered", tags={"reason": "org.denylist"})
else:
# Preprocess this event, which spawns either process_event or
# save_event. Pass data explicitly to avoid fetching it again from the
@@ -216,10 +273,14 @@ def process_event(
)
# remember for an 1 hour that we saved this event (deduplication protection)
- cache.set(deduplication_key, "", CACHE_TIMEOUT)
+ with sentry_sdk.start_span(op="cache.set"):
+ cache.set(deduplication_key, "", CACHE_TIMEOUT)
# emit event_accepted once everything is done
- event_accepted.send_robust(ip=remote_addr, data=data, project=project, sender=process_event)
+ with sentry_sdk.start_span(op="event_accepted.send_robust"):
+ event_accepted.send_robust(
+ ip=remote_addr, data=data, project=project, sender=process_event
+ )
except Exception as exc:
if isinstance(exc, KeyError): # ex: missing event_id in message["payload"]
raise
diff --git a/src/sentry/ingest/consumer/simple_event.py b/src/sentry/ingest/consumer/simple_event.py
index fb594f11a14b3..99fa07a4b52ae 100644
--- a/src/sentry/ingest/consumer/simple_event.py
+++ b/src/sentry/ingest/consumer/simple_event.py
@@ -14,7 +14,10 @@
def process_simple_event_message(
- raw_message: Message[KafkaPayload], consumer_type: str, reprocess_only_stuck_events: bool
+ raw_message: Message[KafkaPayload],
+ consumer_type: str,
+ reprocess_only_stuck_events: bool,
+ no_celery_mode: bool = False,
) -> None:
"""
Processes a single Kafka Message containing a "simple" Event payload.
@@ -28,6 +31,8 @@ def process_simple_event_message(
- Store the JSON payload in the event processing store, and pass it on to
`preprocess_event`, which will schedule a followup task such as
`symbolicate_event` or `process_event`.
+
+ No celery mode only applies to the transactions consumer.
"""
raw_payload = raw_message.payload.value
@@ -54,7 +59,7 @@ def process_simple_event_message(
logger.exception("Project for ingested event does not exist: %s", project_id)
return
- return process_event(message, project, reprocess_only_stuck_events)
+ return process_event(message, project, reprocess_only_stuck_events, no_celery_mode)
except Exception as exc:
# If the retriable exception was raised, we should not DLQ
diff --git a/src/sentry/ingest/userreport.py b/src/sentry/ingest/userreport.py
index 71b777fc04c85..904f66418b89d 100644
--- a/src/sentry/ingest/userreport.py
+++ b/src/sentry/ingest/userreport.py
@@ -6,10 +6,11 @@
from django.db import IntegrityError, router
from django.utils import timezone
-from sentry import eventstore, features, options
+from sentry import eventstore, options
from sentry.eventstore.models import Event, GroupEvent
from sentry.feedback.usecases.create_feedback import (
UNREAL_FEEDBACK_UNATTENDED_MESSAGE,
+ is_in_feedback_denylist,
shim_to_feedback,
)
from sentry.models.userreport import UserReport
@@ -32,7 +33,8 @@ def save_userreport(
start_time=None,
):
with metrics.timer("sentry.ingest.userreport.save_userreport"):
- if is_org_in_denylist(project.organization):
+ if is_in_feedback_denylist(project.organization):
+ metrics.incr("user_report.create_user_report.filtered", tags={"reason": "org.denylist"})
return
if should_filter_user_report(report["comments"]):
return
@@ -97,24 +99,19 @@ def save_userreport(
user_feedback_received.send(project=project, sender=save_userreport)
- has_feedback_ingest = features.has(
- "organizations:user-feedback-ingest", project.organization, actor=None
- )
logger.info(
"ingest.user_report",
extra={
"project_id": project.id,
"event_id": report["event_id"],
"has_event": bool(event),
- "has_feedback_ingest": has_feedback_ingest,
},
)
metrics.incr(
"user_report.create_user_report.saved",
- tags={"has_event": bool(event), "has_feedback_ingest": has_feedback_ingest},
+ tags={"has_event": bool(event)},
)
-
- if has_feedback_ingest and event:
+ if event:
logger.info(
"ingest.user_report.shim_to_feedback",
extra={"project_id": project.id, "event_id": report["event_id"]},
@@ -150,10 +147,3 @@ def should_filter_user_report(comments: str):
return True
return False
-
-
-def is_org_in_denylist(organization):
- if organization.slug in options.get("feedback.organizations.slug-denylist"):
- metrics.incr("user_report.create_user_report.filtered", tags={"reason": "org.denylist"})
- return True
- return False
diff --git a/src/sentry/integrations/api/bases/external_actor.py b/src/sentry/integrations/api/bases/external_actor.py
index ca1c47f4ba11f..99cd4c8eb65d6 100644
--- a/src/sentry/integrations/api/bases/external_actor.py
+++ b/src/sentry/integrations/api/bases/external_actor.py
@@ -3,6 +3,7 @@
from django.db import IntegrityError
from django.http import Http404
+from drf_spectacular.utils import extend_schema_serializer
from rest_framework import serializers
from rest_framework.exceptions import PermissionDenied
from rest_framework.request import Request
@@ -54,7 +55,7 @@ class ExternalActorSerializerBase(CamelSnakeModelSerializer):
required=False, allow_null=True, help_text="The associated user ID for provider."
)
external_name = serializers.CharField(
- required=True, help_text="The associated username for the provider."
+ required=True, help_text="The associated name for the provider."
)
provider = serializers.ChoiceField(
choices=get_provider_choices(AVAILABLE_PROVIDERS),
@@ -155,6 +156,7 @@ class Meta:
fields = ["user_id", "external_id", "external_name", "provider", "integration_id", "id"]
+@extend_schema_serializer(exclude_fields=["team_id"])
class ExternalTeamSerializer(ExternalActorSerializerBase):
_actor_key = "team_id"
diff --git a/src/sentry/integrations/api/endpoints/external_team_details.py b/src/sentry/integrations/api/endpoints/external_team_details.py
index 3838359061e00..4a195157a1541 100644
--- a/src/sentry/integrations/api/endpoints/external_team_details.py
+++ b/src/sentry/integrations/api/endpoints/external_team_details.py
@@ -1,6 +1,7 @@
import logging
from typing import Any
+from drf_spectacular.utils import extend_schema
from rest_framework import status
from rest_framework.request import Request
from rest_framework.response import Response
@@ -10,10 +11,14 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.team import TeamEndpoint
from sentry.api.serializers import serialize
+from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN, RESPONSE_NO_CONTENT
+from sentry.apidocs.examples.integration_examples import IntegrationExamples
+from sentry.apidocs.parameters import GlobalParams, OrganizationParams
from sentry.integrations.api.bases.external_actor import (
ExternalActorEndpointMixin,
ExternalTeamSerializer,
)
+from sentry.integrations.api.serializers.models.external_actor import ExternalActorSerializer
from sentry.integrations.models.external_actor import ExternalActor
from sentry.models.team import Team
@@ -21,10 +26,11 @@
@region_silo_endpoint
+@extend_schema(tags=["Integrations"])
class ExternalTeamDetailsEndpoint(TeamEndpoint, ExternalActorEndpointMixin):
publish_status = {
- "DELETE": ApiPublishStatus.UNKNOWN,
- "PUT": ApiPublishStatus.UNKNOWN,
+ "DELETE": ApiPublishStatus.PUBLIC,
+ "PUT": ApiPublishStatus.PUBLIC,
}
owner = ApiOwner.ENTERPRISE
@@ -45,19 +51,24 @@ def convert_args(
)
return args, kwargs
+ @extend_schema(
+ operation_id="Update an External Team",
+ parameters=[
+ GlobalParams.ORG_ID_OR_SLUG,
+ GlobalParams.TEAM_ID_OR_SLUG,
+ OrganizationParams.EXTERNAL_TEAM_ID,
+ ],
+ request=ExternalTeamSerializer,
+ responses={
+ 200: ExternalActorSerializer,
+ 400: RESPONSE_BAD_REQUEST,
+ 403: RESPONSE_FORBIDDEN,
+ },
+ examples=IntegrationExamples.EXTERNAL_TEAM_CREATE,
+ )
def put(self, request: Request, team: Team, external_team: ExternalActor) -> Response:
"""
- Update an External Team
- `````````````
-
- :pparam string organization_id_or_slug: the id or slug of the organization the
- team belongs to.
- :pparam string team_id_or_slug: the id or slug of the team to get.
- :pparam string external_team_id: id of external_team object
- :param string external_id: the associated user ID for this provider
- :param string external_name: the Github/Gitlab team name.
- :param string provider: enum("github","gitlab")
- :auth: required
+ Update a team in an external provider that is currently linked to a Sentry team.
"""
self.assert_has_feature(request, team.organization)
@@ -76,9 +87,23 @@ def put(self, request: Request, team: Team, external_team: ExternalActor) -> Res
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ @extend_schema(
+ operation_id="Delete an External Team",
+ parameters=[
+ GlobalParams.ORG_ID_OR_SLUG,
+ GlobalParams.TEAM_ID_OR_SLUG,
+ OrganizationParams.EXTERNAL_TEAM_ID,
+ ],
+ request=None,
+ responses={
+ 204: RESPONSE_NO_CONTENT,
+ 400: RESPONSE_BAD_REQUEST,
+ 403: RESPONSE_FORBIDDEN,
+ },
+ )
def delete(self, request: Request, team: Team, external_team: ExternalActor) -> Response:
"""
- Delete an External Team
+ Delete the link between a team from an external provider and a Sentry team.
"""
external_team.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/src/sentry/integrations/api/endpoints/external_team_index.py b/src/sentry/integrations/api/endpoints/external_team_index.py
index 6796e11de89f9..1beab567c8316 100644
--- a/src/sentry/integrations/api/endpoints/external_team_index.py
+++ b/src/sentry/integrations/api/endpoints/external_team_index.py
@@ -1,5 +1,6 @@
import logging
+from drf_spectacular.utils import extend_schema
from rest_framework import status
from rest_framework.request import Request
from rest_framework.response import Response
@@ -9,35 +10,42 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.team import TeamEndpoint
from sentry.api.serializers import serialize
+from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN
+from sentry.apidocs.examples.integration_examples import IntegrationExamples
+from sentry.apidocs.parameters import GlobalParams
from sentry.integrations.api.bases.external_actor import (
ExternalActorEndpointMixin,
ExternalTeamSerializer,
)
+from sentry.integrations.api.serializers.models.external_actor import ExternalActorSerializer
from sentry.models.team import Team
logger = logging.getLogger(__name__)
@region_silo_endpoint
+@extend_schema(tags=["Integrations"])
class ExternalTeamEndpoint(TeamEndpoint, ExternalActorEndpointMixin):
publish_status = {
- "POST": ApiPublishStatus.UNKNOWN,
+ "POST": ApiPublishStatus.PUBLIC,
}
owner = ApiOwner.ENTERPRISE
+ @extend_schema(
+ operation_id="Create an External Team",
+ parameters=[GlobalParams.ORG_ID_OR_SLUG, GlobalParams.TEAM_ID_OR_SLUG],
+ request=ExternalTeamSerializer,
+ responses={
+ 200: ExternalActorSerializer,
+ 201: ExternalActorSerializer,
+ 400: RESPONSE_BAD_REQUEST,
+ 403: RESPONSE_FORBIDDEN,
+ },
+ examples=IntegrationExamples.EXTERNAL_TEAM_CREATE,
+ )
def post(self, request: Request, team: Team) -> Response:
"""
- Create an External Team
- `````````````
-
- :pparam string organization_id_or_slug: the id or slug of the organization the
- team belongs to.
- :pparam string team_id_or_slug: the team_id_or_slug of the team to get.
- :param required string provider: enum("github", "gitlab")
- :param required string external_name: the associated Github/Gitlab team name.
- :param optional string integration_id: the id of the integration if it exists.
- :param string external_id: the associated user ID for this provider
- :auth: required
+ Link a team from an external provider to a Sentry team.
"""
self.assert_has_feature(request, team.organization)
diff --git a/src/sentry/integrations/api/endpoints/external_user_details.py b/src/sentry/integrations/api/endpoints/external_user_details.py
index 3c96ff9b22d40..a1e5294fc3fb4 100644
--- a/src/sentry/integrations/api/endpoints/external_user_details.py
+++ b/src/sentry/integrations/api/endpoints/external_user_details.py
@@ -14,12 +14,13 @@
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.serializers import serialize
from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN, RESPONSE_NO_CONTENT
-from sentry.apidocs.examples.organization_examples import OrganizationExamples
+from sentry.apidocs.examples.integration_examples import IntegrationExamples
from sentry.apidocs.parameters import GlobalParams, OrganizationParams
from sentry.integrations.api.bases.external_actor import (
ExternalActorEndpointMixin,
ExternalUserSerializer,
)
+from sentry.integrations.api.serializers.models.external_actor import ExternalActorSerializer
from sentry.integrations.models.external_actor import ExternalActor
from sentry.models.organization import Organization
@@ -27,7 +28,7 @@
@region_silo_endpoint
-@extend_schema(tags=["Organizations"])
+@extend_schema(tags=["Integrations"])
class ExternalUserDetailsEndpoint(OrganizationEndpoint, ExternalActorEndpointMixin):
publish_status = {
"DELETE": ApiPublishStatus.PUBLIC,
@@ -54,11 +55,11 @@ def convert_args(
parameters=[GlobalParams.ORG_ID_OR_SLUG, OrganizationParams.EXTERNAL_USER_ID],
request=ExternalUserSerializer,
responses={
- 200: ExternalUserSerializer,
+ 200: ExternalActorSerializer,
400: RESPONSE_BAD_REQUEST,
403: RESPONSE_FORBIDDEN,
},
- examples=OrganizationExamples.EXTERNAL_USER_CREATE,
+ examples=IntegrationExamples.EXTERNAL_USER_CREATE,
)
def put(
self, request: Request, organization: Organization, external_user: ExternalActor
@@ -92,7 +93,6 @@ def put(
400: RESPONSE_BAD_REQUEST,
403: RESPONSE_FORBIDDEN,
},
- examples=OrganizationExamples.EXTERNAL_USER_CREATE,
)
def delete(
self, request: Request, organization: Organization, external_user: ExternalActor
diff --git a/src/sentry/integrations/api/endpoints/external_user_index.py b/src/sentry/integrations/api/endpoints/external_user_index.py
index 875ab4b4e5de1..2cd13471abb37 100644
--- a/src/sentry/integrations/api/endpoints/external_user_index.py
+++ b/src/sentry/integrations/api/endpoints/external_user_index.py
@@ -10,20 +10,21 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases import OrganizationEndpoint
from sentry.api.serializers import serialize
-from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN, RESPONSE_SUCCESS
-from sentry.apidocs.examples.organization_examples import OrganizationExamples
+from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN
+from sentry.apidocs.examples.integration_examples import IntegrationExamples
from sentry.apidocs.parameters import GlobalParams
from sentry.integrations.api.bases.external_actor import (
ExternalActorEndpointMixin,
ExternalUserSerializer,
)
+from sentry.integrations.api.serializers.models.external_actor import ExternalActorSerializer
from sentry.models.organization import Organization
logger = logging.getLogger(__name__)
@region_silo_endpoint
-@extend_schema(tags=["Organizations"])
+@extend_schema(tags=["Integrations"])
class ExternalUserEndpoint(OrganizationEndpoint, ExternalActorEndpointMixin):
publish_status = {
"POST": ApiPublishStatus.PUBLIC,
@@ -35,16 +36,16 @@ class ExternalUserEndpoint(OrganizationEndpoint, ExternalActorEndpointMixin):
parameters=[GlobalParams.ORG_ID_OR_SLUG],
request=ExternalUserSerializer,
responses={
- 200: RESPONSE_SUCCESS,
- 201: ExternalUserSerializer,
+ 200: ExternalActorSerializer,
+ 201: ExternalActorSerializer,
400: RESPONSE_BAD_REQUEST,
403: RESPONSE_FORBIDDEN,
},
- examples=OrganizationExamples.EXTERNAL_USER_CREATE,
+ examples=IntegrationExamples.EXTERNAL_USER_CREATE,
)
def post(self, request: Request, organization: Organization) -> Response:
"""
- Links a user from an external provider to a Sentry user.
+ Link a user from an external provider to a Sentry user.
"""
self.assert_has_feature(request, organization)
diff --git a/src/sentry/integrations/api/endpoints/organization_code_mapping_codeowners.py b/src/sentry/integrations/api/endpoints/organization_code_mapping_codeowners.py
index 9e2090c9951a9..4edd397b751f2 100644
--- a/src/sentry/integrations/api/endpoints/organization_code_mapping_codeowners.py
+++ b/src/sentry/integrations/api/endpoints/organization_code_mapping_codeowners.py
@@ -8,6 +8,7 @@
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.organization import OrganizationEndpoint, OrganizationIntegrationsPermission
+from sentry.constants import ObjectStatus
from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig
from sentry.integrations.services.integration import integration_service
from sentry.integrations.source_code_management.repository import RepositoryIntegration
@@ -18,7 +19,9 @@ def get_codeowner_contents(config):
if not config.organization_integration_id:
raise NotFound(detail="No associated integration")
- integration = integration_service.get_integration(integration_id=config.integration_id)
+ integration = integration_service.get_integration(
+ integration_id=config.integration_id, status=ObjectStatus.ACTIVE
+ )
if not integration:
return None
install = integration.get_installation(organization_id=config.project.organization_id)
diff --git a/src/sentry/integrations/api/endpoints/organization_integration_details.py b/src/sentry/integrations/api/endpoints/organization_integration_details.py
index 1b7ea1294f93d..830316dd51285 100644
--- a/src/sentry/integrations/api/endpoints/organization_integration_details.py
+++ b/src/sentry/integrations/api/endpoints/organization_integration_details.py
@@ -16,12 +16,12 @@
from sentry.api.base import control_silo_endpoint
from sentry.api.serializers import serialize
from sentry.constants import ObjectStatus
+from sentry.deletions.models.scheduleddeletion import ScheduledDeletion
from sentry.integrations.api.bases.organization_integrations import (
OrganizationIntegrationBaseEndpoint,
)
from sentry.integrations.api.serializers.models.integration import OrganizationIntegrationSerializer
from sentry.integrations.models.organization_integration import OrganizationIntegration
-from sentry.models.scheduledeletion import ScheduledDeletion
from sentry.organizations.services.organization import RpcUserOrganizationContext
from sentry.shared_integrations.exceptions import ApiError, IntegrationError
from sentry.utils.audit import create_audit_entry
diff --git a/src/sentry/integrations/api/endpoints/organization_integrations_index.py b/src/sentry/integrations/api/endpoints/organization_integrations_index.py
index 93979867c2a71..588994d2045d3 100644
--- a/src/sentry/integrations/api/endpoints/organization_integrations_index.py
+++ b/src/sentry/integrations/api/endpoints/organization_integrations_index.py
@@ -21,6 +21,7 @@
OrganizationIntegrationBaseEndpoint,
)
from sentry.integrations.api.serializers.models.integration import OrganizationIntegrationResponse
+from sentry.integrations.base import INTEGRATION_TYPE_TO_PROVIDER, IntegrationDomain
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
from sentry.organizations.services.organization.model import (
@@ -93,6 +94,7 @@ def get(
if provider_key is None:
provider_key = request.GET.get("provider_key", "")
include_config_raw = request.GET.get("includeConfig")
+ integration_type = request.GET.get("integrationType")
# Include the configurations by default if includeConfig is not present.
# TODO(mgaeta): HACK. We need a consistent way to get booleans from query parameters.
@@ -109,6 +111,16 @@ def get(
if provider_key:
queryset = queryset.filter(integration__provider=provider_key.lower())
+ if integration_type:
+ try:
+ integration_domain = IntegrationDomain(integration_type)
+ except ValueError:
+ return Response({"detail": "Invalid integration type"}, status=400)
+ provider_slugs = [
+ provider for provider in INTEGRATION_TYPE_TO_PROVIDER.get(integration_domain, [])
+ ]
+ queryset = queryset.filter(integration__provider__in=provider_slugs)
+
def on_results(results: Sequence[OrganizationIntegration]) -> Sequence[Mapping[str, Any]]:
if feature_filters:
results = filter_by_features(results, feature_filters)
diff --git a/src/sentry/integrations/api/endpoints/organization_repository_details.py b/src/sentry/integrations/api/endpoints/organization_repository_details.py
index 00a6690be4bd4..2258ad33c3181 100644
--- a/src/sentry/integrations/api/endpoints/organization_repository_details.py
+++ b/src/sentry/integrations/api/endpoints/organization_repository_details.py
@@ -15,11 +15,11 @@
from sentry.api.fields.empty_integer import EmptyIntegerField
from sentry.api.serializers import serialize
from sentry.constants import ObjectStatus
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.hybridcloud.rpc import coerce_id_from
from sentry.integrations.services.integration import integration_service
from sentry.models.commit import Commit
from sentry.models.repository import Repository
-from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.tasks.repository import repository_cascade_delete_on_hide
@@ -74,7 +74,9 @@ def put(self, request: Request, organization, repo_id) -> Response:
raise NotImplementedError
if result.get("integrationId"):
integration = integration_service.get_integration(
- integration_id=result["integrationId"], organization_id=coerce_id_from(organization)
+ integration_id=result["integrationId"],
+ organization_id=coerce_id_from(organization),
+ status=ObjectStatus.ACTIVE,
)
if integration is None:
return Response({"detail": "Invalid integration id"}, status=400)
diff --git a/src/sentry/integrations/api/serializers/rest_framework/doc_integration.py b/src/sentry/integrations/api/serializers/rest_framework/doc_integration.py
index f4ea801fedad4..0483ebd7299a1 100644
--- a/src/sentry/integrations/api/serializers/rest_framework/doc_integration.py
+++ b/src/sentry/integrations/api/serializers/rest_framework/doc_integration.py
@@ -8,7 +8,6 @@
from sentry.api.fields.avatar import AvatarField
from sentry.api.helpers.slugs import sentry_slugify
-from sentry.api.serializers.rest_framework.sentry_app import URLField
from sentry.api.validators.doc_integration import validate_metadata_schema
from sentry.integrations.models.doc_integration import DocIntegration
from sentry.integrations.models.integration_feature import (
@@ -16,6 +15,7 @@
IntegrationFeature,
IntegrationTypes,
)
+from sentry.sentry_apps.api.parsers.sentry_app import URLField
class MetadataField(serializers.JSONField):
diff --git a/src/sentry/integrations/base.py b/src/sentry/integrations/base.py
index abd91547629fa..1a37db0858aab 100644
--- a/src/sentry/integrations/base.py
+++ b/src/sentry/integrations/base.py
@@ -4,7 +4,7 @@
import logging
import sys
from collections.abc import Mapping, MutableMapping, Sequence
-from enum import Enum
+from enum import Enum, StrEnum
from functools import cached_property
from typing import TYPE_CHECKING, Any, NamedTuple, NoReturn
@@ -36,6 +36,7 @@
from sentry.shared_integrations.exceptions import (
ApiError,
ApiHostError,
+ ApiInvalidRequestError,
ApiUnauthorized,
IntegrationError,
IntegrationFormError,
@@ -125,6 +126,58 @@ class IntegrationFeatures(Enum):
DEPLOYMENT = "deployment"
+# Integration Types
+class IntegrationDomain(StrEnum):
+ MESSAGING = "messaging"
+ PROJECT_MANAGEMENT = "project_management"
+ SOURCE_CODE_MANAGEMENT = "source_code_management"
+ ON_CALL_SCHEDULING = "on_call_scheduling"
+ IDENTITY = "identity" # for identity pipelines
+
+
+class IntegrationProviderSlug(StrEnum):
+ SLACK = "slack"
+ DISCORD = "discord"
+ MSTeams = "msteams"
+ JIRA = "jira"
+ JIRA_SERVER = "jira_server"
+ AZURE_DEVOPS = "vsts"
+ GITHUB = "github"
+ GITHUB_ENTERPRISE = "github_enterprise"
+ GITLAB = "gitlab"
+ BITBUCKET = "bitbucket"
+ PAGERDUTY = "pagerduty"
+ OPSGENIE = "opsgenie"
+
+
+INTEGRATION_TYPE_TO_PROVIDER = {
+ IntegrationDomain.MESSAGING: [
+ IntegrationProviderSlug.SLACK,
+ IntegrationProviderSlug.DISCORD,
+ IntegrationProviderSlug.MSTeams,
+ ],
+ IntegrationDomain.PROJECT_MANAGEMENT: [
+ IntegrationProviderSlug.JIRA,
+ IntegrationProviderSlug.JIRA_SERVER,
+ IntegrationProviderSlug.GITHUB,
+ IntegrationProviderSlug.GITHUB_ENTERPRISE,
+ IntegrationProviderSlug.GITLAB,
+ IntegrationProviderSlug.AZURE_DEVOPS,
+ ],
+ IntegrationDomain.SOURCE_CODE_MANAGEMENT: [
+ IntegrationProviderSlug.GITHUB,
+ IntegrationProviderSlug.GITHUB_ENTERPRISE,
+ IntegrationProviderSlug.GITLAB,
+ IntegrationProviderSlug.BITBUCKET,
+ IntegrationProviderSlug.AZURE_DEVOPS,
+ ],
+ IntegrationDomain.ON_CALL_SCHEDULING: [
+ IntegrationProviderSlug.PAGERDUTY,
+ IntegrationProviderSlug.OPSGENIE,
+ ],
+}
+
+
class IntegrationProvider(PipelineProvider, abc.ABC):
"""
An integration provider describes a third party that can be registered within Sentry.
@@ -373,7 +426,7 @@ def get_client(self) -> Any:
"""
raise NotImplementedError
- def get_keyring_client(self, keyid: str) -> Any:
+ def get_keyring_client(self, keyid: int | str) -> Any:
"""
Return an API client with a scoped key based on the key_name.
@@ -431,7 +484,7 @@ def raise_error(self, exc: Exception, identity: Identity | None = None) -> NoRet
raise InvalidIdentity(self.message_from_error(exc), identity=identity).with_traceback(
sys.exc_info()[2]
)
- elif isinstance(exc, ApiError):
+ elif isinstance(exc, ApiInvalidRequestError):
if exc.json:
error_fields = self.error_fields_from_json(exc.json)
if error_fields is not None:
diff --git a/src/sentry/integrations/bitbucket/client.py b/src/sentry/integrations/bitbucket/client.py
index 2b5682abaa84e..897e309ab7b30 100644
--- a/src/sentry/integrations/bitbucket/client.py
+++ b/src/sentry/integrations/bitbucket/client.py
@@ -11,7 +11,7 @@
from sentry.integrations.client import ApiClient
from sentry.integrations.services.integration.model import RpcIntegration
from sentry.integrations.source_code_management.repository import RepositoryClient
-from sentry.integrations.utils import get_query_hash
+from sentry.integrations.utils.atlassian_connect import get_query_hash
from sentry.models.repository import Repository
from sentry.shared_integrations.client.base import BaseApiResponseX
from sentry.utils import jwt
diff --git a/src/sentry/integrations/bitbucket/integration.py b/src/sentry/integrations/bitbucket/integration.py
index 1e12015d59f81..f3d05641111f0 100644
--- a/src/sentry/integrations/bitbucket/integration.py
+++ b/src/sentry/integrations/bitbucket/integration.py
@@ -10,6 +10,7 @@
from sentry.identity.pipeline import IdentityProviderPipeline
from sentry.integrations.base import (
FeatureDescription,
+ IntegrationDomain,
IntegrationFeatures,
IntegrationMetadata,
IntegrationProvider,
@@ -18,7 +19,14 @@
from sentry.integrations.services.repository import RpcRepository, repository_service
from sentry.integrations.source_code_management.repository import RepositoryIntegration
from sentry.integrations.tasks.migrate_repo import migrate_repo
-from sentry.integrations.utils import AtlassianConnectValidationError, get_integration_from_request
+from sentry.integrations.utils.atlassian_connect import (
+ AtlassianConnectValidationError,
+ get_integration_from_request,
+)
+from sentry.integrations.utils.metrics import (
+ IntegrationPipelineViewEvent,
+ IntegrationPipelineViewType,
+)
from sentry.models.repository import Repository
from sentry.organizations.services.organization import RpcOrganizationSummary
from sentry.pipeline import NestedPipelineView, PipelineView
@@ -251,9 +259,18 @@ def setup(self):
class VerifyInstallation(PipelineView):
def dispatch(self, request: Request, pipeline) -> Response:
- try:
- integration = get_integration_from_request(request, BitbucketIntegrationProvider.key)
- except AtlassianConnectValidationError:
- return pipeline.error("Unable to verify installation.")
- pipeline.bind_state("external_id", integration.external_id)
- return pipeline.next_step()
+ with IntegrationPipelineViewEvent(
+ IntegrationPipelineViewType.VERIFY_INSTALLATION,
+ IntegrationDomain.SOURCE_CODE_MANAGEMENT,
+ BitbucketIntegrationProvider.key,
+ ).capture() as lifecycle:
+ try:
+ integration = get_integration_from_request(
+ request, BitbucketIntegrationProvider.key
+ )
+ except AtlassianConnectValidationError as e:
+ lifecycle.record_failure({"failure_reason": str(e)})
+ return pipeline.error("Unable to verify installation.")
+
+ pipeline.bind_state("external_id", integration.external_id)
+ return pipeline.next_step()
diff --git a/src/sentry/integrations/bitbucket/uninstalled.py b/src/sentry/integrations/bitbucket/uninstalled.py
index 2c336f64c026d..6b660b68d9df4 100644
--- a/src/sentry/integrations/bitbucket/uninstalled.py
+++ b/src/sentry/integrations/bitbucket/uninstalled.py
@@ -9,7 +9,10 @@
from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
from sentry.integrations.services.repository import repository_service
-from sentry.integrations.utils import AtlassianConnectValidationError, get_integration_from_jwt
+from sentry.integrations.utils.atlassian_connect import (
+ AtlassianConnectValidationError,
+ get_integration_from_jwt,
+)
@control_silo_endpoint
diff --git a/src/sentry/integrations/bitbucket_server/integration.py b/src/sentry/integrations/bitbucket_server/integration.py
index 01ab5b7ecadd6..04d5b7e5ea06b 100644
--- a/src/sentry/integrations/bitbucket_server/integration.py
+++ b/src/sentry/integrations/bitbucket_server/integration.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import logging
from typing import Any
from urllib.parse import urlparse
@@ -16,6 +15,7 @@
from sentry.integrations.base import (
FeatureDescription,
+ IntegrationDomain,
IntegrationFeatureNotImplementedError,
IntegrationFeatures,
IntegrationMetadata,
@@ -26,6 +26,10 @@
from sentry.integrations.services.repository.model import RpcRepository
from sentry.integrations.source_code_management.repository import RepositoryIntegration
from sentry.integrations.tasks.migrate_repo import migrate_repo
+from sentry.integrations.utils.metrics import (
+ IntegrationPipelineViewEvent,
+ IntegrationPipelineViewType,
+)
from sentry.models.repository import Repository
from sentry.organizations.services.organization import RpcOrganizationSummary
from sentry.pipeline import PipelineView
@@ -36,8 +40,6 @@
from .client import BitbucketServerClient, BitbucketServerSetupClient
from .repository import BitbucketServerRepositoryProvider
-logger = logging.getLogger("sentry.integrations.bitbucket_server")
-
DESCRIPTION = """
Connect your Sentry organization to Bitbucket Server, enabling the following features:
"""
@@ -164,37 +166,38 @@ class OAuthLoginView(PipelineView):
@method_decorator(csrf_exempt)
def dispatch(self, request: Request, pipeline) -> HttpResponse:
- if "oauth_token" in request.GET:
- return pipeline.next_step()
-
- config = pipeline.fetch_state("installation_data")
- client = BitbucketServerSetupClient(
- config.get("url"),
- config.get("consumer_key"),
- config.get("private_key"),
- config.get("verify_ssl"),
- )
+ with IntegrationPipelineViewEvent(
+ IntegrationPipelineViewType.OAUTH_LOGIN,
+ IntegrationDomain.SOURCE_CODE_MANAGEMENT,
+ BitbucketServerIntegrationProvider.key,
+ ).capture() as lifecycle:
+ if "oauth_token" in request.GET:
+ return pipeline.next_step()
- try:
- request_token = client.get_request_token()
- except ApiError as error:
- logger.info(
- "identity.bitbucket-server.request-token",
- extra={"url": config.get("url"), "error": error},
+ config = pipeline.fetch_state("installation_data")
+ client = BitbucketServerSetupClient(
+ config.get("url"),
+ config.get("consumer_key"),
+ config.get("private_key"),
+ config.get("verify_ssl"),
)
- return pipeline.error(f"Could not fetch a request token from Bitbucket. {error}")
- pipeline.bind_state("request_token", request_token)
- if not request_token.get("oauth_token"):
- logger.info(
- "identity.bitbucket-server.oauth-token",
- extra={"url": config.get("url")},
- )
- return pipeline.error("Missing oauth_token")
+ try:
+ request_token = client.get_request_token()
+ except ApiError as error:
+ lifecycle.record_failure({"failure_reason": str(error), "url": config.get("url")})
+ return pipeline.error(f"Could not fetch a request token from Bitbucket. {error}")
- authorize_url = client.get_authorize_url(request_token)
+ pipeline.bind_state("request_token", request_token)
+ if not request_token.get("oauth_token"):
+ lifecycle.record_failure(
+ {"failure_reason": "missing oauth_token", "url": config.get("url")}
+ )
+ return pipeline.error("Missing oauth_token")
- return self.redirect(authorize_url)
+ authorize_url = client.get_authorize_url(request_token)
+
+ return self.redirect(authorize_url)
class OAuthCallbackView(PipelineView):
@@ -205,25 +208,32 @@ class OAuthCallbackView(PipelineView):
@method_decorator(csrf_exempt)
def dispatch(self, request: Request, pipeline) -> HttpResponse:
- config = pipeline.fetch_state("installation_data")
- client = BitbucketServerSetupClient(
- config.get("url"),
- config.get("consumer_key"),
- config.get("private_key"),
- config.get("verify_ssl"),
- )
-
- try:
- access_token = client.get_access_token(
- pipeline.fetch_state("request_token"), request.GET["oauth_token"]
+ with IntegrationPipelineViewEvent(
+ IntegrationPipelineViewType.OAUTH_CALLBACK,
+ IntegrationDomain.SOURCE_CODE_MANAGEMENT,
+ BitbucketServerIntegrationProvider.key,
+ ).capture() as lifecycle:
+ config = pipeline.fetch_state("installation_data")
+ client = BitbucketServerSetupClient(
+ config.get("url"),
+ config.get("consumer_key"),
+ config.get("private_key"),
+ config.get("verify_ssl"),
)
- pipeline.bind_state("access_token", access_token)
+ try:
+ access_token = client.get_access_token(
+ pipeline.fetch_state("request_token"), request.GET["oauth_token"]
+ )
+
+ pipeline.bind_state("access_token", access_token)
- return pipeline.next_step()
- except ApiError as error:
- logger.info("identity.bitbucket-server.access-token", extra={"error": error})
- return pipeline.error(f"Could not fetch an access token from Bitbucket. {str(error)}")
+ return pipeline.next_step()
+ except ApiError as error:
+ lifecycle.record_failure({"failure_reason": str(error)})
+ return pipeline.error(
+ f"Could not fetch an access token from Bitbucket. {str(error)}"
+ )
class BitbucketServerIntegration(RepositoryIntegration):
diff --git a/src/sentry/integrations/discord/actions/issue_alert/form.py b/src/sentry/integrations/discord/actions/issue_alert/form.py
index 1fa0644e4995d..62aafe5d877f3 100644
--- a/src/sentry/integrations/discord/actions/issue_alert/form.py
+++ b/src/sentry/integrations/discord/actions/issue_alert/form.py
@@ -6,6 +6,7 @@
from django.core.exceptions import ValidationError
from django.forms.fields import ChoiceField
+from sentry.constants import ObjectStatus
from sentry.integrations.discord.utils.channel import validate_channel_id
from sentry.integrations.discord.utils.channel_from_url import get_channel_id_from_url
from sentry.integrations.services.integration import integration_service
@@ -36,7 +37,9 @@ def clean(self) -> dict[str, object] | None:
cleaned_data: dict[str, object] = super().clean() or {}
channel_id = cleaned_data.get("channel_id")
server = cleaned_data.get("server")
- integration = integration_service.get_integration(integration_id=server)
+ integration = integration_service.get_integration(
+ integration_id=server, status=ObjectStatus.ACTIVE
+ )
if not server or not integration:
raise forms.ValidationError(
diff --git a/src/sentry/integrations/discord/integration.py b/src/sentry/integrations/discord/integration.py
index f44ef2404a09f..e99a619c2350b 100644
--- a/src/sentry/integrations/discord/integration.py
+++ b/src/sentry/integrations/discord/integration.py
@@ -148,6 +148,12 @@ def get_pipeline_views(self) -> Sequence[PipelineView]:
def build_integration(self, state: Mapping[str, object]) -> Mapping[str, object]:
guild_id = str(state.get("guild_id"))
+
+ if not guild_id.isdigit():
+ raise IntegrationError(
+ "Invalid guild ID. The Discord guild ID must be entirely numeric."
+ )
+
try:
guild_name = self.client.get_guild_name(guild_id=guild_id)
except (ApiError, AttributeError):
diff --git a/src/sentry/integrations/discord/requests/base.py b/src/sentry/integrations/discord/requests/base.py
index 930d030407332..f22c438e98e90 100644
--- a/src/sentry/integrations/discord/requests/base.py
+++ b/src/sentry/integrations/discord/requests/base.py
@@ -9,6 +9,7 @@
from rest_framework.request import Request
from sentry import options
+from sentry.constants import ObjectStatus
from sentry.identity.services.identity import RpcIdentityProvider
from sentry.identity.services.identity.model import RpcIdentity
from sentry.identity.services.identity.service import identity_service
@@ -224,7 +225,7 @@ def get_identity_str(self) -> str | None:
def validate_integration(self) -> None:
if not self._integration:
self._integration = integration_service.get_integration(
- provider="discord", external_id=self.guild_id
+ provider="discord", external_id=self.guild_id, status=ObjectStatus.ACTIVE
)
self._info("discord.validate.integration")
diff --git a/src/sentry/integrations/discord/webhooks/command.py b/src/sentry/integrations/discord/webhooks/command.py
index 5f41165998710..471b7bdcbead1 100644
--- a/src/sentry/integrations/discord/webhooks/command.py
+++ b/src/sentry/integrations/discord/webhooks/command.py
@@ -1,10 +1,22 @@
+from collections.abc import Callable, Iterable
+from dataclasses import dataclass
+
from rest_framework.response import Response
+from sentry.integrations.discord.requests.base import DiscordRequest
+from sentry.integrations.discord.spec import DiscordMessagingSpec
+from sentry.integrations.discord.utils import logger
from sentry.integrations.discord.views.link_identity import build_linking_url
from sentry.integrations.discord.views.unlink_identity import build_unlinking_url
from sentry.integrations.discord.webhooks.handler import DiscordInteractionHandler
-
-from ..utils import logger
+from sentry.integrations.messaging import commands
+from sentry.integrations.messaging.commands import (
+ CommandInput,
+ CommandNotMatchedError,
+ MessagingIntegrationCommand,
+ MessagingIntegrationCommandDispatcher,
+)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
LINK_USER_MESSAGE = "[Click here]({url}) to link your Discord account to your Sentry account."
ALREADY_LINKED_MESSAGE = "You are already linked to the Sentry account with email: `{email}`."
@@ -22,12 +34,6 @@
"""
-class DiscordCommandNames:
- LINK = "link"
- UNLINK = "unlink"
- HELP = "help"
-
-
class DiscordCommandHandler(DiscordInteractionHandler):
"""
Handles logic for Discord Command interactions.
@@ -37,25 +43,39 @@ class DiscordCommandHandler(DiscordInteractionHandler):
def handle(self) -> Response:
command_name = self.request.get_command_name()
- logging_data = self.request.logging_data
+ cmd_input = CommandInput(command_name)
+ dispatcher = DiscordCommandDispatcher(self.request)
+ try:
+ message = dispatcher.dispatch(cmd_input)
+ except CommandNotMatchedError:
+ logger.warning(
+ "discord.interaction.command.unknown",
+ extra={"command": command_name, **self.request.logging_data},
+ )
+ message = dispatcher.help(cmd_input)
- if command_name == DiscordCommandNames.LINK:
- return self.link_user()
- elif command_name == DiscordCommandNames.UNLINK:
- return self.unlink_user()
- elif command_name == DiscordCommandNames.HELP:
- return self.help()
+ return self.send_message(message)
- logger.warning(
- "discord.interaction.command.unknown", extra={"command": command_name, **logging_data}
- )
- return self.help()
- def link_user(self) -> Response:
+@dataclass(frozen=True)
+class DiscordCommandDispatcher(MessagingIntegrationCommandDispatcher[str]):
+ request: DiscordRequest
+
+ @property
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ return DiscordMessagingSpec()
+
+ @property
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], str]]]:
+ yield commands.HELP, self.help
+ yield commands.LINK_IDENTITY, self.link_user
+ yield commands.UNLINK_IDENTITY, self.unlink_user
+
+ def link_user(self, _: CommandInput) -> str:
if self.request.has_identity():
- return self.send_message(
- ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str())
- )
+ return ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str())
if not self.request.integration or not self.request.user_id:
logger.warning(
@@ -65,18 +85,18 @@ def link_user(self) -> Response:
"hasUserId": self.request.user_id,
},
)
- return self.send_message(MISSING_DATA_MESSAGE)
+ return MISSING_DATA_MESSAGE
link_url = build_linking_url(
integration=self.request.integration,
discord_id=self.request.user_id,
)
- return self.send_message(LINK_USER_MESSAGE.format(url=link_url))
+ return LINK_USER_MESSAGE.format(url=link_url)
- def unlink_user(self) -> Response:
+ def unlink_user(self, _: CommandInput) -> str:
if not self.request.has_identity():
- return self.send_message(NOT_LINKED_MESSAGE)
+ return NOT_LINKED_MESSAGE
# if self.request.has_identity() then these must not be None
assert self.request.integration is not None
@@ -87,7 +107,7 @@ def unlink_user(self) -> Response:
discord_id=self.request.user_id,
)
- return self.send_message(UNLINK_USER_MESSAGE.format(url=unlink_url))
+ return UNLINK_USER_MESSAGE.format(url=unlink_url)
- def help(self) -> Response:
- return self.send_message(HELP_MESSAGE)
+ def help(self, _: CommandInput) -> str:
+ return HELP_MESSAGE
diff --git a/src/sentry/integrations/discord/webhooks/message_component.py b/src/sentry/integrations/discord/webhooks/message_component.py
index de92003824861..39d58be7d61ae 100644
--- a/src/sentry/integrations/discord/webhooks/message_component.py
+++ b/src/sentry/integrations/discord/webhooks/message_component.py
@@ -18,7 +18,12 @@
)
from sentry.integrations.discord.message_builder.base.flags import DiscordMessageFlags
from sentry.integrations.discord.requests.base import DiscordRequest
+from sentry.integrations.discord.spec import DiscordMessagingSpec
from sentry.integrations.discord.webhooks.handler import DiscordInteractionHandler
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.models.activity import ActivityIntegration
from sentry.models.group import Group
from sentry.models.grouphistory import STATUS_TO_STRING_LOOKUP, GroupHistoryStatus
@@ -85,36 +90,51 @@ def handle(self) -> Response:
)
return self.send_message(NOT_IN_ORG)
+ def record_event(interaction_type: MessagingInteractionType) -> MessagingInteractionEvent:
+ return MessagingInteractionEvent(
+ interaction_type,
+ DiscordMessagingSpec(),
+ user=self.user,
+ organization=(self.group.organization if self.group else None),
+ )
+
if self.custom_id.startswith(CustomIds.ASSIGN_DIALOG):
logger.info("discord.interaction.component.assign_dialog", extra={**logging_data})
- return self.assign_dialog()
+ with record_event(MessagingInteractionType.ASSIGN_DIALOG).capture():
+ return self.assign_dialog()
elif self.custom_id.startswith(CustomIds.ASSIGN):
logger.info(
"discord.interaction.component.assign",
extra={**logging_data, "assign_to": self.request.get_selected_options()[0]},
)
- return self.assign()
+ with record_event(MessagingInteractionType.ASSIGN).capture():
+ return self.assign()
elif self.custom_id.startswith(CustomIds.RESOLVE_DIALOG):
logger.info("discord.interaction.component.resolve_dialog", extra={**logging_data})
- return self.resolve_dialog()
+ with record_event(MessagingInteractionType.RESOLVE_DIALOG).capture():
+ return self.resolve_dialog()
elif self.custom_id.startswith(CustomIds.RESOLVE):
logger.info("discord.interaction.component.resolve", extra={**logging_data})
- return self.resolve()
+ with record_event(MessagingInteractionType.RESOLVE).capture():
+ return self.resolve()
elif self.custom_id.startswith(CustomIds.UNRESOLVE):
logger.info("discord.interaction.component.unresolve", extra={**logging_data})
- return self.unresolve()
+ with record_event(MessagingInteractionType.UNRESOLVE).capture():
+ return self.unresolve()
elif self.custom_id.startswith(CustomIds.MARK_ONGOING):
logger.info("discord.interaction.component.mark_ongoing", extra={**logging_data})
- return self.unresolve(from_mark_ongoing=True)
+ with record_event(MessagingInteractionType.MARK_ONGOING).capture():
+ return self.unresolve(from_mark_ongoing=True)
elif self.custom_id.startswith(CustomIds.ARCHIVE):
logger.info("discord.interaction.component.archive", extra={**logging_data})
- return self.archive()
+ with record_event(MessagingInteractionType.ARCHIVE).capture():
+ return self.archive()
logger.warning("discord.interaction.component.unknown_custom_id", extra={**logging_data})
return self.send_message(INVALID_GROUP_ID)
diff --git a/src/sentry/integrations/github/client.py b/src/sentry/integrations/github/client.py
index 29a8a55b0027e..98c189d50bbbf 100644
--- a/src/sentry/integrations/github/client.py
+++ b/src/sentry/integrations/github/client.py
@@ -554,7 +554,7 @@ def get_with_pagination(
with sentry_sdk.start_span(
op=f"{self.integration_type}.http.pagination",
- description=f"{self.integration_type}.http_response.pagination.{self.name}",
+ name=f"{self.integration_type}.http_response.pagination.{self.name}",
):
output = []
diff --git a/src/sentry/integrations/github/integration.py b/src/sentry/integrations/github/integration.py
index f1f79cfb102eb..8da637d0cde63 100644
--- a/src/sentry/integrations/github/integration.py
+++ b/src/sentry/integrations/github/integration.py
@@ -3,6 +3,7 @@
import logging
import re
from collections.abc import Mapping, Sequence
+from enum import StrEnum
from typing import Any
from urllib.parse import parse_qsl
@@ -18,6 +19,7 @@
from sentry.identity.github import GitHubIdentityProvider, get_user_info
from sentry.integrations.base import (
FeatureDescription,
+ IntegrationDomain,
IntegrationFeatures,
IntegrationMetadata,
IntegrationProvider,
@@ -31,6 +33,10 @@
from sentry.integrations.source_code_management.repository import RepositoryIntegration
from sentry.integrations.tasks.migrate_repo import migrate_repo
from sentry.integrations.utils.code_mapping import RepoTree
+from sentry.integrations.utils.metrics import (
+ IntegrationPipelineViewEvent,
+ IntegrationPipelineViewType,
+)
from sentry.models.repository import Repository
from sentry.organizations.absolute_url import generate_organization_url
from sentry.organizations.services.organization import RpcOrganizationSummary, organization_service
@@ -399,57 +405,93 @@ def setup(self) -> None:
)
-class OAuthLoginView(PipelineView):
- def dispatch(self, request: Request, pipeline) -> HttpResponseBase:
- self.determine_active_organization(request)
+class GitHubInstallationError(StrEnum):
+ INVALID_STATE = "Invalid state"
+ MISSING_TOKEN = "Missing access token"
+ MISSING_LOGIN = "Missing login info"
+ PENDING_DELETION = "GitHub installation pending deletion."
+ INSTALLATION_EXISTS = "Github installed on another Sentry organization."
+ USER_MISMATCH = "Authenticated user is not the same as who installed the app."
+ MISSING_INTEGRATION = "Integration does not exist."
- ghip = GitHubIdentityProvider()
- github_client_id = ghip.get_oauth_client_id()
- github_client_secret = ghip.get_oauth_client_secret()
- installation_id = request.GET.get("installation_id")
- if installation_id:
- pipeline.bind_state("installation_id", installation_id)
+def record_event(event: IntegrationPipelineViewType):
+ return IntegrationPipelineViewEvent(
+ event, IntegrationDomain.SOURCE_CODE_MANAGEMENT, GitHubIntegrationProvider.key
+ )
- if not request.GET.get("state"):
- state = pipeline.signature
- redirect_uri = absolute_uri(
- reverse("sentry-extension-setup", kwargs={"provider_id": "github"})
- )
- return self.redirect(
- f"{ghip.get_oauth_authorize_url()}?client_id={github_client_id}&state={state}&redirect_uri={redirect_uri}"
+class OAuthLoginView(PipelineView):
+ def dispatch(self, request: Request, pipeline) -> HttpResponseBase:
+ with record_event(IntegrationPipelineViewType.OAUTH_LOGIN).capture() as lifecycle:
+ self.determine_active_organization(request)
+ lifecycle.add_extra(
+ "organization_id",
+ self.active_organization.organization.id if self.active_organization else None,
)
- # At this point, we are past the GitHub "authorize" step
- if request.GET.get("state") != pipeline.signature:
- return error(request, self.active_organization, error_short="Invalid state")
-
- # similar to OAuth2CallbackView.get_token_params
- data = {
- "code": request.GET.get("code"),
- "client_id": github_client_id,
- "client_secret": github_client_secret,
- }
-
- # similar to OAuth2CallbackView.exchange_token
- req = safe_urlopen(url=ghip.get_oauth_access_token_url(), data=data)
-
- try:
- body = safe_urlread(req).decode("utf-8")
- payload = dict(parse_qsl(body))
- except Exception:
- payload = {}
-
- if "access_token" not in payload:
- return error(request, self.active_organization, error_short="Missing access token")
-
- authenticated_user_info = get_user_info(payload["access_token"])
- if "login" not in authenticated_user_info:
- return error(request, self.active_organization, error_short="Missing login info")
+ ghip = GitHubIdentityProvider()
+ github_client_id = ghip.get_oauth_client_id()
+ github_client_secret = ghip.get_oauth_client_secret()
+
+ installation_id = request.GET.get("installation_id")
+ if installation_id:
+ pipeline.bind_state("installation_id", installation_id)
+
+ if not request.GET.get("state"):
+ state = pipeline.signature
+
+ redirect_uri = absolute_uri(
+ reverse("sentry-extension-setup", kwargs={"provider_id": "github"})
+ )
+ return self.redirect(
+ f"{ghip.get_oauth_authorize_url()}?client_id={github_client_id}&state={state}&redirect_uri={redirect_uri}"
+ )
+
+ # At this point, we are past the GitHub "authorize" step
+ if request.GET.get("state") != pipeline.signature:
+ lifecycle.record_failure({"failure_reason": GitHubInstallationError.INVALID_STATE})
+ return error(
+ request,
+ self.active_organization,
+ error_short=GitHubInstallationError.INVALID_STATE,
+ )
+
+ # similar to OAuth2CallbackView.get_token_params
+ data = {
+ "code": request.GET.get("code"),
+ "client_id": github_client_id,
+ "client_secret": github_client_secret,
+ }
- pipeline.bind_state("github_authenticated_user", authenticated_user_info["login"])
- return pipeline.next_step()
+ # similar to OAuth2CallbackView.exchange_token
+ req = safe_urlopen(url=ghip.get_oauth_access_token_url(), data=data)
+
+ try:
+ body = safe_urlread(req).decode("utf-8")
+ payload = dict(parse_qsl(body))
+ except Exception:
+ payload = {}
+
+ if "access_token" not in payload:
+ lifecycle.record_failure({"failure_reason": GitHubInstallationError.MISSING_TOKEN})
+ return error(
+ request,
+ self.active_organization,
+ error_short=GitHubInstallationError.MISSING_TOKEN,
+ )
+
+ authenticated_user_info = get_user_info(payload["access_token"])
+ if "login" not in authenticated_user_info:
+ lifecycle.record_failure({"failure_reason": GitHubInstallationError.MISSING_LOGIN})
+ return error(
+ request,
+ self.active_organization,
+ error_short=GitHubInstallationError.MISSING_LOGIN,
+ )
+
+ pipeline.bind_state("github_authenticated_user", authenticated_user_info["login"])
+ return pipeline.next_step()
class GitHubInstallation(PipelineView):
@@ -458,67 +500,82 @@ def get_app_url(self) -> str:
return f"https://github.com/apps/{slugify(name)}"
def dispatch(self, request: Request, pipeline: Pipeline) -> HttpResponseBase:
- installation_id = request.GET.get(
- "installation_id", pipeline.fetch_state("installation_id")
- )
- if installation_id is None:
- return self.redirect(self.get_app_url())
-
- pipeline.bind_state("installation_id", installation_id)
- self.determine_active_organization(request)
-
- integration_pending_deletion_exists = False
- if self.active_organization:
- # We want to wait until the scheduled deletions finish or else the
- # post install to migrate repos do not work.
- integration_pending_deletion_exists = OrganizationIntegration.objects.filter(
- integration__provider=GitHubIntegrationProvider.key,
- organization_id=self.active_organization.organization.id,
- status=ObjectStatus.PENDING_DELETION,
- ).exists()
-
- if integration_pending_deletion_exists:
- return error(
- request,
- self.active_organization,
- error_short="GitHub installation pending deletion.",
- error_long=ERR_INTEGRATION_PENDING_DELETION,
+ with record_event(IntegrationPipelineViewType.GITHUB_INSTALLATION).capture() as lifecycle:
+ installation_id = request.GET.get(
+ "installation_id", pipeline.fetch_state("installation_id")
)
+ if installation_id is None:
+ return self.redirect(self.get_app_url())
- try:
- # We want to limit GitHub integrations to 1 organization
- installations_exist = OrganizationIntegration.objects.filter(
- integration=Integration.objects.get(external_id=installation_id)
- ).exists()
-
- except Integration.DoesNotExist:
- return pipeline.next_step()
-
- if installations_exist:
- return error(
- request,
- self.active_organization,
- error_short="Github installed on another Sentry organization.",
- error_long=ERR_INTEGRATION_EXISTS_ON_ANOTHER_ORG,
+ pipeline.bind_state("installation_id", installation_id)
+ self.determine_active_organization(request)
+ lifecycle.add_extra(
+ "organization_id",
+ self.active_organization.organization.id if self.active_organization else None,
)
- # OrganizationIntegration does not exist, but Integration does exist.
- try:
- integration = Integration.objects.get(
- external_id=installation_id, status=ObjectStatus.ACTIVE
- )
- except Integration.DoesNotExist:
- return error(request, self.active_organization)
-
- # Check that the authenticated GitHub user is the same as who installed the app.
- if (
- pipeline.fetch_state("github_authenticated_user")
- != integration.metadata["sender"]["login"]
- ):
- return error(
- request,
- self.active_organization,
- error_short="Authenticated user is not the same as who installed the app",
- )
+ integration_pending_deletion_exists = False
+ if self.active_organization:
+ # We want to wait until the scheduled deletions finish or else the
+ # post install to migrate repos do not work.
+ integration_pending_deletion_exists = OrganizationIntegration.objects.filter(
+ integration__provider=GitHubIntegrationProvider.key,
+ organization_id=self.active_organization.organization.id,
+ status=ObjectStatus.PENDING_DELETION,
+ ).exists()
+
+ if integration_pending_deletion_exists:
+ lifecycle.record_failure(
+ {"failure_reason": GitHubInstallationError.PENDING_DELETION}
+ )
+ return error(
+ request,
+ self.active_organization,
+ error_short=GitHubInstallationError.PENDING_DELETION,
+ error_long=ERR_INTEGRATION_PENDING_DELETION,
+ )
+
+ try:
+ # We want to limit GitHub integrations to 1 organization
+ installations_exist = OrganizationIntegration.objects.filter(
+ integration=Integration.objects.get(external_id=installation_id)
+ ).exists()
+
+ except Integration.DoesNotExist:
+ return pipeline.next_step()
+
+ if installations_exist:
+ lifecycle.record_failure(
+ {"failure_reason": GitHubInstallationError.INSTALLATION_EXISTS}
+ )
+ return error(
+ request,
+ self.active_organization,
+ error_short=GitHubInstallationError.INSTALLATION_EXISTS,
+ error_long=ERR_INTEGRATION_EXISTS_ON_ANOTHER_ORG,
+ )
+
+ # OrganizationIntegration does not exist, but Integration does exist.
+ try:
+ integration = Integration.objects.get(
+ external_id=installation_id, status=ObjectStatus.ACTIVE
+ )
+ except Integration.DoesNotExist:
+ lifecycle.record_failure(
+ {"failure_reason": GitHubInstallationError.MISSING_INTEGRATION}
+ )
+ return error(request, self.active_organization)
+
+ # Check that the authenticated GitHub user is the same as who installed the app.
+ if (
+ pipeline.fetch_state("github_authenticated_user")
+ != integration.metadata["sender"]["login"]
+ ):
+ lifecycle.record_failure({"failure_reason": GitHubInstallationError.USER_MISMATCH})
+ return error(
+ request,
+ self.active_organization,
+ error_short=GitHubInstallationError.USER_MISMATCH,
+ )
- return pipeline.next_step()
+ return pipeline.next_step()
diff --git a/src/sentry/integrations/github/tasks/pr_comment.py b/src/sentry/integrations/github/tasks/pr_comment.py
index 3d9c1b91c8290..93536c32b9e96 100644
--- a/src/sentry/integrations/github/tasks/pr_comment.py
+++ b/src/sentry/integrations/github/tasks/pr_comment.py
@@ -9,6 +9,7 @@
from snuba_sdk import Column, Condition, Direction, Entity, Function, Op, OrderBy, Query
from snuba_sdk import Request as SnubaRequest
+from sentry import features
from sentry.constants import ObjectStatus
from sentry.integrations.github.constants import ISSUE_LOCKED_ERROR_MESSAGE, RATE_LIMITED_MESSAGE
from sentry.integrations.github.tasks.utils import PullRequestIssue
@@ -215,6 +216,22 @@ def github_comment_workflow(pullrequest_id: int, project_id: int):
top_24_issues = issue_list[:24] # 24 is the P99 for issues-per-PR
+ enabled_copilot = features.has("projects:ai-autofix", project) or features.has(
+ "organizations:autofix", organization
+ )
+ github_copilot_actions = (
+ [
+ {
+ "name": f"Root cause #{i + 1}",
+ "type": "copilot-chat",
+ "prompt": f"@sentry root cause issue {str(issue_id)} with PR URL https://github.com/{repo.name}/pull/{str(pr_key)}",
+ }
+ for i, issue_id in enumerate(top_24_issues[:3])
+ ]
+ if enabled_copilot
+ else None
+ )
+
try:
installation.create_or_update_comment(
repo=repo,
@@ -223,6 +240,7 @@ def github_comment_workflow(pullrequest_id: int, project_id: int):
pullrequest_id=pullrequest_id,
issue_list=top_24_issues,
metrics_base=MERGED_PR_METRICS_BASE,
+ github_copilot_actions=github_copilot_actions,
)
except ApiError as e:
cache.delete(cache_key)
diff --git a/src/sentry/integrations/github_enterprise/webhook.py b/src/sentry/integrations/github_enterprise/webhook.py
index 33b700d0aadad..706544f1d9272 100644
--- a/src/sentry/integrations/github_enterprise/webhook.py
+++ b/src/sentry/integrations/github_enterprise/webhook.py
@@ -17,6 +17,7 @@
from sentry import options
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.constants import ObjectStatus
from sentry.integrations.github.webhook import (
InstallationEventWebhook,
PullRequestEventWebhook,
@@ -79,6 +80,7 @@ def get_installation_metadata(event, host):
integration = integration_service.get_integration(
external_id=external_id,
provider="github_enterprise",
+ status=ObjectStatus.ACTIVE,
)
if integration is None:
metrics.incr("integrations.github_enterprise.does_not_exist")
diff --git a/src/sentry/integrations/gitlab/client.py b/src/sentry/integrations/gitlab/client.py
index f56d0ce31fdc7..3b720b16353f6 100644
--- a/src/sentry/integrations/gitlab/client.py
+++ b/src/sentry/integrations/gitlab/client.py
@@ -9,6 +9,7 @@
from requests import PreparedRequest
from sentry.identity.services.identity.model import RpcIdentity
+from sentry.integrations.base import IntegrationFeatureNotImplementedError
from sentry.integrations.gitlab.blame import fetch_file_blames
from sentry.integrations.gitlab.utils import GitLabApiClientPath
from sentry.integrations.source_code_management.commit_context import (
@@ -309,6 +310,9 @@ def get_commit(self, project_id, sha):
"""
return self.get_cached(GitLabApiClientPath.commit.format(project=project_id, sha=sha))
+ def get_merge_commit_sha_from_commit(self, repo: str, sha: str) -> str | None:
+ raise IntegrationFeatureNotImplementedError
+
def compare_commits(self, project_id, start_sha, end_sha):
"""Compare commits between two SHAs
diff --git a/src/sentry/integrations/gitlab/webhooks.py b/src/sentry/integrations/gitlab/webhooks.py
index 063cbe02fa008..ad3a507153d76 100644
--- a/src/sentry/integrations/gitlab/webhooks.py
+++ b/src/sentry/integrations/gitlab/webhooks.py
@@ -168,7 +168,7 @@ def __call__(
authors = {}
- # TODO gitlab only sends a max of 20 commits. If a push contains
+ # TODO: gitlab only sends a max of 20 commits. If a push contains
# more commits they provide a total count and require additional API
# requests to fetch the commit details
for commit in event.get("commits", []):
diff --git a/src/sentry/integrations/jira/client.py b/src/sentry/integrations/jira/client.py
index 1fd6cfa83f1d4..8bf1d6db7e488 100644
--- a/src/sentry/integrations/jira/client.py
+++ b/src/sentry/integrations/jira/client.py
@@ -8,7 +8,7 @@
from sentry.integrations.client import ApiClient
from sentry.integrations.services.integration.model import RpcIntegration
-from sentry.integrations.utils import get_query_hash
+from sentry.integrations.utils.atlassian_connect import get_query_hash
from sentry.shared_integrations.exceptions import ApiError
from sentry.utils import jwt
from sentry.utils.http import absolute_uri
@@ -25,6 +25,7 @@ class JiraCloudClient(ApiClient):
COMMENTS_URL = "/rest/api/2/issue/%s/comment"
COMMENT_URL = "/rest/api/2/issue/%s/comment/%s"
STATUS_URL = "/rest/api/2/status"
+ STATUS_SEARCH_URL = "/rest/api/2/statuses/search"
CREATE_URL = "/rest/api/2/issue"
ISSUE_URL = "/rest/api/2/issue/%s"
META_URL = "/rest/api/2/issue/createmeta"
@@ -224,3 +225,6 @@ def get_field_autocomplete(self, name, value):
return self.get_cached(
self.AUTOCOMPLETE_URL, params={"fieldName": jql_name, "fieldValue": value}
)
+
+ def get_project_statuses(self, project_id: str) -> dict[str, Any]:
+ return dict(self.get_cached(self.STATUS_SEARCH_URL, params={"projectId": project_id}))
diff --git a/src/sentry/integrations/jira/integration.py b/src/sentry/integrations/jira/integration.py
index 599e85abaf2ec..1e054814013e5 100644
--- a/src/sentry/integrations/jira/integration.py
+++ b/src/sentry/integrations/jira/integration.py
@@ -32,6 +32,7 @@
from sentry.shared_integrations.exceptions import (
ApiError,
ApiHostError,
+ ApiRateLimitedError,
ApiUnauthorized,
IntegrationError,
IntegrationFormError,
@@ -41,6 +42,7 @@
from sentry.users.services.user.service import user_service
from sentry.utils.strings import truncatechars
+from ...api.exceptions import ResourceDoesNotExist
from .client import JiraCloudClient
from .models.create_issue_metadata import JIRA_CUSTOM_FIELD_TYPES
from .utils import build_user_choice
@@ -106,10 +108,17 @@
aspects={"externalInstall": external_install},
)
+# Some Jira errors for invalid field values don't actually provide the field
+# ID in an easily mappable way, so we have to manually map known error types
+# here to make it explicit to the user what failed.
+CUSTOM_ERROR_MESSAGE_MATCHERS = [(re.compile(r"Team with id '.*' not found\.$"), "Team Field")]
+
# Hide linked issues fields because we don't have the necessary UI for fully specifying
# a valid link (e.g. "is blocked by ISSUE-1").
HIDDEN_ISSUE_FIELDS = ["issuelinks"]
+JIRA_PROJECT_SIZE_LOGGING_THRESHOLD = 5
+
class JiraIntegration(IssueSyncIntegration):
comment_key = "sync_comments"
@@ -124,7 +133,7 @@ class JiraIntegration(IssueSyncIntegration):
def use_email_scope(cls):
return settings.JIRA_USE_EMAIL_SCOPE
- def get_organization_config(self):
+ def get_organization_config(self) -> dict[str, Any]:
configuration = [
{
"name": self.outbound_status_key,
@@ -140,8 +149,7 @@ def get_organization_config(self):
"items": [], # Populated with projects
},
"mappedSelectors": {
- "on_resolve": {"choices": [], "placeholder": _("Select a status")},
- "on_unresolve": {"choices": [], "placeholder": _("Select a status")},
+ # Populated on a per-project basis below
},
"columnLabels": {
"on_resolve": _("When resolved"),
@@ -149,6 +157,7 @@ def get_organization_config(self):
},
"mappedColumnLabel": _("Jira Project"),
"formatMessageValue": False,
+ "perItemMapping": True,
},
{
"name": self.outbound_assignee_key,
@@ -206,13 +215,59 @@ def get_organization_config(self):
client = self.get_client()
- try:
- statuses = [(c["id"], c["name"]) for c in client.get_valid_statuses()]
- configuration[0]["mappedSelectors"]["on_resolve"]["choices"] = statuses
- configuration[0]["mappedSelectors"]["on_unresolve"]["choices"] = statuses
+ logging_context: dict[str, Any] = {}
+
+ if not self.org_integration:
+ raise ResourceDoesNotExist()
+
+ logging_context["org_integration_id"] = self.org_integration.id
+ logging_context["integration_id"] = self.org_integration.integration_id
+ try:
projects = [{"value": p["id"], "label": p["name"]} for p in client.get_projects_list()]
configuration[0]["addDropdown"]["items"] = projects
+
+ # We need to monitor if we're getting a large volume of requests
+ # with a significant number of projects. Issuing 5 requests or more
+ # per configuration load is something we may need to address via
+ # a bulk query.
+
+ # Jira's API supports querying all available statuses, along with
+ # their project and workflow usages, but this is paginated and may
+ # have many of the same query concerns depending on how many
+ # statuses are defined within the Jira organization.
+
+ logging_context["num_projects"] = len(projects)
+ if len(projects) > JIRA_PROJECT_SIZE_LOGGING_THRESHOLD:
+ logger.info(
+ "excessive_project_status_requests",
+ extra={
+ **logging_context,
+ },
+ )
+ # Each project can have a different set of statuses assignable for
+ # issues, so we need to create per-project mappings.
+ for proj in projects:
+ project_id = proj["value"]
+ project_statuses = client.get_project_statuses(project_id).get("values")
+ if not project_statuses:
+ continue
+
+ statuses_for_project = [(c["id"], c["name"]) for c in project_statuses]
+
+ configuration[0]["mappedSelectors"][project_id] = {
+ "on_resolve": {
+ "choices": statuses_for_project,
+ "placeholder": _("Select a status"),
+ },
+ "on_unresolve": {
+ "choices": statuses_for_project,
+ "placeholder": _("Select a status"),
+ },
+ }
+ except ApiRateLimitedError:
+ logger.warning("config_query_rate_limited", extra={**logging_context})
+ raise
except ApiError:
configuration[0]["disabled"] = True
configuration[0]["disabledReason"] = _(
@@ -485,10 +540,28 @@ def error_message_from_json(self, data):
def error_fields_from_json(self, data):
errors = data.get("errors")
- if not errors:
+ error_messages = data.get("errorMessages")
+
+ if not errors and not error_messages:
+ return None
+
+ error_data = {}
+ if error_messages:
+ # These may or may not contain field specific errors, so we manually
+ # map them
+ for message in error_messages:
+ for error_regex, key in CUSTOM_ERROR_MESSAGE_MATCHERS:
+ if error_regex.match(message):
+ error_data[key] = [message]
+
+ if errors:
+ for key, error in data.get("errors").items():
+ error_data[key] = [error]
+
+ if not error_data:
return None
- return {key: [error] for key, error in data.get("errors").items()}
+ return error_data
def search_url(self, org_slug):
"""
@@ -515,7 +588,12 @@ def build_dynamic_field(self, field_meta, group=None):
elif (
# Assignee and reporter fields
field_meta.get("autoCompleteUrl")
- and (schema.get("items") == "user" or schema["type"] == "user")
+ and (
+ schema.get("items") == "user"
+ or schema["type"] == "user"
+ or schema["type"] == "team"
+ or schema.get("items") == "team"
+ )
# Sprint and "Epic Link" fields
or schema.get("custom")
in (JIRA_CUSTOM_FIELD_TYPES["sprint"], JIRA_CUSTOM_FIELD_TYPES["epic"])
@@ -795,100 +873,6 @@ def get_create_issue_config(self, group: Group | None, user: RpcUser, **kwargs):
return fields
- def _old_clean_and_transform_issue_data(
- self, data: dict[str, Any], issue_type_meta: dict[str, Any]
- ) -> dict[str, Any]:
- """
- Get the (cached) "createmeta" from Jira to use as a "schema". Clean up
- the Jira issue by removing all fields that aren't enumerated by this
- schema. Send this cleaned data to Jira. Finally, make another API call
- to Jira to make sure the issue was created and return basic issue details.
-
- :param data: JiraCreateTicketAction object
- :return: simple object with basic Jira issue details
- """
- client = self.get_client()
- cleaned_data = {}
- user_id_field = client.user_id_field()
-
- fs = issue_type_meta["fields"]
- for field in fs.keys():
- f = fs[field]
- if field == "description":
- cleaned_data[field] = data[field]
- continue
- elif field == "summary":
- cleaned_data["summary"] = data["title"]
- continue
- elif field == "labels" and "labels" in data:
- labels = [label.strip() for label in data["labels"].split(",") if label.strip()]
- cleaned_data["labels"] = labels
- continue
- if field in data.keys():
- v = data.get(field)
- if not v:
- continue
-
- schema = f.get("schema")
- if schema:
- if schema.get("type") == "string" and not schema.get("custom"):
- cleaned_data[field] = v
- continue
- if schema["type"] == "user" or schema.get("items") == "user":
- if schema.get("custom") == JIRA_CUSTOM_FIELD_TYPES.get("multiuserpicker"):
- # custom multi-picker
- v = [{user_id_field: user_id} for user_id in v]
- else:
- v = {user_id_field: v}
- elif schema["type"] == "issuelink": # used by Parent field
- v = {"key": v}
- elif schema.get("custom") == JIRA_CUSTOM_FIELD_TYPES["epic"]:
- v = v
- elif schema.get("custom") == JIRA_CUSTOM_FIELD_TYPES["team"]:
- v = v
- elif schema.get("custom") == JIRA_CUSTOM_FIELD_TYPES["sprint"]:
- try:
- v = int(v)
- except ValueError:
- raise IntegrationError(f"Invalid sprint ({v}) specified")
- elif schema["type"] == "array" and schema.get("items") == "option":
- v = [{"value": vx} for vx in v]
- elif schema["type"] == "array" and schema.get("items") == "string":
- v = [v]
- elif schema["type"] == "array" and schema.get("items") != "string":
- v = [{"id": vx} for vx in v]
- elif schema["type"] == "option":
- v = {"value": v}
- elif schema.get("custom") == JIRA_CUSTOM_FIELD_TYPES.get("textarea"):
- v = v
- elif (
- schema["type"] == "number"
- or schema.get("custom") == JIRA_CUSTOM_FIELD_TYPES["tempo_account"]
- ):
- try:
- if "." in v:
- v = float(v)
- else:
- v = int(v)
- except ValueError:
- pass
- elif (
- schema.get("type") != "string"
- or (schema.get("items") and schema.get("items") != "string")
- or schema.get("custom") == JIRA_CUSTOM_FIELD_TYPES.get("select")
- ):
- v = {"id": v}
- cleaned_data[field] = v
-
- if not (isinstance(cleaned_data["issuetype"], dict) and "id" in cleaned_data["issuetype"]):
- # something fishy is going on with this field, working on some Jira
- # instances, and some not.
- # testing against 5.1.5 and 5.1.4 does not convert (perhaps is no longer included
- # in the projectmeta API call, and would normally be converted in the
- # above clean method.)
- cleaned_data["issuetype"] = {"id": cleaned_data["issuetype"]}
- return cleaned_data
-
def _clean_and_transform_issue_data(
self, issue_metadata: JiraIssueTypeMetadata, data: dict[str, Any]
) -> Any:
@@ -914,12 +898,9 @@ def create_issue(self, data, **kwargs):
raise IntegrationError("Could not fetch issue create configuration from Jira.")
issue_type_meta = self.get_issue_type_meta(data["issuetype"], meta)
- if features.has("organizations:new-jira-transformers", organization=self.organization):
- cleaned_data = self._clean_and_transform_issue_data(
- JiraIssueTypeMetadata.from_dict(issue_type_meta), data
- )
- else:
- cleaned_data = self._old_clean_and_transform_issue_data(data, issue_type_meta)
+ cleaned_data = self._clean_and_transform_issue_data(
+ JiraIssueTypeMetadata.from_dict(issue_type_meta), data
+ )
try:
response = client.create_issue(cleaned_data)
diff --git a/src/sentry/integrations/jira/models/create_issue_metadata.py b/src/sentry/integrations/jira/models/create_issue_metadata.py
index 2817c998c4952..feb5d529da842 100644
--- a/src/sentry/integrations/jira/models/create_issue_metadata.py
+++ b/src/sentry/integrations/jira/models/create_issue_metadata.py
@@ -30,6 +30,9 @@ class JiraSchemaTypes(str, Enum):
team = "team"
number = "number"
json = "json"
+ version = "version"
+ component = "component"
+ priority = "priority"
any = "any"
diff --git a/src/sentry/integrations/jira/utils/api.py b/src/sentry/integrations/jira/utils/api.py
index 99d4d8626db28..cdc0f2b37e6a5 100644
--- a/src/sentry/integrations/jira/utils/api.py
+++ b/src/sentry/integrations/jira/utils/api.py
@@ -9,7 +9,7 @@
from sentry.integrations.services.integration import integration_service
from sentry.integrations.services.integration.model import RpcIntegration
-from sentry.integrations.utils import sync_group_assignee_inbound
+from sentry.integrations.utils.sync import sync_group_assignee_inbound
from sentry.shared_integrations.exceptions import ApiError
from ...mixins.issues import IssueSyncIntegration
diff --git a/src/sentry/integrations/jira/utils/create_issue_schema_transformers.py b/src/sentry/integrations/jira/utils/create_issue_schema_transformers.py
index 9db9d6fd3a62d..7269e1b8774d8 100644
--- a/src/sentry/integrations/jira/utils/create_issue_schema_transformers.py
+++ b/src/sentry/integrations/jira/utils/create_issue_schema_transformers.py
@@ -44,6 +44,9 @@ def get_type_transformer_mappings(user_id_field: str) -> TransformerType:
JiraSchemaTypes.issue_link.value: lambda x: {"key": x},
JiraSchemaTypes.project.value: id_obj_transformer,
JiraSchemaTypes.number.value: parse_number_field,
+ JiraSchemaTypes.priority.value: id_obj_transformer,
+ JiraSchemaTypes.version.value: id_obj_transformer,
+        JiraSchemaTypes.component.value: id_obj_transformer,
}
return transformers
@@ -51,11 +54,6 @@ def get_type_transformer_mappings(user_id_field: str) -> TransformerType:
def get_custom_field_transformer_mappings() -> TransformerType:
transformers = {
- # TODO(Gabe): `select` type fields are broken in the UI, fix this.
- # JIRA_CUSTOM_FIELD_TYPES["select"]: identity_transformer,
- # TODO(Gabe): `epic` type fields don't currently appear in the issue
- # link dialog. Re-enable this if needed after testing.
- # JIRA_CUSTOM_FIELD_TYPES["epic"]: identity_transformer,
JIRA_CUSTOM_FIELD_TYPES["tempo_account"]: parse_number_field,
JIRA_CUSTOM_FIELD_TYPES["sprint"]: parse_number_field,
JIRA_CUSTOM_FIELD_TYPES["rank"]: id_obj_transformer,
@@ -100,10 +98,10 @@ def transform_fields(
for field in jira_fields:
field_data = data.get(field.key)
- # We don't have a mapping for this field, so it's probably extraneous.
- # TODO(Gabe): Explore raising a sentry issue for unmapped fields in
- # order for us to properly filter them out.
- if field_data is None:
+ # Skip any values that indicate no value should be provided.
+ # We have some older alert templates with "" values, which will raise
+ # if we don't skip them.
+ if field_data is None or field_data == "":
continue
field_transformer = get_transformer_for_field(
@@ -134,7 +132,7 @@ def transform_fields(
except JiraSchemaParseError as e:
raise IntegrationFormError(field_errors={field.name: str(e)}) from e
- if transformed_value:
+ if transformed_value is not None:
transformed_data[field.key] = transformed_value
return transformed_data
diff --git a/src/sentry/integrations/jira/views/sentry_installation.py b/src/sentry/integrations/jira/views/sentry_installation.py
index 653ae21dfd591..04b17b6d9fa99 100644
--- a/src/sentry/integrations/jira/views/sentry_installation.py
+++ b/src/sentry/integrations/jira/views/sentry_installation.py
@@ -3,7 +3,10 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry.integrations.utils import AtlassianConnectValidationError, get_integration_from_request
+from sentry.integrations.utils.atlassian_connect import (
+ AtlassianConnectValidationError,
+ get_integration_from_request,
+)
from sentry.utils.assets import get_asset_url
from sentry.utils.http import absolute_uri
from sentry.utils.signing import sign
diff --git a/src/sentry/integrations/jira/views/sentry_issue_details.py b/src/sentry/integrations/jira/views/sentry_issue_details.py
index dae799b6ba2ee..fb4174b0be631 100644
--- a/src/sentry/integrations/jira/views/sentry_issue_details.py
+++ b/src/sentry/integrations/jira/views/sentry_issue_details.py
@@ -17,7 +17,10 @@
from sentry.api.serializers.models.group_stream import StreamGroupSerializer
from sentry.integrations.models.external_issue import ExternalIssue
from sentry.integrations.services.integration import integration_service
-from sentry.integrations.utils import AtlassianConnectValidationError, get_integration_from_request
+from sentry.integrations.utils.atlassian_connect import (
+ AtlassianConnectValidationError,
+ get_integration_from_request,
+)
from sentry.models.group import Group
from sentry.models.organization import Organization
from sentry.shared_integrations.exceptions import ApiError
diff --git a/src/sentry/integrations/jira/webhooks/installed.py b/src/sentry/integrations/jira/webhooks/installed.py
index 48737929102ee..421ed574fd93c 100644
--- a/src/sentry/integrations/jira/webhooks/installed.py
+++ b/src/sentry/integrations/jira/webhooks/installed.py
@@ -8,7 +8,7 @@
from sentry.api.base import control_silo_endpoint
from sentry.integrations.jira.tasks import sync_metadata
from sentry.integrations.pipeline import ensure_integration
-from sentry.integrations.utils import authenticate_asymmetric_jwt, verify_claims
+from sentry.integrations.utils.atlassian_connect import authenticate_asymmetric_jwt, verify_claims
from sentry.utils import jwt
from ..integration import JiraIntegrationProvider
diff --git a/src/sentry/integrations/jira/webhooks/issue_updated.py b/src/sentry/integrations/jira/webhooks/issue_updated.py
index f2cd4d9b382ad..01b3202da3a78 100644
--- a/src/sentry/integrations/jira/webhooks/issue_updated.py
+++ b/src/sentry/integrations/jira/webhooks/issue_updated.py
@@ -13,7 +13,7 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.integrations.utils import get_integration_from_jwt
+from sentry.integrations.utils.atlassian_connect import get_integration_from_jwt
from sentry.integrations.utils.scope import bind_org_context_from_integration
from sentry.shared_integrations.exceptions import ApiError
diff --git a/src/sentry/integrations/jira/webhooks/uninstalled.py b/src/sentry/integrations/jira/webhooks/uninstalled.py
index 84fc0512d1fa4..f8ac4fd41c406 100644
--- a/src/sentry/integrations/jira/webhooks/uninstalled.py
+++ b/src/sentry/integrations/jira/webhooks/uninstalled.py
@@ -7,7 +7,7 @@
from sentry.api.base import control_silo_endpoint
from sentry.constants import ObjectStatus
from sentry.integrations.models.integration import Integration
-from sentry.integrations.utils import get_integration_from_jwt
+from sentry.integrations.utils.atlassian_connect import get_integration_from_jwt
from sentry.integrations.utils.scope import bind_org_context_from_integration
from .base import JiraWebhookBase
diff --git a/src/sentry/integrations/jira_server/utils/api.py b/src/sentry/integrations/jira_server/utils/api.py
index 8415d9e36371d..c7c6870f1f16a 100644
--- a/src/sentry/integrations/jira_server/utils/api.py
+++ b/src/sentry/integrations/jira_server/utils/api.py
@@ -6,7 +6,7 @@
from sentry.integrations.services.integration.model import RpcIntegration
from sentry.integrations.services.integration.service import integration_service
-from sentry.integrations.utils import sync_group_assignee_inbound
+from sentry.integrations.utils.sync import sync_group_assignee_inbound
if TYPE_CHECKING:
from sentry.integrations.models.integration import Integration
diff --git a/src/sentry/integrations/messaging/commands.py b/src/sentry/integrations/messaging/commands.py
new file mode 100644
index 0000000000000..32968a56e8ef2
--- /dev/null
+++ b/src/sentry/integrations/messaging/commands.py
@@ -0,0 +1,151 @@
+import itertools
+from abc import ABC, abstractmethod
+from collections.abc import Callable, Iterable
+from dataclasses import dataclass
+from typing import Generic, TypeVar
+
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
+
+
+@dataclass(frozen=True, eq=True)
+class CommandInput:
+ cmd_value: str
+ arg_values: tuple[str, ...] = ()
+
+ def get_all_tokens(self) -> Iterable[str]:
+ yield self.cmd_value
+ yield from self.arg_values
+
+ def adjust(self, slug: "CommandSlug") -> "CommandInput":
+ """Remove the args that are part of a slug."""
+ token_count = len(slug.tokens) - 1
+ slug_part = [self.cmd_value] + list(self.arg_values)[:token_count]
+ remaining_args = self.arg_values[token_count:]
+ return CommandInput(" ".join(slug_part), remaining_args)
+
+
+class CommandNotMatchedError(Exception):
+ def __init__(self, message: str, unmatched_input: CommandInput) -> None:
+ super().__init__(message)
+ self.unmatched_input = unmatched_input
+
+
+class CommandSlug:
+ def __init__(self, text: str) -> None:
+ self.tokens = tuple(token.casefold() for token in text.strip().split())
+
+ def does_match(self, cmd_input: CommandInput) -> bool:
+ if not self.tokens:
+ return cmd_input.cmd_value == "" and not cmd_input.arg_values
+ cmd_prefix = itertools.islice(cmd_input.get_all_tokens(), 0, len(self.tokens))
+ cmd_tokens = tuple(token.casefold() for token in cmd_prefix)
+ return self.tokens == cmd_tokens
+
+ def __repr__(self):
+ joined_tokens = " ".join(self.tokens)
+ return f"{type(self).__name__}({joined_tokens!r})"
+
+
+class MessagingIntegrationCommand:
+ def __init__(
+ self,
+ interaction_type: MessagingInteractionType,
+ command_text: str,
+ aliases: Iterable[str] = (),
+ ) -> None:
+ super().__init__()
+ self.interaction_type = interaction_type
+ self.command_slug = CommandSlug(command_text)
+ self.aliases = frozenset(CommandSlug(alias) for alias in aliases)
+
+ @property
+ def name(self) -> str:
+ return self.interaction_type.value
+
+ @staticmethod
+ def _to_tokens(text: str) -> tuple[str, ...]:
+ return tuple(token.casefold() for token in text.strip().split())
+
+ def get_all_command_slugs(self) -> Iterable[CommandSlug]:
+ yield self.command_slug
+ yield from self.aliases
+
+
+MESSAGING_INTEGRATION_COMMANDS = (
+ HELP := MessagingIntegrationCommand(
+ MessagingInteractionType.HELP,
+ "help",
+ aliases=("", "support", "docs"),
+ ),
+ LINK_IDENTITY := MessagingIntegrationCommand(
+ MessagingInteractionType.LINK_IDENTITY,
+ "link",
+ ),
+ UNLINK_IDENTITY := MessagingIntegrationCommand(
+ MessagingInteractionType.UNLINK_IDENTITY,
+ "unlink",
+ ),
+ LINK_TEAM := MessagingIntegrationCommand(
+ MessagingInteractionType.LINK_TEAM,
+ "link team",
+ ),
+ UNLINK_TEAM := MessagingIntegrationCommand(
+ MessagingInteractionType.UNLINK_TEAM,
+ "unlink team",
+ ),
+)
+
+R = TypeVar("R") # response
+
+
+class MessagingIntegrationCommandDispatcher(Generic[R], ABC):
+ """The set of commands handled by one messaging integration."""
+
+ @property
+ @abstractmethod
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ raise NotImplementedError
+
+ @property
+ @abstractmethod
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], R]]]:
+ raise NotImplementedError
+
+ def get_event(self, command: MessagingIntegrationCommand) -> MessagingInteractionEvent:
+ return MessagingInteractionEvent(
+ interaction_type=command.interaction_type, spec=self.integration_spec
+ )
+
+ def dispatch(self, cmd_input: CommandInput) -> R:
+ @dataclass(frozen=True)
+ class CandidateHandler:
+ command: MessagingIntegrationCommand
+ slug: CommandSlug
+ callback: Callable[[CommandInput], R]
+
+ def parsing_order(self) -> int:
+ # Sort by descending length of arg tokens. If one slug is a prefix of
+ # another (e.g., "link" and "link team"), we must check for the longer
+ # one first.
+ return -len(self.slug.tokens)
+
+ candidate_handlers = [
+ CandidateHandler(command, slug, callback)
+ for (command, callback) in self.command_handlers
+ for slug in command.get_all_command_slugs()
+ ]
+ candidate_handlers.sort(key=CandidateHandler.parsing_order)
+
+ for handler in candidate_handlers:
+ if handler.slug.does_match(cmd_input):
+ arg_input = cmd_input.adjust(handler.slug)
+ with self.get_event(handler.command).capture(assume_success=False):
+ return handler.callback(arg_input)
+
+ raise CommandNotMatchedError(f"{cmd_input=!r}", cmd_input)
diff --git a/src/sentry/integrations/messaging/linkage.py b/src/sentry/integrations/messaging/linkage.py
index a41db6a8dd75b..672a6c0a07748 100644
--- a/src/sentry/integrations/messaging/linkage.py
+++ b/src/sentry/integrations/messaging/linkage.py
@@ -14,6 +14,7 @@
from sentry import analytics, features
from sentry.api.helpers.teams import is_team_admin
+from sentry.constants import ObjectStatus
from sentry.identity.services.identity import identity_service
from sentry.integrations.messaging.spec import MessagingIntegrationSpec
from sentry.integrations.models.external_actor import ExternalActor
@@ -360,7 +361,9 @@ def handle(self, request: HttpRequest, signed_params: str) -> HttpResponseBase:
slack_id: str = params["slack_id"]
organization_id: str | None = params.get("organization_id")
- integration = integration_service.get_integration(integration_id=integration_id)
+ integration = integration_service.get_integration(
+ integration_id=integration_id, status=ObjectStatus.ACTIVE
+ )
if integration is None:
logger.info(
"integration.not_found",
diff --git a/src/sentry/integrations/messaging/metrics.py b/src/sentry/integrations/messaging/metrics.py
new file mode 100644
index 0000000000000..00c6057166829
--- /dev/null
+++ b/src/sentry/integrations/messaging/metrics.py
@@ -0,0 +1,72 @@
+from collections.abc import Mapping
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any
+
+from sentry.integrations.base import IntegrationDomain
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
+from sentry.integrations.utils.metrics import EventLifecycleMetric, EventLifecycleOutcome
+from sentry.models.organization import Organization
+from sentry.organizations.services.organization import RpcOrganization
+from sentry.users.models import User
+from sentry.users.services.user import RpcUser
+
+
+class MessagingInteractionType(Enum):
+ """A way in which a user can interact with Sentry through a messaging app."""
+
+ # Direct interactions with the user
+ HELP = "HELP"
+ LINK_IDENTITY = "LINK_IDENTITY"
+ UNLINK_IDENTITY = "UNLINK_IDENTITY"
+ LINK_TEAM = "LINK_TEAM"
+ UNLINK_TEAM = "UNLINK_TEAM"
+
+ # Interactions on Issues
+ STATUS = "STATUS"
+ ARCHIVE_DIALOG = "ARCHIVE_DIALOG"
+ ARCHIVE = "ARCHIVE"
+ ASSIGN_DIALOG = "ASSIGN_DIALOG"
+ ASSIGN = "ASSIGN"
+    UNASSIGN = "UNASSIGN"
+ RESOLVE_DIALOG = "RESOLVE_DIALOG"
+ RESOLVE = "RESOLVE"
+ UNRESOLVE = "UNRESOLVE"
+ IGNORE = "IGNORE"
+ MARK_ONGOING = "MARK_ONGOING"
+
+ # Automatic behaviors
+ UNFURL_ISSUES = "UNFURL_ISSUES"
+ UNFURL_METRIC_ALERTS = "UNFURL_METRIC_ALERTS"
+ UNFURL_DISCOVER = "UNFURL_DISCOVER"
+
+ GET_PARENT_NOTIFICATION = "GET_PARENT_NOTIFICATION"
+
+ def __str__(self) -> str:
+ return self.value.lower()
+
+
+@dataclass
+class MessagingInteractionEvent(EventLifecycleMetric):
+ """An instance to be recorded of a user interacting through a messaging app."""
+
+ interaction_type: MessagingInteractionType
+ spec: MessagingIntegrationSpec
+
+ # Optional attributes to populate extras
+ user: User | RpcUser | None = None
+ organization: Organization | RpcOrganization | None = None
+
+ def get_key(self, outcome: EventLifecycleOutcome) -> str:
+ return self.get_standard_key(
+ domain=IntegrationDomain.MESSAGING,
+ integration_name=self.spec.provider_slug,
+ interaction_type=str(self.interaction_type),
+ outcome=outcome,
+ )
+
+ def get_extras(self) -> Mapping[str, Any]:
+ return {
+ "user_id": (self.user.id if self.user else None),
+ "organization_id": (self.organization.id if self.organization else None),
+ }
diff --git a/src/sentry/integrations/metric_alerts.py b/src/sentry/integrations/metric_alerts.py
index 31e61ac367e4a..fb544a6bd9b95 100644
--- a/src/sentry/integrations/metric_alerts.py
+++ b/src/sentry/integrations/metric_alerts.py
@@ -26,7 +26,6 @@
"percentage(sessions_crashed, sessions)": "% sessions crash free rate",
"percentage(users_crashed, users)": "% users crash free rate",
}
-LOGO_URL = absolute_uri(get_asset_url("sentry", "images/sentry-email-avatar.png"))
# These should be the same as the options in the frontend
# COMPARISON_DELTA_OPTIONS
TEXT_COMPARISON_DELTA = {
@@ -39,6 +38,10 @@
}
+def logo_url() -> str:
+ return absolute_uri(get_asset_url("sentry", "images/sentry-email-avatar.png"))
+
+
def get_metric_count_from_incident(incident: Incident) -> str:
"""Returns the current or last count of an incident aggregate."""
incident_trigger = (
@@ -115,7 +118,9 @@ def incident_attachment_info(
metric_value = get_metric_count_from_incident(incident)
text = get_incident_status_text(alert_rule, metric_value)
- if features.has("organizations:anomaly-detection-alerts", incident.organization):
+ if features.has(
+ "organizations:anomaly-detection-alerts", incident.organization
+ ) and features.has("organizations:anomaly-detection-rollout", incident.organization):
text += f"\nThreshold: {alert_rule.detection_type.title()}"
title = f"{status}: {alert_rule.name}"
@@ -142,7 +147,7 @@ def incident_attachment_info(
return {
"title": title,
"text": text,
- "logo_url": LOGO_URL,
+ "logo_url": logo_url(),
"status": status,
"ts": incident.date_started,
"title_link": title_link,
@@ -211,7 +216,9 @@ def metric_alert_attachment_info(
if metric_value is not None and status != INCIDENT_STATUS[IncidentStatus.CLOSED]:
text = get_incident_status_text(alert_rule, metric_value)
- if features.has("organizations:anomaly-detection-alerts", alert_rule.organization):
+ if features.has(
+ "organizations:anomaly-detection-alerts", alert_rule.organization
+ ) and features.has("organizations:anomaly-detection-rollout", alert_rule.organization):
text += f"\nThreshold: {alert_rule.detection_type.title()}"
date_started = None
@@ -228,7 +235,7 @@ def metric_alert_attachment_info(
return {
"title": title,
"text": text,
- "logo_url": LOGO_URL,
+ "logo_url": logo_url(),
"status": status,
"date_started": date_started,
"last_triggered_date": last_triggered_date,
diff --git a/src/sentry/integrations/middleware/hybrid_cloud/parser.py b/src/sentry/integrations/middleware/hybrid_cloud/parser.py
index c83c8d051aa8e..b1740f1e250b8 100644
--- a/src/sentry/integrations/middleware/hybrid_cloud/parser.py
+++ b/src/sentry/integrations/middleware/hybrid_cloud/parser.py
@@ -239,7 +239,9 @@ def get_response_from_webhookpayload_for_integration(
regions=regions, identifier=integration.id, integration_id=integration.id
)
- def get_mailbox_identifier(self, integration: RpcIntegration, data: Mapping[str, Any]) -> str:
+ def get_mailbox_identifier(
+ self, integration: RpcIntegration | Integration, data: Mapping[str, Any]
+ ) -> str:
"""
Used by integrations with higher hook volumes to create smaller mailboxes
that can be delivered in parallel. Requires the integration to implement
diff --git a/src/sentry/integrations/mixins/issues.py b/src/sentry/integrations/mixins/issues.py
index 9a77dd4d89f9e..add7ee42679a2 100644
--- a/src/sentry/integrations/mixins/issues.py
+++ b/src/sentry/integrations/mixins/issues.py
@@ -12,11 +12,12 @@
from sentry.eventstore.models import GroupEvent
from sentry.integrations.base import IntegrationInstallation
from sentry.integrations.models.external_issue import ExternalIssue
+from sentry.integrations.services.assignment_source import AssignmentSource
from sentry.integrations.services.integration import integration_service
from sentry.integrations.tasks.sync_status_inbound import (
sync_status_inbound as sync_status_inbound_task,
)
-from sentry.integrations.utils import where_should_sync
+from sentry.integrations.utils.sync import where_should_sync
from sentry.issues.grouptype import GroupCategory
from sentry.models.group import Group
from sentry.models.grouplink import GroupLink
@@ -62,7 +63,7 @@ def from_resolve_unresolve(
class IssueBasicIntegration(IntegrationInstallation, ABC):
- def should_sync(self, attribute):
+ def should_sync(self, attribute, sync_source: AssignmentSource | None = None):
return False
def get_group_title(self, group, event, **kwargs):
@@ -378,10 +379,17 @@ class IssueSyncIntegration(IssueBasicIntegration, ABC):
outbound_assignee_key: ClassVar[str | None] = None
inbound_assignee_key: ClassVar[str | None] = None
- def should_sync(self, attribute: str) -> bool:
+ def should_sync(self, attribute: str, sync_source: AssignmentSource | None = None) -> bool:
key = getattr(self, f"{attribute}_key", None)
if key is None or self.org_integration is None:
return False
+
+ # Check that the assignment source isn't this same integration in order to
+ # prevent sync-cycles from occurring. This should still allow other
+ # integrations to propagate changes outward.
+ if sync_source and sync_source.integration_id == self.org_integration.integration_id:
+ return False
+
value: bool = self.org_integration.config.get(key, False)
return value
@@ -400,7 +408,14 @@ def sync_assignee_outbound(
raise NotImplementedError
@abstractmethod
- def sync_status_outbound(self, external_issue, is_resolved, project_id, **kwargs):
+ def sync_status_outbound(
+ self,
+ external_issue,
+ is_resolved,
+ project_id,
+ assignment_source: AssignmentSource | None = None,
+ **kwargs,
+ ):
"""
Propagate a sentry issue's status to a linked issue's status.
"""
diff --git a/src/sentry/integrations/mixins/notifications.py b/src/sentry/integrations/mixins/notifications.py
index 0565d6f1291df..80c94602facae 100644
--- a/src/sentry/integrations/mixins/notifications.py
+++ b/src/sentry/integrations/mixins/notifications.py
@@ -1,5 +1,7 @@
import logging
+from sentry_sdk import capture_message
+
from sentry.integrations.models.external_actor import ExternalActor
from sentry.models.team import Team
@@ -22,6 +24,21 @@ def notify_remove_external_team(self, external_team: ExternalActor, team: Team)
"""
Notify through the integration that an external team has been removed.
"""
+ if not external_team.external_id:
+ logger.info(
+ "notify.external_team_missing_external_id",
+ extra={
+ "external_team_id": external_team.id,
+ "team_id": team.id,
+ "team_slug": team.slug,
+ },
+ )
+ capture_message(
+ f"External team {external_team.id} has no external_id",
+ level="warning",
+ )
+ return
+
self.send_message(
channel_id=external_team.external_id,
message=SUCCESS_UNLINKED_TEAM_MESSAGE.format(team=team.slug),
diff --git a/src/sentry/integrations/models/external_actor.py b/src/sentry/integrations/models/external_actor.py
index e21955a55d329..58c4183d8da8a 100644
--- a/src/sentry/integrations/models/external_actor.py
+++ b/src/sentry/integrations/models/external_actor.py
@@ -6,6 +6,7 @@
from django.utils import timezone
from sentry.backup.scopes import RelocationScope
+from sentry.constants import ObjectStatus
from sentry.db.models import BoundedPositiveIntegerField, FlexibleForeignKey, region_silo_model
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
from sentry.hybridcloud.outbox.base import ReplicatedRegionModel
@@ -68,7 +69,9 @@ def delete(self, *args, **kwargs):
# TODO: Extract this out of the delete method into the endpoint / controller instead.
if self.team is not None:
- integration = integration_service.get_integration(integration_id=self.integration_id)
+ integration = integration_service.get_integration(
+ integration_id=self.integration_id, status=ObjectStatus.ACTIVE
+ )
if integration:
install = integration.get_installation(organization_id=self.organization.id)
team = self.team
diff --git a/src/sentry/integrations/models/external_issue.py b/src/sentry/integrations/models/external_issue.py
index 1671e2c5439db..9e12f67fe5f54 100644
--- a/src/sentry/integrations/models/external_issue.py
+++ b/src/sentry/integrations/models/external_issue.py
@@ -7,6 +7,7 @@
from django.utils import timezone
from sentry.backup.scopes import RelocationScope
+from sentry.constants import ObjectStatus
from sentry.db.models import FlexibleForeignKey, JSONField, Model, region_silo_model, sane_repr
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
from sentry.db.models.manager.base import BaseManager
@@ -89,7 +90,9 @@ class Meta:
def get_installation(self) -> Any:
from sentry.integrations.services.integration import integration_service
- integration = integration_service.get_integration(integration_id=self.integration_id)
+ integration = integration_service.get_integration(
+ integration_id=self.integration_id, status=ObjectStatus.ACTIVE
+ )
assert integration, "Integration is required to get an installation"
return integration.get_installation(organization_id=self.organization_id)
diff --git a/src/sentry/integrations/msteams/notifications.py b/src/sentry/integrations/msteams/notifications.py
index fca1a6152a81c..0152243dabc5b 100644
--- a/src/sentry/integrations/msteams/notifications.py
+++ b/src/sentry/integrations/msteams/notifications.py
@@ -82,9 +82,7 @@ def send_notification_as_msteams(
)
return
- with sentry_sdk.start_span(
- op="notification.send_msteams", description="gen_channel_integration_map"
- ):
+ with sentry_sdk.start_span(op="notification.send_msteams", name="gen_channel_integration_map"):
data = get_integrations_by_channel_by_recipient(
organization=notification.organization,
recipients=recipients,
@@ -92,13 +90,11 @@ def send_notification_as_msteams(
)
for recipient, integrations_by_channel in data.items():
- with sentry_sdk.start_span(op="notification.send_msteams", description="send_one"):
+ with sentry_sdk.start_span(op="notification.send_msteams", name="send_one"):
extra_context = (extra_context_by_actor or {}).get(recipient, {})
context = get_context(notification, recipient, shared_context, extra_context)
- with sentry_sdk.start_span(
- op="notification.send_msteams", description="gen_attachments"
- ):
+ with sentry_sdk.start_span(op="notification.send_msteams", name="gen_attachments"):
card = get_notification_card(notification, context, recipient)
for channel, integration in integrations_by_channel.items():
@@ -107,7 +103,7 @@ def send_notification_as_msteams(
client = MsTeamsClient(integration)
try:
with sentry_sdk.start_span(
- op="notification.send_msteams", description="notify_recipient"
+ op="notification.send_msteams", name="notify_recipient"
):
client.send_card(conversation_id, card)
diff --git a/src/sentry/integrations/msteams/parsing.py b/src/sentry/integrations/msteams/parsing.py
index 69e5a98687f0d..67e778c870bda 100644
--- a/src/sentry/integrations/msteams/parsing.py
+++ b/src/sentry/integrations/msteams/parsing.py
@@ -2,6 +2,7 @@
from collections.abc import Mapping
from typing import Any
+from sentry.constants import ObjectStatus
from sentry.integrations.msteams.spec import PROVIDER
from sentry.integrations.services.integration import integration_service
from sentry.integrations.services.integration.model import RpcIntegration
@@ -23,14 +24,18 @@ def get_integration_from_channel_data(data: Mapping[str, Any]) -> RpcIntegration
team_id = _infer_team_id_from_channel_data(data=data)
if team_id is None:
return None
- return integration_service.get_integration(provider=PROVIDER, external_id=team_id)
+ return integration_service.get_integration(
+ provider=PROVIDER, external_id=team_id, status=ObjectStatus.ACTIVE
+ )
def get_integration_for_tenant(data: Mapping[str, Any]) -> RpcIntegration | None:
try:
channel_data = data["channelData"]
tenant_id = channel_data["tenant"]["id"]
- return integration_service.get_integration(provider=PROVIDER, external_id=tenant_id)
+ return integration_service.get_integration(
+ provider=PROVIDER, external_id=tenant_id, status=ObjectStatus.ACTIVE
+ )
except Exception as err:
logger.info("failed to get tenant id from request data", exc_info=err, extra={"data": data})
return None
@@ -56,7 +61,9 @@ def get_integration_from_card_action(data: Mapping[str, Any]) -> RpcIntegration
integration_id = _infer_integration_id_from_card_action(data=data)
if integration_id is None:
return None
- return integration_service.get_integration(integration_id=integration_id)
+ return integration_service.get_integration(
+ integration_id=integration_id, status=ObjectStatus.ACTIVE
+ )
def can_infer_integration(data: Mapping[str, Any]) -> bool:
diff --git a/src/sentry/integrations/msteams/webhook.py b/src/sentry/integrations/msteams/webhook.py
index dbfe7fd00f318..fb40c4b04e05c 100644
--- a/src/sentry/integrations/msteams/webhook.py
+++ b/src/sentry/integrations/msteams/webhook.py
@@ -2,7 +2,8 @@
import logging
import time
-from collections.abc import Callable, Mapping
+from collections.abc import Callable, Iterable, Mapping
+from dataclasses import dataclass
from enum import Enum
from typing import Any, cast
@@ -18,10 +19,23 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import Endpoint, all_silo_endpoint
+from sentry.constants import ObjectStatus
from sentry.identity.services.identity import identity_service
from sentry.identity.services.identity.model import RpcIdentity
+from sentry.integrations.messaging import commands
+from sentry.integrations.messaging.commands import (
+ CommandInput,
+ CommandNotMatchedError,
+ MessagingIntegrationCommand,
+ MessagingIntegrationCommandDispatcher,
+)
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
from sentry.integrations.msteams import parsing
-from sentry.integrations.msteams.spec import PROVIDER
+from sentry.integrations.msteams.spec import PROVIDER, MsTeamsMessagingSpec
from sentry.integrations.services.integration import integration_service
from sentry.models.activity import ActivityIntegration
from sentry.models.apikey import ApiKey
@@ -447,22 +461,21 @@ def _make_action_data(self, data: Mapping[str, Any], user_id: int) -> dict[str,
action_data = {"assignedTo": ""}
return action_data
+ _ACTION_TYPES = {
+ ACTION_TYPE.RESOLVE: ("resolve", MessagingInteractionType.RESOLVE),
+ ACTION_TYPE.IGNORE: ("ignore", MessagingInteractionType.IGNORE),
+ ACTION_TYPE.ASSIGN: ("assign", MessagingInteractionType.ASSIGN),
+ ACTION_TYPE.UNRESOLVE: ("unresolve", MessagingInteractionType.UNRESOLVE),
+ ACTION_TYPE.UNASSIGN: ("unassign", MessagingInteractionType.UNASSIGN),
+ }
+
def _issue_state_change(self, group: Group, identity: RpcIdentity, data) -> Response:
event_write_key = ApiKey(
organization_id=group.project.organization_id, scope_list=["event:write"]
)
- # undoing the enum structure of ACTION_TYPE to
- # get a more sensible analytics_event
- action_types = {
- ACTION_TYPE.RESOLVE: "resolve",
- ACTION_TYPE.IGNORE: "ignore",
- ACTION_TYPE.ASSIGN: "assign",
- ACTION_TYPE.UNRESOLVE: "unresolve",
- ACTION_TYPE.UNASSIGN: "unassign",
- }
action_data = self._make_action_data(data, identity.user_id)
- status = action_types[data["payload"]["actionType"]]
+ status, interaction_type = self._ACTION_TYPES[data["payload"]["actionType"]]
analytics_event = f"integrations.msteams.{status}"
analytics.record(
analytics_event,
@@ -470,13 +483,19 @@ def _issue_state_change(self, group: Group, identity: RpcIdentity, data) -> Resp
organization_id=group.project.organization.id,
)
- return client.put(
- path=f"/projects/{group.project.organization.slug}/{group.project.slug}/issues/",
- params={"id": group.id},
- data=action_data,
- user=user_service.get_user(user_id=identity.user_id),
- auth=event_write_key,
- )
+ with MessagingInteractionEvent(
+ interaction_type, MsTeamsMessagingSpec()
+ ).capture() as lifecycle:
+ response = client.put(
+ path=f"/projects/{group.project.organization.slug}/{group.project.slug}/issues/",
+ params={"id": group.id},
+ data=action_data,
+ user=user_service.get_user(user_id=identity.user_id),
+ auth=event_write_key,
+ )
+ if response.status_code >= 400:
+ lifecycle.record_failure()
+ return response
def _handle_action_submitted(self, request: Request) -> Response:
# pull out parameters
@@ -506,7 +525,9 @@ def _handle_action_submitted(self, request: Request) -> Response:
group = Group.objects.select_related("project__organization").filter(id=group_id).first()
if group:
- integration = integration_service.get_integration(integration_id=integration.id)
+ integration = integration_service.get_integration(
+ integration_id=integration.id, status=ObjectStatus.ACTIVE
+ )
if integration is None:
group = None
@@ -602,27 +623,54 @@ def _handle_channel_message(self, request: Request) -> Response:
def _handle_personal_message(self, request: Request) -> Response:
data = request.data
command_text = data.get("text", "").strip()
- lowercase_command = command_text.lower()
- conversation_id = data["conversation"]["id"]
- teams_user_id = data["from"]["id"]
-
- # only supporting unlink for now
- if "unlink" in lowercase_command:
- unlink_url = build_unlinking_url(conversation_id, data["serviceUrl"], teams_user_id)
- card = build_unlink_identity_card(unlink_url)
- elif "help" in lowercase_command:
- card = build_help_command_card()
- elif "link" == lowercase_command: # don't to match other types of link commands
- has_linked_identity = (
- identity_service.get_identity(filter={"identity_ext_id": teams_user_id}) is not None
- )
- if has_linked_identity:
- card = build_already_linked_identity_command_card()
- else:
- card = build_link_identity_command_card()
- else:
+
+ dispatcher = MsTeamsCommandDispatcher(data)
+ try:
+ card = dispatcher.dispatch(CommandInput(command_text))
+ except CommandNotMatchedError:
card = build_unrecognized_command_card(command_text)
client = get_preinstall_client(data["serviceUrl"])
- client.send_card(conversation_id, card)
+ client.send_card(dispatcher.conversation_id, card)
return self.respond(status=204)
+
+
+@dataclass(frozen=True)
+class MsTeamsCommandDispatcher(MessagingIntegrationCommandDispatcher[AdaptiveCard]):
+ data: dict[str, Any]
+
+ @property
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ return MsTeamsMessagingSpec()
+
+ @property
+ def conversation_id(self) -> str:
+ return self.data["conversation"]["id"]
+
+ @property
+ def teams_user_id(self) -> str:
+ return self.data["from"]["id"]
+
+ @property
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], AdaptiveCard]]]:
+ yield commands.HELP, (lambda _: build_help_command_card())
+ yield commands.LINK_IDENTITY, self.link_identity
+ yield commands.UNLINK_IDENTITY, self.unlink_identity
+
+ def link_identity(self, _: CommandInput) -> AdaptiveCard:
+ linked_identity = identity_service.get_identity(
+ filter={"identity_ext_id": self.teams_user_id}
+ )
+ has_linked_identity = linked_identity is not None
+ if has_linked_identity:
+ return build_already_linked_identity_command_card()
+ else:
+ return build_link_identity_command_card()
+
+ def unlink_identity(self, _: CommandInput) -> AdaptiveCard:
+ unlink_url = build_unlinking_url(
+ self.conversation_id, self.data["serviceUrl"], self.teams_user_id
+ )
+ return build_unlink_identity_card(unlink_url)
diff --git a/src/sentry/integrations/on_call/metrics.py b/src/sentry/integrations/on_call/metrics.py
new file mode 100644
index 0000000000000..1df97a5a6c744
--- /dev/null
+++ b/src/sentry/integrations/on_call/metrics.py
@@ -0,0 +1,57 @@
+from enum import Enum
+
+from attr import dataclass
+
+from sentry.integrations.base import IntegrationDomain
+from sentry.integrations.on_call.spec import OnCallSpec
+from sentry.integrations.utils.metrics import EventLifecycleMetric, EventLifecycleOutcome
+from sentry.models.organization import Organization
+from sentry.organizations.services.organization import RpcOrganization
+from sentry.users.models import User
+from sentry.users.services.user import RpcUser
+
+
+class OnCallInteractionType(Enum):
+ """
+ A way in which a user can interact with Sentry through an on-call app.
+ """
+
+ # General interactions
+ ADD_KEY = "ADD_KEY"
+ POST_INSTALL = "POST_INSTALL"
+ # Interacting with external alerts
+ CREATE = "CREATE" # create an alert in Opsgenie/Pagerduty
+ RESOLVE = "RESOLVE" # resolve an alert in Opsgenie/Pagerduty
+
+ # Opsgenie only
+ VERIFY_KEYS = "VERIFY_KEYS"
+ VERIFY_TEAM = "VERIFY_TEAM"
+ MIGRATE_PLUGIN = "MIGRATE_PLUGIN"
+
+ # PagerDuty only
+ VALIDATE_SERVICE = "VALIDATE_SERVICE"
+
+ def __str__(self) -> str:
+ return self.value.lower()
+
+
+@dataclass
+class OnCallInteractionEvent(EventLifecycleMetric):
+ """
+ An instance to be recorded of a user interacting with Sentry through an on-call app.
+ """
+
+ interaction_type: OnCallInteractionType
+ spec: OnCallSpec
+
+ # Optional attributes to populate extras
+ user: User | RpcUser | None = None
+ organization: Organization | RpcOrganization | None = None
+
+ def get_key(self, outcome: EventLifecycleOutcome) -> str:
+ return self.get_standard_key(
+ domain=IntegrationDomain.ON_CALL_SCHEDULING,
+ integration_name=self.spec.provider_slug,
+ interaction_type=str(self.interaction_type),
+ outcome=outcome,
+ )
diff --git a/src/sentry/integrations/on_call/spec.py b/src/sentry/integrations/on_call/spec.py
new file mode 100644
index 0000000000000..130c537976e8d
--- /dev/null
+++ b/src/sentry/integrations/on_call/spec.py
@@ -0,0 +1,35 @@
+from abc import ABC, abstractmethod
+
+from sentry.models.notificationaction import ActionService
+
+
+class OnCallSpec(ABC):
+ @property
+ @abstractmethod
+ def provider_slug(self):
+ raise NotImplementedError
+
+ @property
+ @abstractmethod
+ def action_service(self):
+ raise NotImplementedError
+
+
+class OpsgenieOnCallSpec(OnCallSpec):
+ @property
+ def provider_slug(self):
+ return "opsgenie"
+
+ @property
+ def action_service(self):
+ return ActionService.OPSGENIE
+
+
+class PagerDutyOnCallSpec(OnCallSpec):
+ @property
+ def provider_slug(self):
+ return "pagerduty"
+
+ @property
+ def action_service(self):
+ return ActionService.PAGERDUTY
diff --git a/src/sentry/integrations/opsgenie/actions/form.py b/src/sentry/integrations/opsgenie/actions/form.py
index b2b6284dd0d9b..a6a29d9d20814 100644
--- a/src/sentry/integrations/opsgenie/actions/form.py
+++ b/src/sentry/integrations/opsgenie/actions/form.py
@@ -1,19 +1,16 @@
from __future__ import annotations
from collections.abc import Mapping
-from typing import Any, cast
+from typing import Any
from django import forms
from django.utils.translation import gettext_lazy as _
-from sentry.integrations.opsgenie.integration import OpsgenieIntegration
+from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.opsgenie.metrics import record_event
from sentry.integrations.opsgenie.utils import get_team
from sentry.integrations.services.integration import integration_service
-from sentry.integrations.services.integration.model import (
- RpcIntegration,
- RpcOrganizationIntegration,
-)
-from sentry.shared_integrations.exceptions import ApiError
+from sentry.integrations.services.integration.model import RpcOrganizationIntegration
INVALID_TEAM = 1
INVALID_KEY = 2
@@ -59,65 +56,41 @@ def __init__(self, *args, **kwargs):
def _get_team_status(
self,
team_id: str | None,
- integration: RpcIntegration,
org_integration: RpcOrganizationIntegration,
) -> int:
team = get_team(team_id, org_integration)
if not team or not team_id:
return INVALID_TEAM
- install = cast(
- "OpsgenieIntegration",
- integration.get_installation(organization_id=org_integration.organization_id),
- )
- client = install.get_keyring_client(keyid=team_id)
- # the integration should be of type "sentry"
- # there's no way to authenticate that a key is an integration key
- # without specifying the type... even though the type is arbitrary
- # and all integration keys do the same thing
- try:
- client.authorize_integration(type="sentry")
- except ApiError:
- return INVALID_KEY
-
return VALID_TEAM
def _validate_team(self, team_id: str | None, integration_id: int | None) -> None:
- params = {
- "account": dict(self.fields["account"].choices).get(integration_id),
- "team": dict(self.fields["team"].choices).get(team_id),
- }
- integration = integration_service.get_integration(
- integration_id=integration_id, provider="opsgenie"
- )
- org_integration = integration_service.get_organization_integration(
- integration_id=integration_id,
- organization_id=self.org_id,
- )
- if integration is None or org_integration is None:
- raise forms.ValidationError(
- _("The Opsgenie integration does not exist."),
- code="invalid_integration",
- params=params,
- )
- team_status = self._get_team_status(
- team_id=team_id, integration=integration, org_integration=org_integration
- )
- if team_status == INVALID_TEAM:
- raise forms.ValidationError(
- _('The team "%(team)s" does not belong to the %(account)s Opsgenie account.'),
- code="invalid_team",
- params=params,
+ with record_event(OnCallInteractionType.VERIFY_TEAM).capture():
+ params = {
+ "account": dict(self.fields["account"].choices).get(integration_id),
+ "team": dict(self.fields["team"].choices).get(team_id),
+ }
+ integration = integration_service.get_integration(
+ integration_id=integration_id, provider="opsgenie"
)
- elif team_status == INVALID_KEY:
- raise forms.ValidationError(
- _(
- 'The provided API key is invalid. Please make sure that the Opsgenie API \
- key is an integration key of type "Sentry" that has configuration access.'
- ),
- code="invalid_key",
- params=params,
+ org_integration = integration_service.get_organization_integration(
+ integration_id=integration_id,
+ organization_id=self.org_id,
)
+ if integration is None or org_integration is None:
+ raise forms.ValidationError(
+ _("The Opsgenie integration does not exist."),
+ code="invalid_integration",
+ params=params,
+ )
+
+ team_status = self._get_team_status(team_id=team_id, org_integration=org_integration)
+ if team_status == INVALID_TEAM:
+ raise forms.ValidationError(
+ _('The team "%(team)s" does not belong to the %(account)s Opsgenie account.'),
+ code="invalid_team",
+ params=params,
+ )
def clean(self) -> dict[str, Any] | None:
cleaned_data = super().clean()
diff --git a/src/sentry/integrations/opsgenie/actions/notification.py b/src/sentry/integrations/opsgenie/actions/notification.py
index a408d3f0420d4..c7c4ea429af54 100644
--- a/src/sentry/integrations/opsgenie/actions/notification.py
+++ b/src/sentry/integrations/opsgenie/actions/notification.py
@@ -72,7 +72,10 @@ def send_notification(event, futures):
try:
rules = [f.rule for f in futures]
resp = client.send_notification(
- data=event, priority=priority, rules=rules, notification_uuid=notification_uuid
+ data=event,
+ priority=priority,
+ rules=rules,
+ notification_uuid=notification_uuid,
)
except ApiError as e:
logger.info(
diff --git a/src/sentry/integrations/opsgenie/client.py b/src/sentry/integrations/opsgenie/client.py
index f474e18be7392..a7bd3755418b1 100644
--- a/src/sentry/integrations/opsgenie/client.py
+++ b/src/sentry/integrations/opsgenie/client.py
@@ -5,6 +5,8 @@
from sentry.eventstore.models import Event, GroupEvent
from sentry.integrations.client import ApiClient
from sentry.integrations.models.integration import Integration
+from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.opsgenie.metrics import record_event
from sentry.integrations.services.integration.model import RpcIntegration
from sentry.models.group import Group
from sentry.shared_integrations.client.base import BaseApiResponseX
@@ -36,11 +38,6 @@ def get_alerts(self, limit: int | None = 1) -> BaseApiResponseX:
path = f"/alerts?limit={limit}"
return self.get(path=path, headers=self._get_auth_headers())
- def authorize_integration(self, type: str) -> BaseApiResponseX:
- body = {"type": type}
- path = "/integrations/authenticate"
- return self.post(path=path, headers=self._get_auth_headers(), data=body)
-
def _get_rule_urls(self, group, rules):
organization = group.project.organization
rule_urls = []
@@ -97,6 +94,7 @@ def send_notification(
notification_uuid: str | None = None,
):
headers = self._get_auth_headers()
+ interaction_type = OnCallInteractionType.CREATE
if isinstance(data, (Event, GroupEvent)):
group = data.group
event = data
@@ -111,6 +109,7 @@ def send_notification(
else:
# if we're acknowledging the alert—meaning that the Sentry alert was resolved
if data.get("identifier"):
+ interaction_type = OnCallInteractionType.RESOLVE
alias = data["identifier"]
resp = self.post(
f"/alerts/{alias}/acknowledge",
@@ -121,5 +120,6 @@ def send_notification(
return resp
# this is a metric alert
payload = data
- resp = self.post("/alerts", data=payload, headers=headers)
+ with record_event(interaction_type).capture():
+ resp = self.post("/alerts", data=payload, headers=headers)
return resp
diff --git a/src/sentry/integrations/opsgenie/integration.py b/src/sentry/integrations/opsgenie/integration.py
index 29315fd8334c9..8842f60895613 100644
--- a/src/sentry/integrations/opsgenie/integration.py
+++ b/src/sentry/integrations/opsgenie/integration.py
@@ -10,6 +10,7 @@
from rest_framework.request import Request
from rest_framework.serializers import ValidationError
+from sentry.constants import ObjectStatus
from sentry.integrations.base import (
FeatureDescription,
IntegrationFeatures,
@@ -19,6 +20,8 @@
)
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
+from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.opsgenie.metrics import record_event
from sentry.integrations.opsgenie.tasks import migrate_opsgenie_plugin
from sentry.organizations.services.organization import RpcOrganizationSummary
from sentry.pipeline import PipelineView
@@ -119,7 +122,7 @@ def dispatch(self, request: Request, pipeline) -> HttpResponse: # type: ignore[
class OpsgenieIntegration(IntegrationInstallation):
- def get_keyring_client(self, keyid: str) -> OpsgenieClient:
+ def get_keyring_client(self, keyid: int | str) -> OpsgenieClient:
org_integration = self.org_integration
assert org_integration, "OrganizationIntegration is required"
team = get_team(team_id=keyid, org_integration=org_integration)
@@ -169,7 +172,7 @@ def update_organization_config(self, data: MutableMapping[str, Any]) -> None:
}
integration = integration_service.get_integration(
- organization_integration_id=self.org_integration.id
+ organization_integration_id=self.org_integration.id, status=ObjectStatus.ACTIVE
)
if not integration:
raise IntegrationError("Integration does not exist")
@@ -180,40 +183,37 @@ def update_organization_config(self, data: MutableMapping[str, Any]) -> None:
team["id"] = str(self.org_integration.id) + "-" + team["team"]
invalid_keys = []
- for team in teams:
- # skip if team, key pair already exist in config
- if (team["team"], team["integration_key"]) in existing_team_key_pairs:
- continue
-
- integration_key = team["integration_key"]
-
- # validate integration keys
- client = OpsgenieClient(
- integration=integration,
- integration_key=integration_key,
- )
- # call an API to test the integration key
- try:
- client.get_alerts()
- except ApiError as e:
- logger.info(
- "opsgenie.authorization_error",
- extra={"error": str(e), "status_code": e.code},
+ with record_event(OnCallInteractionType.VERIFY_KEYS).capture():
+ for team in teams:
+ # skip if team, key pair already exist in config
+ if (team["team"], team["integration_key"]) in existing_team_key_pairs:
+ continue
+
+ integration_key = team["integration_key"]
+
+ # validate integration keys
+ client = OpsgenieClient(
+ integration=integration,
+ integration_key=integration_key,
)
- if e.code == 429:
- raise ApiRateLimitedError(
- "Too many requests. Please try updating one team/key at a time."
- )
- elif e.code == 401:
- invalid_keys.append(integration_key)
- pass
- elif e.json and e.json.get("message"):
- raise ApiError(e.json["message"])
- else:
- raise
-
- if invalid_keys:
- raise ApiUnauthorized(f"Invalid integration key: {str(invalid_keys)}")
+ # call an API to test the integration key
+ try:
+ client.get_alerts()
+ except ApiError as e:
+ if e.code == 429:
+ raise ApiRateLimitedError(
+ "Too many requests. Please try updating one team/key at a time."
+ )
+ elif e.code == 401:
+ invalid_keys.append(integration_key)
+ pass
+ elif e.json and e.json.get("message"):
+ raise ApiError(e.json["message"])
+ else:
+ raise
+
+ if invalid_keys:
+ raise ApiUnauthorized(f"Invalid integration key: {str(invalid_keys)}")
return super().update_organization_config(data)
@@ -256,21 +256,22 @@ def post_install(
organization: RpcOrganizationSummary,
extra: Any | None = None,
) -> None:
- try:
- org_integration = OrganizationIntegration.objects.get(
- integration=integration, organization_id=organization.id
- )
+ with record_event(OnCallInteractionType.POST_INSTALL).capture():
+ try:
+ org_integration = OrganizationIntegration.objects.get(
+ integration=integration, organization_id=organization.id
+ )
- except OrganizationIntegration.DoesNotExist:
- logger.exception("The Opsgenie post_install step failed.")
- return
+ except OrganizationIntegration.DoesNotExist:
+ logger.exception("The Opsgenie post_install step failed.")
+ return
- key = integration.metadata["api_key"]
- team_table = []
- if key:
- team_name = "my-first-key"
- team_id = f"{org_integration.id}-{team_name}"
- team_table.append({"team": team_name, "id": team_id, "integration_key": key})
+ key = integration.metadata["api_key"]
+ team_table = []
+ if key:
+ team_name = "my-first-key"
+ team_id = f"{org_integration.id}-{team_name}"
+ team_table.append({"team": team_name, "id": team_id, "integration_key": key})
- org_integration.config.update({"team_table": team_table})
- org_integration.update(config=org_integration.config)
+ org_integration.config.update({"team_table": team_table})
+ org_integration.update(config=org_integration.config)
diff --git a/src/sentry/integrations/opsgenie/metrics.py b/src/sentry/integrations/opsgenie/metrics.py
new file mode 100644
index 0000000000000..fcdde5ce27258
--- /dev/null
+++ b/src/sentry/integrations/opsgenie/metrics.py
@@ -0,0 +1,6 @@
+from sentry.integrations.on_call.metrics import OnCallInteractionEvent, OnCallInteractionType
+from sentry.integrations.on_call.spec import OpsgenieOnCallSpec
+
+
+def record_event(event: OnCallInteractionType):
+ return OnCallInteractionEvent(event, OpsgenieOnCallSpec())
diff --git a/src/sentry/integrations/opsgenie/tasks.py b/src/sentry/integrations/opsgenie/tasks.py
index 1de4561016100..dc30dbc9d88b0 100644
--- a/src/sentry/integrations/opsgenie/tasks.py
+++ b/src/sentry/integrations/opsgenie/tasks.py
@@ -4,11 +4,12 @@
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
+from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.opsgenie.metrics import record_event
from sentry.integrations.services.integration.service import integration_service
from sentry.models.project import Project
from sentry.models.rule import Rule
from sentry.tasks.base import instrumented_task, retry
-from sentry.utils import metrics
ALERT_LEGACY_INTEGRATIONS = {"id": "sentry.rules.actions.notify_event.NotifyEventAction"}
ALERT_LEGACY_INTEGRATIONS_WITH_NAME = {
@@ -26,99 +27,101 @@
)
@retry(exclude=(Integration.DoesNotExist, OrganizationIntegration.DoesNotExist))
def migrate_opsgenie_plugin(integration_id: int, organization_id: int) -> None:
- from sentry_plugins.opsgenie.plugin import OpsGeniePlugin
+ with record_event(OnCallInteractionType.MIGRATE_PLUGIN).capture():
+ from sentry_plugins.opsgenie.plugin import OpsGeniePlugin
- result = integration_service.organization_context(
- organization_id=organization_id, integration_id=integration_id
- )
- integration = result.integration
- organization_integration = result.organization_integration
- if not integration:
- raise Integration.DoesNotExist
- if not organization_integration:
- raise OrganizationIntegration.DoesNotExist
+ result = integration_service.organization_context(
+ organization_id=organization_id, integration_id=integration_id
+ )
+ integration = result.integration
+ organization_integration = result.organization_integration
+ if not integration:
+ raise Integration.DoesNotExist
+ if not organization_integration:
+ raise OrganizationIntegration.DoesNotExist
- config = organization_integration.config
- team_table = config["team_table"]
+ config = organization_integration.config
+ team_table = config["team_table"]
- seen_keys = {}
- for i in range(len(config["team_table"])):
- seen_keys[team_table[i]["integration_key"]] = i
+ seen_keys = {}
+ for i in range(len(config["team_table"])):
+ seen_keys[team_table[i]["integration_key"]] = i
- all_projects = Project.objects.filter(organization_id=organization_id)
- plugin = OpsGeniePlugin()
- opsgenie_projects = [
- p for p in all_projects if plugin.is_enabled(project=p) and plugin.is_configured(project=p)
- ]
+ all_projects = Project.objects.filter(organization_id=organization_id)
+ plugin = OpsGeniePlugin()
+ opsgenie_projects = [
+ p
+ for p in all_projects
+ if plugin.is_enabled(project=p) and plugin.is_configured(project=p)
+ ]
- # migrate keys
- for project in opsgenie_projects:
- api_key = plugin.get_option("api_key", project)
- if seen_keys.get(api_key) is None:
- seen_keys[api_key] = len(team_table)
- team = {
- "team": f"{project.name} [MIGRATED]",
- "id": f"{str(organization_integration.id)}-{project.name}",
- "integration_key": api_key,
- }
- team_table.append(team)
- config.update({"team_table": team_table})
+ # migrate keys
+ for project in opsgenie_projects:
+ api_key = plugin.get_option("api_key", project)
+ if seen_keys.get(api_key) is None:
+ seen_keys[api_key] = len(team_table)
+ team = {
+ "team": f"{project.name} [MIGRATED]",
+ "id": f"{str(organization_integration.id)}-{project.name}",
+ "integration_key": api_key,
+ }
+ team_table.append(team)
+ config.update({"team_table": team_table})
- oi = integration_service.update_organization_integration(
- org_integration_id=organization_integration.id, config=config
- )
- if not oi: # the call to update_organization_integration failed
- raise Exception("Failed to update team table.")
- logger.info(
- "api_keys.migrated",
- extra={
- "integration_id": integration_id,
- "organization_id": organization_id,
- "plugin": plugin.slug,
- },
- )
+ oi = integration_service.update_organization_integration(
+ org_integration_id=organization_integration.id, config=config
+ )
+ if not oi: # the call to update_organization_integration failed
+ raise Exception("Failed to update team table.")
+ logger.info(
+ "api_keys.migrated",
+ extra={
+ "integration_id": integration_id,
+ "organization_id": organization_id,
+ "plugin": plugin.slug,
+ },
+ )
- # migrate alert rules
- for project in opsgenie_projects:
- api_key = plugin.get_option("api_key", project)
- team = team_table[seen_keys[api_key]]
- rules_to_migrate = [
- rule
- for rule in Rule.objects.filter(project_id=project.id)
- if ALERT_LEGACY_INTEGRATIONS in rule.data["actions"]
- or ALERT_LEGACY_INTEGRATIONS_WITH_NAME in rule.data["actions"]
- ]
- with transaction.atomic(router.db_for_write(Rule)):
- for rule in rules_to_migrate:
- actions = rule.data["actions"]
- new_action = {
- "id": "sentry.integrations.opsgenie.notify_action.OpsgenieNotifyTeamAction",
- "account": integration.id,
- "team": team["id"],
- }
- if new_action not in actions:
- actions.append(new_action)
- logger.info(
- "alert_rule.migrated",
- extra={
- "integration_id": integration_id,
- "organization_id": organization_id,
- "project_id": project.id,
- "plugin": plugin.slug,
- },
- )
- else:
- logger.info(
- "alert_rule.already_exists",
- extra={
- "integration_id": integration_id,
- "organization_id": organization_id,
- "project_id": project.id,
- "plugin": plugin.slug,
- },
- )
- rule.save()
+ # migrate alert rules
+ for project in opsgenie_projects:
+ api_key = plugin.get_option("api_key", project)
+ team = team_table[seen_keys[api_key]]
+ rules_to_migrate = [
+ rule
+ for rule in Rule.objects.filter(project_id=project.id)
+ if ALERT_LEGACY_INTEGRATIONS in rule.data["actions"]
+ or ALERT_LEGACY_INTEGRATIONS_WITH_NAME in rule.data["actions"]
+ ]
+ with transaction.atomic(router.db_for_write(Rule)):
+ for rule in rules_to_migrate:
+ actions = rule.data["actions"]
+ new_action = {
+ "id": "sentry.integrations.opsgenie.notify_action.OpsgenieNotifyTeamAction",
+ "account": integration.id,
+ "team": team["id"],
+ }
+ if new_action not in actions:
+ actions.append(new_action)
+ logger.info(
+ "alert_rule.migrated",
+ extra={
+ "integration_id": integration_id,
+ "organization_id": organization_id,
+ "project_id": project.id,
+ "plugin": plugin.slug,
+ },
+ )
+ else:
+ logger.info(
+ "alert_rule.already_exists",
+ extra={
+ "integration_id": integration_id,
+ "organization_id": organization_id,
+ "project_id": project.id,
+ "plugin": plugin.slug,
+ },
+ )
+ rule.save()
- # disable plugin
- plugin.reset_options(project)
- metrics.incr("opsgenie.migration_success", skip_internal=False)
+ # disable plugin
+ plugin.reset_options(project)
diff --git a/src/sentry/integrations/opsgenie/utils.py b/src/sentry/integrations/opsgenie/utils.py
index 980fe96be9ca0..62847e8d1bf78 100644
--- a/src/sentry/integrations/opsgenie/utils.py
+++ b/src/sentry/integrations/opsgenie/utils.py
@@ -59,7 +59,7 @@ def attach_custom_priority(
return data
-def get_team(team_id: str | None, org_integration: RpcOrganizationIntegration | None):
+def get_team(team_id: int | str | None, org_integration: RpcOrganizationIntegration | None):
if not org_integration:
return None
teams = org_integration.config.get("team_table")
diff --git a/src/sentry/integrations/pagerduty/actions/form.py b/src/sentry/integrations/pagerduty/actions/form.py
index bd9e2a3a7a92e..3ac96179b5479 100644
--- a/src/sentry/integrations/pagerduty/actions/form.py
+++ b/src/sentry/integrations/pagerduty/actions/form.py
@@ -6,6 +6,8 @@
from django import forms
from django.utils.translation import gettext_lazy as _
+from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.pagerduty.metrics import record_event
from sentry.integrations.services.integration import integration_service
from sentry.integrations.types import ExternalProviders
@@ -45,32 +47,33 @@ def __init__(self, *args, **kwargs):
self.fields["service"].widget.choices = self.fields["service"].choices
def _validate_service(self, service_id: int, integration_id: int) -> None:
- params = {
- "account": dict(self.fields["account"].choices).get(integration_id),
- "service": dict(self.fields["service"].choices).get(service_id),
- }
-
- org_integrations = integration_service.get_organization_integrations(
- integration_id=integration_id,
- providers=[ExternalProviders.PAGERDUTY.name],
- )
-
- if not any(
- pds
- for oi in org_integrations
- for pds in oi.config.get("pagerduty_services", [])
- if pds["id"] == service_id
- ):
- # We need to make sure that the service actually belongs to that integration,
- # meaning that it belongs under the appropriate account in PagerDuty.
- raise forms.ValidationError(
- _(
- 'The service "%(service)s" has not been granted access in the %(account)s Pagerduty account.'
- ),
- code="invalid",
- params=params,
+ with record_event(OnCallInteractionType.VALIDATE_SERVICE).capture():
+ params = {
+ "account": dict(self.fields["account"].choices).get(integration_id),
+ "service": dict(self.fields["service"].choices).get(service_id),
+ }
+
+ org_integrations = integration_service.get_organization_integrations(
+ integration_id=integration_id,
+ providers=[ExternalProviders.PAGERDUTY.name],
)
+ if not any(
+ pds
+ for oi in org_integrations
+ for pds in oi.config.get("pagerduty_services", [])
+ if pds["id"] == service_id
+ ):
+ # We need to make sure that the service actually belongs to that integration,
+ # meaning that it belongs under the appropriate account in PagerDuty.
+ raise forms.ValidationError(
+ _(
+ 'The service "%(service)s" has not been granted access in the %(account)s Pagerduty account.'
+ ),
+ code="invalid",
+ params=params,
+ )
+
def clean(self) -> dict[str, Any] | None:
cleaned_data = super().clean()
diff --git a/src/sentry/integrations/pagerduty/client.py b/src/sentry/integrations/pagerduty/client.py
index 81d4f61402a8a..ffbf667cad5b8 100644
--- a/src/sentry/integrations/pagerduty/client.py
+++ b/src/sentry/integrations/pagerduty/client.py
@@ -5,6 +5,8 @@
from sentry.api.serializers import ExternalEventSerializer, serialize
from sentry.eventstore.models import Event, GroupEvent
from sentry.integrations.client import ApiClient
+from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.pagerduty.metrics import record_event
from sentry.shared_integrations.client.base import BaseApiResponseX
LEVEL_SEVERITY_MAP = {
@@ -78,5 +80,5 @@ def send_trigger(
else:
# the payload is for a metric alert
payload = data
-
- return self.post("/", data=payload)
+ with record_event(OnCallInteractionType.CREATE).capture():
+ return self.post("/", data=payload)
diff --git a/src/sentry/integrations/pagerduty/integration.py b/src/sentry/integrations/pagerduty/integration.py
index dbddbb3f2eec2..fcf259d2c5cb0 100644
--- a/src/sentry/integrations/pagerduty/integration.py
+++ b/src/sentry/integrations/pagerduty/integration.py
@@ -19,6 +19,8 @@
)
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
+from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.pagerduty.metrics import record_event
from sentry.organizations.services.organization import RpcOrganizationSummary
from sentry.pipeline import PipelineView
from sentry.shared_integrations.exceptions import IntegrationError
@@ -67,7 +69,7 @@
class PagerDutyIntegration(IntegrationInstallation):
- def get_keyring_client(self, keyid: str) -> PagerDutyClient:
+ def get_keyring_client(self, keyid: int | str) -> PagerDutyClient:
org_integration = self.org_integration
assert org_integration, "Cannot get client without an organization integration"
@@ -179,22 +181,23 @@ def post_install(
organization: RpcOrganizationSummary,
extra: Any | None = None,
) -> None:
- services = integration.metadata["services"]
- try:
- org_integration = OrganizationIntegration.objects.get(
- integration=integration, organization_id=organization.id
- )
- except OrganizationIntegration.DoesNotExist:
- logger.exception("The PagerDuty post_install step failed.")
- return
-
- with transaction.atomic(router.db_for_write(OrganizationIntegration)):
- for service in services:
- add_service(
- org_integration,
- integration_key=service["integration_key"],
- service_name=service["name"],
+ with record_event(OnCallInteractionType.POST_INSTALL).capture():
+ services = integration.metadata["services"]
+ try:
+ org_integration = OrganizationIntegration.objects.get(
+ integration=integration, organization_id=organization.id
)
+ except OrganizationIntegration.DoesNotExist:
+ logger.exception("The PagerDuty post_install step failed.")
+ return
+
+ with transaction.atomic(router.db_for_write(OrganizationIntegration)):
+ for service in services:
+ add_service(
+ org_integration,
+ integration_key=service["integration_key"],
+ service_name=service["name"],
+ )
def build_integration(self, state):
config = orjson.loads(state.get("config"))
diff --git a/src/sentry/integrations/pagerduty/metrics.py b/src/sentry/integrations/pagerduty/metrics.py
new file mode 100644
index 0000000000000..8f82ec36285eb
--- /dev/null
+++ b/src/sentry/integrations/pagerduty/metrics.py
@@ -0,0 +1,6 @@
+from sentry.integrations.on_call.metrics import OnCallInteractionEvent, OnCallInteractionType
+from sentry.integrations.on_call.spec import PagerDutyOnCallSpec
+
+
+def record_event(event: OnCallInteractionType):
+ return OnCallInteractionEvent(event, PagerDutyOnCallSpec())
diff --git a/src/sentry/integrations/pipeline.py b/src/sentry/integrations/pipeline.py
index 34975d7c914b9..38d15d5864bb5 100644
--- a/src/sentry/integrations/pipeline.py
+++ b/src/sentry/integrations/pipeline.py
@@ -19,6 +19,7 @@
from sentry.shared_integrations.exceptions import IntegrationError, IntegrationProviderError
from sentry.silo.base import SiloMode
from sentry.users.models.identity import Identity, IdentityProvider, IdentityStatus
+from sentry.utils import metrics
from sentry.web.helpers import render_to_response
__all__ = ["IntegrationPipeline"]
@@ -85,6 +86,13 @@ def get_analytics_entry(self) -> PipelineAnalyticsEntry | None:
pipeline_type = "reauth" if self.fetch_state("integration_id") else "install"
return PipelineAnalyticsEntry("integrations.pipeline_step", pipeline_type)
+ def initialize(self) -> None:
+ super().initialize()
+
+ metrics.incr(
+ "sentry.integrations.installation_attempt", tags={"integration": self.provider.key}
+ )
+
def finish_pipeline(self):
try:
data = self.provider.build_integration(self.state.data)
@@ -118,6 +126,11 @@ def finish_pipeline(self):
)
self.provider.post_install(self.integration, self.organization, extra=extra)
self.clear_session()
+
+ metrics.incr(
+ "sentry.integrations.installation_finished", tags={"integration": self.provider.key}
+ )
+
return response
def _finish_pipeline(self, data):
diff --git a/src/sentry/integrations/services/assignment_source.py b/src/sentry/integrations/services/assignment_source.py
new file mode 100644
index 0000000000000..fbf4c85bf9f7c
--- /dev/null
+++ b/src/sentry/integrations/services/assignment_source.py
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+from dataclasses import asdict, dataclass
+from datetime import datetime
+from typing import TYPE_CHECKING, Any
+
+from django.utils import timezone
+
+if TYPE_CHECKING:
+ from sentry.integrations.models import Integration
+ from sentry.integrations.services.integration import RpcIntegration
+
+
+@dataclass(frozen=True)
+class AssignmentSource:
+ source_name: str
+ integration_id: int
+ queued: datetime = timezone.now()
+
+ @classmethod
+ def from_integration(cls, integration: Integration | RpcIntegration) -> AssignmentSource:
+ return AssignmentSource(
+ source_name=integration.name,
+ integration_id=integration.id,
+ )
+
+ def to_dict(self) -> dict[str, Any]:
+ return asdict(self)
+
+ @classmethod
+ def from_dict(cls, input_dict: dict[str, Any]) -> AssignmentSource | None:
+ try:
+ return cls(**input_dict)
+ except (ValueError, TypeError):
+ return None
diff --git a/src/sentry/integrations/services/integration/impl.py b/src/sentry/integrations/services/integration/impl.py
index 169e067997005..cbf01a3c33464 100644
--- a/src/sentry/integrations/services/integration/impl.py
+++ b/src/sentry/integrations/services/integration/impl.py
@@ -9,7 +9,6 @@
from sentry import analytics
from sentry.api.paginator import OffsetPaginator
-from sentry.api.serializers import AppPlatformEvent
from sentry.constants import SentryAppInstallationStatus
from sentry.hybridcloud.rpc.pagination import RpcPaginationArgs, RpcPaginationResult
from sentry.incidents.models.incident import INCIDENT_STATUS, IncidentStatus
@@ -35,6 +34,7 @@
serialize_organization_integration,
)
from sentry.rules.actions.notify_event_service import find_alert_rule_action_ui_component
+from sentry.sentry_apps.api.serializers.app_platform_event import AppPlatformEvent
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
from sentry.shared_integrations.exceptions import ApiError
diff --git a/src/sentry/integrations/slack/actions/notification.py b/src/sentry/integrations/slack/actions/notification.py
index bb50b9e923ef0..45731bb2f0937 100644
--- a/src/sentry/integrations/slack/actions/notification.py
+++ b/src/sentry/integrations/slack/actions/notification.py
@@ -10,6 +10,10 @@
from sentry.api.serializers.rest_framework.rule import ACTION_UUID_KEY
from sentry.constants import ISSUE_ALERTS_THREAD_DEFAULT
from sentry.eventstore.models import GroupEvent
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.models.integration import Integration
from sentry.integrations.repository import get_default_issue_alert_repository
from sentry.integrations.repository.base import NotificationMessageValidationError
@@ -28,7 +32,9 @@
SLACK_ISSUE_ALERT_SUCCESS_DATADOG_METRIC,
)
from sentry.integrations.slack.sdk_client import SlackSdkClient
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.utils.channel import SlackChannelIdData, get_channel_id
+from sentry.integrations.utils.metrics import EventLifecycle
from sentry.models.options.organization_option import OrganizationOption
from sentry.models.rule import Rule
from sentry.notifications.additional_attachment_manager import get_additional_attachment
@@ -122,41 +128,56 @@ def send_notification(event: GroupEvent, futures: Sequence[RuleFuture]) -> None:
rule_action_uuid=rule_action_uuid,
)
- # We need to search by rule action uuid and rule id, so only search if they exist
- reply_broadcast = False
- thread_ts = None
- if (
- OrganizationOption.objects.get_value(
- organization=self.project.organization,
- key="sentry:issue_alerts_thread_flag",
- default=ISSUE_ALERTS_THREAD_DEFAULT,
- )
- and rule_action_uuid
- and rule_id
- ):
- parent_notification_message = None
+ def get_thread_ts(lifecycle: EventLifecycle) -> str | None:
+ """Find the thread in which to post this notification as a reply.
+
+ Return None to post the notification as a top-level message.
+ """
+
+ # We need to search by rule action uuid and rule id, so only search if they exist
+ if not (
+ rule_action_uuid
+ and rule_id
+ and OrganizationOption.objects.get_value(
+ organization=self.project.organization,
+ key="sentry:issue_alerts_thread_flag",
+ default=ISSUE_ALERTS_THREAD_DEFAULT,
+ )
+ ):
+ return None
+
try:
parent_notification_message = self._repository.get_parent_notification_message(
rule_id=rule_id,
group_id=event.group.id,
rule_action_uuid=rule_action_uuid,
)
- except Exception:
+ except Exception as e:
+ lifecycle.record_halt(e)
+
# if there's an error trying to grab a parent notification, don't let that error block this flow
# we already log at the repository layer, no need to log again here
- pass
+ return None
- if parent_notification_message:
- # If a parent notification exists for this rule and action, then we can reply in a thread
- # Make sure we track that this reply will be in relation to the parent row
- new_notification_message_object.parent_notification_message_id = (
- parent_notification_message.id
- )
- # To reply to a thread, use the specific key in the payload as referenced by the docs
- # https://api.slack.com/methods/chat.postMessage#arg_thread_ts
- thread_ts = parent_notification_message.message_identifier
- # If this flow is triggered again for the same issue, we want it to be seen in the main channel
- reply_broadcast = True
+ if parent_notification_message is None:
+ return None
+
+ # If a parent notification exists for this rule and action, then we can reply in a thread
+ # Make sure we track that this reply will be in relation to the parent row
+ new_notification_message_object.parent_notification_message_id = (
+ parent_notification_message.id
+ )
+ # To reply to a thread, use the specific key in the payload as referenced by the docs
+ # https://api.slack.com/methods/chat.postMessage#arg_thread_ts
+ return parent_notification_message.message_identifier
+
+ with MessagingInteractionEvent(
+ MessagingInteractionType.GET_PARENT_NOTIFICATION, SlackMessagingSpec()
+ ).capture() as lifecycle:
+ thread_ts = get_thread_ts(lifecycle)
+
+ # If this flow is triggered again for the same issue, we want it to be seen in the main channel
+ reply_broadcast = thread_ts is not None
client = SlackSdkClient(integration_id=integration.id)
text = str(blocks.get("text"))
diff --git a/src/sentry/integrations/slack/message_builder/base/block.py b/src/sentry/integrations/slack/message_builder/base/block.py
index 47c532614b14f..ecbbfbff11dda 100644
--- a/src/sentry/integrations/slack/message_builder/base/block.py
+++ b/src/sentry/integrations/slack/message_builder/base/block.py
@@ -210,4 +210,4 @@ def _build_blocks(
return blocks
def as_payload(self) -> Mapping[str, Any]:
- return self.build() # type: ignore[return-value]
+ return self.build()
diff --git a/src/sentry/integrations/slack/message_builder/notifications/rule_save_edit.py b/src/sentry/integrations/slack/message_builder/notifications/rule_save_edit.py
index 74c202dde397e..0241d925a561f 100644
--- a/src/sentry/integrations/slack/message_builder/notifications/rule_save_edit.py
+++ b/src/sentry/integrations/slack/message_builder/notifications/rule_save_edit.py
@@ -48,7 +48,7 @@ def build(self) -> SlackBlock:
else:
rule_text = "*Alert rule updated*\n\n"
rule_text += f"{rule_url} in the {project_url} project was recently updated."
- # TODO potentially use old name if it's changed?
+ # TODO: potentially use old name if it's changed?
blocks.append(self.get_markdown_block(rule_text))
diff --git a/src/sentry/integrations/slack/message_builder/types.py b/src/sentry/integrations/slack/message_builder/types.py
index 0479f4ed77916..1aabe8d29f236 100644
--- a/src/sentry/integrations/slack/message_builder/types.py
+++ b/src/sentry/integrations/slack/message_builder/types.py
@@ -5,7 +5,7 @@
# TODO(mgaeta): Continue fleshing out these types.
SlackAttachment = dict[str, Any]
SlackBlock = dict[str, Any]
-SlackBody = Union[SlackAttachment, SlackBlock, list[SlackAttachment]]
+SlackBody = Union[SlackAttachment, SlackBlock]
# Attachment colors used for issues with no actions take.
LEVEL_TO_COLOR = {
diff --git a/src/sentry/integrations/slack/notifications.py b/src/sentry/integrations/slack/notifications.py
index 78c3dcc6e873a..50cfb9e9d62b6 100644
--- a/src/sentry/integrations/slack/notifications.py
+++ b/src/sentry/integrations/slack/notifications.py
@@ -54,16 +54,14 @@ def send_notification_as_slack(
Sending Slack notifications to a channel is in integrations/slack/actions/notification.py"""
service = SlackService.default()
- with sentry_sdk.start_span(
- op="notification.send_slack", description="gen_channel_integration_map"
- ):
+ with sentry_sdk.start_span(op="notification.send_slack", name="gen_channel_integration_map"):
data = get_integrations_by_channel_by_recipient(
notification.organization, recipients, ExternalProviders.SLACK
)
for recipient, integrations_by_channel in data.items():
- with sentry_sdk.start_span(op="notification.send_slack", description="send_one"):
- with sentry_sdk.start_span(op="notification.send_slack", description="gen_attachments"):
+ with sentry_sdk.start_span(op="notification.send_slack", name="send_one"):
+ with sentry_sdk.start_span(op="notification.send_slack", name="gen_attachments"):
attachments = service.get_attachments(
notification,
recipient,
diff --git a/src/sentry/integrations/slack/requests/base.py b/src/sentry/integrations/slack/requests/base.py
index 5c4e75fe1db97..f546bd9ff2f25 100644
--- a/src/sentry/integrations/slack/requests/base.py
+++ b/src/sentry/integrations/slack/requests/base.py
@@ -10,8 +10,10 @@
from slack_sdk.signature import SignatureVerifier
from sentry import options
+from sentry.constants import ObjectStatus
from sentry.identity.services.identity import RpcIdentity, identity_service
from sentry.identity.services.identity.model import RpcIdentityProvider
+from sentry.integrations.messaging.commands import CommandInput
from sentry.integrations.services.integration import RpcIntegration, integration_service
from sentry.users.services.user import RpcUser
from sentry.users.services.user.service import user_service
@@ -224,7 +226,7 @@ def _check_verification_token(self, verification_token: str) -> bool:
def validate_integration(self) -> None:
if not self._integration:
self._integration = integration_service.get_integration(
- provider="slack", external_id=self.team_id
+ provider="slack", external_id=self.team_id, status=ObjectStatus.ACTIVE
)
if not self._integration:
@@ -276,5 +278,9 @@ def get_command_and_args(self) -> tuple[str, Sequence[str]]:
return "", []
return command[0], command[1:]
+ def get_command_input(self) -> CommandInput:
+ cmd, args = self.get_command_and_args()
+ return CommandInput(cmd, tuple(args))
+
def _validate_identity(self) -> None:
self.user = self.get_identity_user()
diff --git a/src/sentry/integrations/slack/service.py b/src/sentry/integrations/slack/service.py
index e11784fef24b2..9b2dd314ca53f 100644
--- a/src/sentry/integrations/slack/service.py
+++ b/src/sentry/integrations/slack/service.py
@@ -335,7 +335,7 @@ def notify_recipient(
"""Send an "activity" or "alert rule" notification to a Slack user or team, but NOT to a channel directly.
This is used in the send_notification_as_slack function."""
- with sentry_sdk.start_span(op="notification.send_slack", description="notify_recipient"):
+ with sentry_sdk.start_span(op="notification.send_slack", name="notify_recipient"):
# Make a local copy to which we can append.
local_attachments = copy(attachments)
diff --git a/src/sentry/integrations/slack/tasks/link_slack_user_identities.py b/src/sentry/integrations/slack/tasks/link_slack_user_identities.py
index 1417383534330..6dbc706b17ec9 100644
--- a/src/sentry/integrations/slack/tasks/link_slack_user_identities.py
+++ b/src/sentry/integrations/slack/tasks/link_slack_user_identities.py
@@ -5,9 +5,10 @@
from django.utils import timezone
+from sentry.constants import ObjectStatus
from sentry.integrations.services.integration import integration_service
from sentry.integrations.slack.utils.users import SlackUserData, get_slack_data_by_user
-from sentry.integrations.utils import get_identities_by_user
+from sentry.integrations.utils.identities import get_identities_by_user
from sentry.organizations.services.organization import organization_service
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task
@@ -28,13 +29,20 @@ def link_slack_user_identities(
integration_id: int,
organization_id: int,
) -> None:
- integration = integration_service.get_integration(integration_id=integration_id)
+ integration = integration_service.get_integration(
+ integration_id=integration_id, status=ObjectStatus.ACTIVE
+ )
organization_context = organization_service.get_organization_by_id(id=organization_id)
organization = organization_context.organization if organization_context else None
if organization is None or integration is None:
logger.error(
"slack.post_install.link_identities.invalid_params",
- extra={"organization": organization_id, "integration": integration_id},
+ extra={
+ "organization_id": organization_id,
+ "integration_id": integration_id,
+ "integration": bool(integration),
+ "organization": bool(organization),
+ },
)
return None
diff --git a/src/sentry/integrations/slack/unfurl/discover.py b/src/sentry/integrations/slack/unfurl/discover.py
index 61a2be526cefd..eccb01927e41a 100644
--- a/src/sentry/integrations/slack/unfurl/discover.py
+++ b/src/sentry/integrations/slack/unfurl/discover.py
@@ -15,9 +15,14 @@
from sentry.charts import backend as charts
from sentry.charts.types import ChartType
from sentry.discover.arithmetic import is_equation
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
from sentry.integrations.slack.message_builder.discover import SlackDiscoverMessageBuilder
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.unfurl.types import Handler, UnfurlableUrl, UnfurledUrl
from sentry.models.apikey import ApiKey
from sentry.models.organization import Organization
@@ -115,6 +120,18 @@ def unfurl_discover(
integration: Integration,
links: list[UnfurlableUrl],
user: User | None = None,
+) -> UnfurledUrl:
+ event = MessagingInteractionEvent(
+ MessagingInteractionType.UNFURL_DISCOVER, SlackMessagingSpec(), user=user
+ )
+ with event.capture():
+ return _unfurl_discover(integration, links, user)
+
+
+def _unfurl_discover(
+ integration: Integration,
+ links: list[UnfurlableUrl],
+ user: User | None = None,
) -> UnfurledUrl:
org_integrations = integration_service.get_organization_integrations(
integration_id=integration.id
diff --git a/src/sentry/integrations/slack/unfurl/issues.py b/src/sentry/integrations/slack/unfurl/issues.py
index 135b9402cbe97..2524cfe30cca3 100644
--- a/src/sentry/integrations/slack/unfurl/issues.py
+++ b/src/sentry/integrations/slack/unfurl/issues.py
@@ -5,9 +5,14 @@
from django.http.request import HttpRequest
from sentry import eventstore
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.unfurl.types import (
Handler,
UnfurlableUrl,
@@ -37,6 +42,14 @@ def unfurl_issues(
for a particular issue by the URL of the yet-unfurled links a user included
in their Slack message.
"""
+ event = MessagingInteractionEvent(
+ MessagingInteractionType.UNFURL_ISSUES, SlackMessagingSpec(), user=user
+ )
+ with event.capture():
+ return _unfurl_issues(integration, links)
+
+
+def _unfurl_issues(integration: Integration, links: list[UnfurlableUrl]) -> UnfurledUrl:
org_integrations = integration_service.get_organization_integrations(
integration_id=integration.id
)
diff --git a/src/sentry/integrations/slack/unfurl/metric_alerts.py b/src/sentry/integrations/slack/unfurl/metric_alerts.py
index 5153d180531a1..9e0c895abb15f 100644
--- a/src/sentry/integrations/slack/unfurl/metric_alerts.py
+++ b/src/sentry/integrations/slack/unfurl/metric_alerts.py
@@ -14,9 +14,14 @@
from sentry.incidents.charts import build_metric_alert_chart
from sentry.incidents.models.alert_rule import AlertRule
from sentry.incidents.models.incident import Incident
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
from sentry.integrations.slack.message_builder.metric_alerts import SlackMetricAlertMessageBuilder
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.unfurl.types import (
Handler,
UnfurlableUrl,
@@ -43,6 +48,18 @@ def unfurl_metric_alerts(
integration: Integration,
links: list[UnfurlableUrl],
user: User | None = None,
+) -> UnfurledUrl:
+ event = MessagingInteractionEvent(
+ MessagingInteractionType.UNFURL_METRIC_ALERTS, SlackMessagingSpec(), user=user
+ )
+ with event.capture():
+ return _unfurl_metric_alerts(integration, links, user)
+
+
+def _unfurl_metric_alerts(
+ integration: Integration,
+ links: list[UnfurlableUrl],
+ user: User | None = None,
) -> UnfurledUrl:
alert_filter_query = Q()
incident_filter_query = Q()
diff --git a/src/sentry/integrations/slack/webhooks/action.py b/src/sentry/integrations/slack/webhooks/action.py
index 1b998f67f26cb..f97a632a8b69b 100644
--- a/src/sentry/integrations/slack/webhooks/action.py
+++ b/src/sentry/integrations/slack/webhooks/action.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import logging
+from abc import ABC, abstractmethod
from collections.abc import Mapping, MutableMapping, Sequence
from typing import Any
@@ -15,7 +16,7 @@
from slack_sdk.models.views import View
from slack_sdk.webhook import WebhookClient
-from sentry import analytics, options
+from sentry import analytics
from sentry.api import client
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
@@ -24,9 +25,12 @@
from sentry.api.helpers.group_index import update_groups
from sentry.auth.access import from_member
from sentry.exceptions import UnableToAcceptMemberInvitationException
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.services.integration import integration_service
from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder
-from sentry.integrations.slack.message_builder.types import SlackBody
from sentry.integrations.slack.metrics import (
SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
@@ -34,6 +38,7 @@
from sentry.integrations.slack.requests.action import SlackActionRequest
from sentry.integrations.slack.requests.base import SlackRequestError
from sentry.integrations.slack.sdk_client import SlackSdkClient
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.utils.errors import MODAL_NOT_FOUND, unpack_slack_api_error
from sentry.integrations.types import ExternalProviderEnum
from sentry.integrations.utils.scope import bind_org_context_from_integration
@@ -44,6 +49,7 @@
from sentry.notifications.services import notifications_service
from sentry.notifications.utils.actions import BlockKitMessageAction, MessageAction
from sentry.shared_integrations.exceptions import ApiError
+from sentry.users.models import User
from sentry.users.services.user import RpcUser
from sentry.utils import metrics
@@ -160,8 +166,7 @@ def get_group(slack_request: SlackActionRequest) -> Group | None:
def _is_message(data: Mapping[str, Any]) -> bool:
"""
- XXX(epurkhiser): Used in coordination with construct_reply.
- Bot posted messages will not have the type at all.
+ Bot posted messages will not have the type at all.
"""
return data.get("original_message", {}).get("type") == "message"
@@ -324,251 +329,6 @@ def on_status(
user_id=user.id,
)
- def build_format_options(self, options: dict[str, str]) -> list[dict[str, Any]]:
- return [
- {
- "text": {
- "type": "plain_text",
- "text": text,
- "emoji": True,
- },
- "value": value,
- }
- for text, value in options.items()
- ]
-
- def build_modal_payload(
- self,
- title: str,
- action_text: str,
- options: dict[str, str],
- initial_option_text: str,
- initial_option_value: str,
- callback_id: str,
- metadata: str,
- ) -> View:
- formatted_options = self.build_format_options(options)
-
- return View(
- type="modal",
- title={"type": "plain_text", "text": f"{title} Issue"},
- blocks=[
- {
- "type": "section",
- "text": {"type": "mrkdwn", "text": action_text},
- "accessory": {
- "type": "static_select",
- "initial_option": {
- "text": {
- "type": "plain_text",
- "text": initial_option_text,
- "emoji": True,
- },
- "value": initial_option_value,
- },
- "options": formatted_options,
- "action_id": "static_select-action",
- },
- }
- ],
- close={"type": "plain_text", "text": "Cancel"},
- submit={"type": "plain_text", "text": title},
- private_metadata=metadata,
- callback_id=callback_id,
- )
-
- def build_resolve_modal_payload(self, callback_id: str, metadata: str) -> View:
- return self.build_modal_payload(
- title="Resolve",
- action_text="Resolve",
- options=RESOLVE_OPTIONS,
- initial_option_text="Immediately",
- initial_option_value="resolved",
- callback_id=callback_id,
- metadata=metadata,
- )
-
- def build_archive_modal_payload(self, callback_id: str, metadata: str) -> View:
- return self.build_modal_payload(
- title="Archive",
- action_text="Archive",
- options=ARCHIVE_OPTIONS,
- initial_option_text="Until escalating",
- initial_option_value="ignored:archived_until_escalating",
- callback_id=callback_id,
- metadata=metadata,
- )
-
- def _update_modal(
- self,
- slack_client: SlackSdkClient,
- external_id: str,
- modal_payload: View,
- slack_request: SlackActionRequest,
- ) -> None:
- try:
- slack_client.views_update(
- external_id=external_id,
- view=modal_payload,
- )
- except SlackApiError as e:
- # If the external_id is not found, Slack we send `not_found` error
- # https://api.slack.com/methods/views.update
- if unpack_slack_api_error(e) == MODAL_NOT_FOUND:
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "update_modal"},
- )
- logging_data = slack_request.get_logging_data()
- _logger.exception(
- "slack.action.update-modal-not-found",
- extra={
- **logging_data,
- "trigger_id": slack_request.data["trigger_id"],
- "dialog": "resolve",
- },
- )
- # The modal was not found, so we need to open a new one
- self._open_modal(slack_client, modal_payload, slack_request)
- else:
- raise
-
- def _open_modal(
- self, slack_client: SlackSdkClient, modal_payload: View, slack_request: SlackActionRequest
- ) -> None:
- # Error handling is done in the calling function
- slack_client.views_open(
- trigger_id=slack_request.data["trigger_id"],
- view=modal_payload,
- )
-
- def open_resolve_dialog(self, slack_request: SlackActionRequest, group: Group) -> None:
- # XXX(epurkhiser): In order to update the original message we have to
- # keep track of the response_url in the callback_id. Definitely hacky,
- # but seems like there's no other solutions [1]:
- #
- # [1]: https://stackoverflow.com/questions/46629852/update-a-bot-message-after-responding-to-a-slack-dialog#comment80795670_46629852
- org = group.project.organization
- callback_id_dict = {
- "issue": group.id,
- "orig_response_url": slack_request.data["response_url"],
- "is_message": _is_message(slack_request.data),
- }
- if slack_request.data.get("channel"):
- callback_id_dict["channel_id"] = slack_request.data["channel"]["id"]
- callback_id_dict["rule"] = slack_request.callback_data.get("rule")
- callback_id = orjson.dumps(callback_id_dict).decode()
-
- # only add tags to metadata
- metadata_dict = callback_id_dict.copy()
- metadata_dict["tags"] = list(slack_request.get_tags())
- metadata = orjson.dumps(metadata_dict).decode()
-
- # XXX(CEO): the second you make a selection (without hitting Submit) it sends a slightly different request
- modal_payload = self.build_resolve_modal_payload(callback_id, metadata=metadata)
- slack_client = SlackSdkClient(integration_id=slack_request.integration.id)
- try:
- # We need to use the action_ts as the external_id to update the modal
- # We passed this in control when we sent the loading modal to beat the 3 second timeout
- external_id = slack_request.get_action_ts()
-
- if not external_id or not options.get("send-slack-response-from-control-silo"):
- # If we don't have an external_id or option is disabled we need to open a new modal
- self._open_modal(slack_client, modal_payload, slack_request)
- else:
- self._update_modal(slack_client, external_id, modal_payload, slack_request)
-
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "resolve_modal_open"},
- )
- except SlackApiError:
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "resolve_modal_open"},
- )
- _logger.exception(
- "slack.action.response-error",
- extra={
- "organization_id": org.id,
- "integration_id": slack_request.integration.id,
- "trigger_id": slack_request.data["trigger_id"],
- "dialog": "resolve",
- },
- )
-
- def open_archive_dialog(self, slack_request: SlackActionRequest, group: Group) -> None:
- org = group.project.organization
-
- callback_id_dict = {
- "issue": group.id,
- "orig_response_url": slack_request.data["response_url"],
- "is_message": _is_message(slack_request.data),
- "rule": slack_request.callback_data.get("rule"),
- }
-
- if slack_request.data.get("channel"):
- callback_id_dict["channel_id"] = slack_request.data["channel"]["id"]
- callback_id = orjson.dumps(callback_id_dict).decode()
-
- # only add tags to metadata
- metadata_dict = callback_id_dict.copy()
- metadata_dict["tags"] = list(slack_request.get_tags())
- metadata = orjson.dumps(metadata_dict).decode()
-
- modal_payload = self.build_archive_modal_payload(callback_id, metadata=metadata)
- slack_client = SlackSdkClient(integration_id=slack_request.integration.id)
- try:
- # We need to use the action_ts as the external_id to update the modal
- # We passed this in control when we sent the loading modal to beat the 3 second timeout
- external_id = slack_request.get_action_ts()
-
- if not external_id or not options.get("send-slack-response-from-control-silo"):
- # If we don't have an external_id or option is disabled we need to open a new modal
- self._open_modal(slack_client, modal_payload, slack_request)
- else:
- self._update_modal(slack_client, external_id, modal_payload, slack_request)
-
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "archive_modal_open"},
- )
- except SlackApiError:
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "archive_modal_open"},
- )
- _logger.exception(
- "slack.action.response-error",
- extra={
- "organization_id": org.id,
- "integration_id": slack_request.integration.id,
- "trigger_id": slack_request.data["trigger_id"],
- "dialog": "archive",
- },
- )
-
- def construct_reply(self, attachment: SlackBody, is_message: bool = False) -> SlackBody:
- # XXX(epurkhiser): Slack is inconsistent about it's expected responses
- # for interactive action requests.
- #
- # * For _unfurled_ action responses, slack expects the entire
- # attachment body used to replace the unfurled attachment to be at
- # the top level of the json response body.
- #
- # * For _bot posted message_ action responses, slack expects the
- # attachment body used to replace the attachment to be within an
- # `attachments` array.
- if is_message:
- attachment = {"attachments": [attachment]}
-
- return attachment
-
def _handle_group_actions(
self,
slack_request: SlackActionRequest,
@@ -677,23 +437,33 @@ def _handle_group_actions(
# response_url later to update it.
defer_attachment_update = False
+ def record_event(interaction_type: MessagingInteractionType) -> MessagingInteractionEvent:
+ user = request.user
+ return MessagingInteractionEvent(
+ interaction_type,
+ SlackMessagingSpec(),
+ user=(user if isinstance(user, User) else None),
+ organization=(group.project.organization if group else None),
+ )
+
# Handle interaction actions
for action in action_list:
try:
- if action.name in (
- "status",
- "unresolved:ongoing",
- ):
- self.on_status(request, identity_user, group, action)
+ if action.name in ("status", "unresolved:ongoing"):
+ with record_event(MessagingInteractionType.STATUS).capture():
+ self.on_status(request, identity_user, group, action)
elif (
action.name == "assign"
): # TODO: remove this as it is replaced by the options-load endpoint
- self.on_assign(request, identity_user, group, action)
+ with record_event(MessagingInteractionType.ASSIGN).capture():
+ self.on_assign(request, identity_user, group, action)
elif action.name == "resolve_dialog":
- self.open_resolve_dialog(slack_request, group)
+ with record_event(MessagingInteractionType.RESOLVE_DIALOG).capture():
+ _ResolveDialog().open_dialog(slack_request, group)
defer_attachment_update = True
elif action.name == "archive_dialog":
- self.open_archive_dialog(slack_request, group)
+ with record_event(MessagingInteractionType.ARCHIVE_DIALOG).capture():
+ _ArchiveDialog().open_dialog(slack_request, group)
defer_attachment_update = True
except client.ApiError as error:
return self.api_error(slack_request, group, identity_user, error, action.name)
@@ -970,3 +740,204 @@ def handle_member_approval(self, slack_request: SlackActionRequest, action: str)
)
return self.respond({"text": message})
+
+
+class _ModalDialog(ABC):
+ @property
+ @abstractmethod
+ def dialog_type(self) -> str:
+ raise NotImplementedError
+
+ def _build_format_options(self, options: dict[str, str]) -> list[dict[str, Any]]:
+ return [
+ {
+ "text": {
+ "type": "plain_text",
+ "text": text,
+ "emoji": True,
+ },
+ "value": value,
+ }
+ for text, value in options.items()
+ ]
+
+ def build_modal_payload(
+ self,
+ title: str,
+ action_text: str,
+ options: dict[str, str],
+ initial_option_text: str,
+ initial_option_value: str,
+ callback_id: str,
+ metadata: str,
+ ) -> View:
+ formatted_options = self._build_format_options(options)
+
+ return View(
+ type="modal",
+ title={"type": "plain_text", "text": f"{title} Issue"},
+ blocks=[
+ {
+ "type": "section",
+ "text": {"type": "mrkdwn", "text": action_text},
+ "accessory": {
+ "type": "static_select",
+ "initial_option": {
+ "text": {
+ "type": "plain_text",
+ "text": initial_option_text,
+ "emoji": True,
+ },
+ "value": initial_option_value,
+ },
+ "options": formatted_options,
+ "action_id": "static_select-action",
+ },
+ }
+ ],
+ close={"type": "plain_text", "text": "Cancel"},
+ submit={"type": "plain_text", "text": title},
+ private_metadata=metadata,
+ callback_id=callback_id,
+ )
+
+ @abstractmethod
+ def get_modal_payload(self, callback_id: str, metadata: str) -> View:
+ raise NotImplementedError
+
+ def _update_modal(
+ self,
+ slack_client: SlackSdkClient,
+ external_id: str,
+ modal_payload: View,
+ slack_request: SlackActionRequest,
+ ) -> None:
+ try:
+ slack_client.views_update(
+ external_id=external_id,
+ view=modal_payload,
+ )
+ except SlackApiError as e:
+            # If the external_id is not found, Slack will send a `not_found` error
+ # https://api.slack.com/methods/views.update
+ if unpack_slack_api_error(e) == MODAL_NOT_FOUND:
+ metrics.incr(
+ SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"type": "update_modal"},
+ )
+ logging_data = slack_request.get_logging_data()
+ _logger.exception(
+ "slack.action.update-modal-not-found",
+ extra={
+ **logging_data,
+ "trigger_id": slack_request.data["trigger_id"],
+ "dialog": self.dialog_type,
+ },
+ )
+ # The modal was not found, so we need to open a new one
+ self._open_modal(slack_client, modal_payload, slack_request)
+ else:
+ raise
+
+ def _open_modal(
+ self, slack_client: SlackSdkClient, modal_payload: View, slack_request: SlackActionRequest
+ ) -> None:
+ # Error handling is done in the calling function
+ slack_client.views_open(
+ trigger_id=slack_request.data["trigger_id"],
+ view=modal_payload,
+ )
+
+ def open_dialog(self, slack_request: SlackActionRequest, group: Group) -> None:
+ # XXX(epurkhiser): In order to update the original message we have to
+ # keep track of the response_url in the callback_id. Definitely hacky,
+ # but seems like there's no other solutions [1]:
+ #
+ # [1]: https://stackoverflow.com/questions/46629852/update-a-bot-message-after-responding-to-a-slack-dialog#comment80795670_46629852
+ org = group.project.organization
+
+ callback_id_dict = {
+ "issue": group.id,
+ "orig_response_url": slack_request.data["response_url"],
+ "is_message": _is_message(slack_request.data),
+ "rule": slack_request.callback_data.get("rule"),
+ }
+
+ if slack_request.data.get("channel"):
+ callback_id_dict["channel_id"] = slack_request.data["channel"]["id"]
+ callback_id = orjson.dumps(callback_id_dict).decode()
+
+ # only add tags to metadata
+ metadata_dict = callback_id_dict.copy()
+ metadata_dict["tags"] = list(slack_request.get_tags())
+ metadata = orjson.dumps(metadata_dict).decode()
+
+ # XXX(CEO): the second you make a selection (without hitting Submit) it sends a slightly different request
+ modal_payload = self.get_modal_payload(callback_id, metadata=metadata)
+ slack_client = SlackSdkClient(integration_id=slack_request.integration.id)
+ try:
+ # We need to use the action_ts as the external_id to update the modal
+ # We passed this in control when we sent the loading modal to beat the 3 second timeout
+ external_id = slack_request.get_action_ts()
+
+ if not external_id:
+                # If we don't have an external_id we need to open a new modal
+ self._open_modal(slack_client, modal_payload, slack_request)
+ else:
+ self._update_modal(slack_client, external_id, modal_payload, slack_request)
+
+ metrics.incr(
+ SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"type": f"{self.dialog_type}_modal_open"},
+ )
+ except SlackApiError:
+ metrics.incr(
+ SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"type": f"{self.dialog_type}_modal_open"},
+ )
+ _logger.exception(
+ "slack.action.response-error",
+ extra={
+ "organization_id": org.id,
+ "integration_id": slack_request.integration.id,
+ "trigger_id": slack_request.data["trigger_id"],
+ "dialog": self.dialog_type,
+ },
+ )
+
+
+class _ResolveDialog(_ModalDialog):
+ @property
+ def dialog_type(self) -> str:
+ return "resolve"
+
+ def get_modal_payload(self, callback_id: str, metadata: str) -> View:
+ return self.build_modal_payload(
+ title="Resolve",
+ action_text="Resolve",
+ options=RESOLVE_OPTIONS,
+ initial_option_text="Immediately",
+ initial_option_value="resolved",
+ callback_id=callback_id,
+ metadata=metadata,
+ )
+
+
+class _ArchiveDialog(_ModalDialog):
+ @property
+ def dialog_type(self) -> str:
+ return "archive"
+
+ def get_modal_payload(self, callback_id: str, metadata: str) -> View:
+ return self.build_modal_payload(
+ title="Archive",
+ action_text="Archive",
+ options=ARCHIVE_OPTIONS,
+ initial_option_text="Until escalating",
+ initial_option_value="ignored:archived_until_escalating",
+ callback_id=callback_id,
+ metadata=metadata,
+ )
diff --git a/src/sentry/integrations/slack/webhooks/base.py b/src/sentry/integrations/slack/webhooks/base.py
index 1d2eba49c6ba1..b0663cccebb8d 100644
--- a/src/sentry/integrations/slack/webhooks/base.py
+++ b/src/sentry/integrations/slack/webhooks/base.py
@@ -1,17 +1,30 @@
from __future__ import annotations
import abc
+import logging
+from collections.abc import Callable, Iterable
+from dataclasses import dataclass
from rest_framework import status
from rest_framework.response import Response
from sentry.api.base import Endpoint
+from sentry.integrations.messaging import commands
+from sentry.integrations.messaging.commands import (
+ CommandInput,
+ CommandNotMatchedError,
+ MessagingIntegrationCommand,
+ MessagingIntegrationCommandDispatcher,
+)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
from sentry.integrations.slack.message_builder.help import SlackHelpMessageBuilder
from sentry.integrations.slack.metrics import (
SLACK_WEBHOOK_DM_ENDPOINT_FAILURE_DATADOG_METRIC,
SLACK_WEBHOOK_DM_ENDPOINT_SUCCESS_DATADOG_METRIC,
)
from sentry.integrations.slack.requests.base import SlackDMRequest, SlackRequestError
+from sentry.integrations.slack.spec import SlackMessagingSpec
+from sentry.utils import metrics
LINK_USER_MESSAGE = (
"<{associate_url}|Link your Slack identity> to your Sentry account to receive notifications. "
@@ -24,9 +37,6 @@
NOT_LINKED_MESSAGE = "You do not have a linked identity to unlink."
ALREADY_LINKED_MESSAGE = "You are already linked as `{username}`."
-import logging
-
-from sentry.utils import metrics
logger = logging.getLogger(__name__)
@@ -42,33 +52,21 @@ def post_dispatcher(self, request: SlackDMRequest) -> Response:
All Slack commands are handled by this endpoint. This block just
validates the request and dispatches it to the right handler.
"""
- command, args = request.get_command_and_args()
-
- if command in ["help", "", "support", "docs"]:
- return self.respond(SlackHelpMessageBuilder(command=command).build())
-
- if command == "link":
- if not args:
- return self.link_user(request)
-
- if args[0] == "team":
- return self.link_team(request)
-
- if command == "unlink":
- if not args:
- return self.unlink_user(request)
-
- if args[0] == "team":
- return self.unlink_team(request)
-
- # If we cannot interpret the command, print help text.
- request_data = request.data
- unknown_command = request_data.get("text", "").lower()
- return self.respond(SlackHelpMessageBuilder(unknown_command).build())
+ cmd_input = request.get_command_input()
+ try:
+ return SlackCommandDispatcher(self, request).dispatch(cmd_input)
+ except CommandNotMatchedError:
+ # If we cannot interpret the command, print help text.
+ request_data = request.data
+ unknown_command = request_data.get("text", "").lower()
+ return self.help(unknown_command)
def reply(self, slack_request: SlackDMRequest, message: str) -> Response:
raise NotImplementedError
+ def help(self, command: str) -> Response:
+ return self.respond(SlackHelpMessageBuilder(command).build())
+
def link_user(self, slack_request: SlackDMRequest) -> Response:
from sentry.integrations.slack.views.link_identity import build_linking_url
@@ -124,3 +122,23 @@ def link_team(self, slack_request: SlackDMRequest) -> Response:
def unlink_team(self, slack_request: SlackDMRequest) -> Response:
raise NotImplementedError
+
+
+@dataclass(frozen=True)
+class SlackCommandDispatcher(MessagingIntegrationCommandDispatcher[Response]):
+ endpoint: SlackDMEndpoint
+ request: SlackDMRequest
+
+ @property
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ return SlackMessagingSpec()
+
+ @property
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], Response]]]:
+ yield commands.HELP, (lambda i: self.endpoint.help(i.cmd_value))
+ yield commands.LINK_IDENTITY, (lambda i: self.endpoint.link_user(self.request))
+ yield commands.UNLINK_IDENTITY, (lambda i: self.endpoint.unlink_user(self.request))
+ yield commands.LINK_TEAM, (lambda i: self.endpoint.link_team(self.request))
+ yield commands.UNLINK_TEAM, (lambda i: self.endpoint.unlink_team(self.request))
diff --git a/src/sentry/integrations/slack/webhooks/command.py b/src/sentry/integrations/slack/webhooks/command.py
index f9730ef18758f..ea0f111566da4 100644
--- a/src/sentry/integrations/slack/webhooks/command.py
+++ b/src/sentry/integrations/slack/webhooks/command.py
@@ -6,6 +6,7 @@
from rest_framework.request import Request
from rest_framework.response import Response
+from sentry import features
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -102,7 +103,10 @@ def link_team(self, slack_request: SlackDMRequest) -> Response:
has_valid_role = False
for organization_membership in organization_memberships:
- if is_team_linked_to_channel(organization_membership.organization, slack_request):
+ if not features.has(
+ "organizations:slack-multiple-team-single-channel-linking",
+ organization_membership.organization,
+ ) and is_team_linked_to_channel(organization_membership.organization, slack_request):
return self.reply(slack_request, CHANNEL_ALREADY_LINKED_MESSAGE)
if is_valid_role(organization_membership) or is_team_admin(organization_membership):
diff --git a/src/sentry/integrations/source_code_management/commit_context.py b/src/sentry/integrations/source_code_management/commit_context.py
index 5b89bc23c6ac4..590431ef4d4c7 100644
--- a/src/sentry/integrations/source_code_management/commit_context.py
+++ b/src/sentry/integrations/source_code_management/commit_context.py
@@ -4,22 +4,51 @@
from abc import ABC, abstractmethod
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
-from datetime import datetime
+from datetime import datetime, timedelta, timezone
from typing import Any
-from django.utils import timezone
+import sentry_sdk
+from django.utils import timezone as django_timezone
from sentry import analytics
from sentry.auth.exceptions import IdentityNotValid
from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig
-from sentry.models.pullrequest import CommentType, PullRequestComment
+from sentry.locks import locks
+from sentry.models.commit import Commit
+from sentry.models.group import Group
+from sentry.models.groupowner import GroupOwner
+from sentry.models.options.organization_option import OrganizationOption
+from sentry.models.project import Project
+from sentry.models.pullrequest import (
+ CommentType,
+ PullRequest,
+ PullRequestComment,
+ PullRequestCommit,
+)
from sentry.models.repository import Repository
from sentry.users.models.identity import Identity
from sentry.utils import metrics
+from sentry.utils.cache import cache
logger = logging.getLogger(__name__)
+def _debounce_pr_comment_cache_key(pullrequest_id: int) -> str:
+ return f"pr-comment-{pullrequest_id}"
+
+
+def _debounce_pr_comment_lock_key(pullrequest_id: int) -> str:
+ return f"queue_comment_task:{pullrequest_id}"
+
+
+def _pr_comment_log(integration_name: str, suffix: str) -> str:
+ return f"{integration_name}.pr_comment.{suffix}"
+
+
+PR_COMMENT_TASK_TTL = timedelta(minutes=5).total_seconds()
+PR_COMMENT_WINDOW = 14 # days
+
+
@dataclass
class SourceLineInfo:
lineno: int
@@ -84,6 +113,122 @@ def get_commit_context_all_frames(
"""
return self.get_blame_for_files(files, extra)
+ def queue_comment_task_if_needed(
+ self,
+ project: Project,
+ commit: Commit,
+ group_owner: GroupOwner,
+ group_id: int,
+ ) -> None:
+ if not OrganizationOption.objects.get_value(
+ organization=project.organization,
+ key="sentry:github_pr_bot",
+ default=True,
+ ):
+ logger.info(
+ _pr_comment_log(integration_name=self.integration_name, suffix="disabled"),
+ extra={"organization_id": project.organization_id},
+ )
+ return
+
+ repo_query = Repository.objects.filter(id=commit.repository_id).order_by("-date_added")
+ group = Group.objects.get_from_cache(id=group_id)
+ if not (
+ group.level is not logging.INFO and repo_query.exists()
+ ): # Don't comment on info level issues
+ logger.info(
+ _pr_comment_log(
+ integration_name=self.integration_name, suffix="incorrect_repo_config"
+ ),
+ extra={"organization_id": project.organization_id},
+ )
+ return
+
+ repo: Repository = repo_query.get()
+
+ logger.info(
+ _pr_comment_log(integration_name=self.integration_name, suffix="queue_comment_check"),
+ extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key},
+ )
+ from sentry.integrations.github.tasks.pr_comment import github_comment_workflow
+
+ # client will raise an Exception if the request is not successful
+ try:
+ client = self.get_client()
+ merge_commit_sha = client.get_merge_commit_sha_from_commit(
+ repo=repo.name, sha=commit.key
+ )
+ except Exception as e:
+ sentry_sdk.capture_exception(e)
+ return
+
+ if merge_commit_sha is None:
+ logger.info(
+ _pr_comment_log(
+ integration_name=self.integration_name,
+ suffix="queue_comment_workflow.commit_not_in_default_branch",
+ ),
+ extra={
+ "organization_id": commit.organization_id,
+ "repository_id": repo.id,
+ "commit_sha": commit.key,
+ },
+ )
+ return
+
+ pr_query = PullRequest.objects.filter(
+ organization_id=commit.organization_id,
+ repository_id=commit.repository_id,
+ merge_commit_sha=merge_commit_sha,
+ )
+ if not pr_query.exists():
+ logger.info(
+ _pr_comment_log(
+ integration_name=self.integration_name,
+ suffix="queue_comment_workflow.missing_pr",
+ ),
+ extra={
+ "organization_id": commit.organization_id,
+ "repository_id": repo.id,
+ "commit_sha": commit.key,
+ },
+ )
+ return
+
+ pr = pr_query.first()
+ assert pr is not None
+ # need to query explicitly for merged PR comments since we can have multiple comments per PR
+ merged_pr_comment_query = PullRequestComment.objects.filter(
+ pull_request_id=pr.id, comment_type=CommentType.MERGED_PR
+ )
+ if pr.date_added >= datetime.now(tz=timezone.utc) - timedelta(days=PR_COMMENT_WINDOW) and (
+ not merged_pr_comment_query.exists()
+ or group_owner.group_id not in merged_pr_comment_query[0].group_ids
+ ):
+ lock = locks.get(
+ _debounce_pr_comment_lock_key(pr.id), duration=10, name="queue_comment_task"
+ )
+ with lock.acquire():
+ cache_key = _debounce_pr_comment_cache_key(pullrequest_id=pr.id)
+ if cache.get(cache_key) is not None:
+ return
+
+ # create PR commit row for suspect commit and PR
+ PullRequestCommit.objects.get_or_create(commit=commit, pull_request=pr)
+
+ logger.info(
+ _pr_comment_log(
+ integration_name=self.integration_name, suffix="queue_comment_workflow"
+ ),
+ extra={"pullrequest_id": pr.id, "project_id": group_owner.project_id},
+ )
+
+ cache.set(cache_key, True, PR_COMMENT_TASK_TTL)
+
+ github_comment_workflow.delay(
+ pullrequest_id=pr.id, project_id=group_owner.project_id
+ )
+
def create_or_update_comment(
self,
repo: Repository,
@@ -94,6 +239,7 @@ def create_or_update_comment(
metrics_base: str,
comment_type: int = CommentType.MERGED_PR,
language: str | None = None,
+ github_copilot_actions: list[dict[str, Any]] | None = None,
):
client = self.get_client()
@@ -105,10 +251,19 @@ def create_or_update_comment(
# client will raise ApiError if the request is not successful
if pr_comment is None:
resp = client.create_comment(
- repo=repo.name, issue_id=str(pr_key), data={"body": comment_body}
+ repo=repo.name,
+ issue_id=str(pr_key),
+ data=(
+ {
+ "body": comment_body,
+ "actions": github_copilot_actions,
+ }
+ if github_copilot_actions
+ else {"body": comment_body}
+ ),
)
- current_time = timezone.now()
+ current_time = django_timezone.now()
comment = PullRequestComment.objects.create(
external_id=resp.body["id"],
pull_request_id=pullrequest_id,
@@ -134,12 +289,19 @@ def create_or_update_comment(
repo=repo.name,
issue_id=str(pr_key),
comment_id=pr_comment.external_id,
- data={"body": comment_body},
+ data=(
+ {
+ "body": comment_body,
+ "actions": github_copilot_actions,
+ }
+ if github_copilot_actions
+ else {"body": comment_body}
+ ),
)
metrics.incr(
metrics_base.format(integration=self.integration_name, key="comment_updated")
)
- pr_comment.updated_at = timezone.now()
+ pr_comment.updated_at = django_timezone.now()
pr_comment.group_ids = issue_list
pr_comment.save()
@@ -169,3 +331,7 @@ def update_comment(
self, repo: str, issue_id: str, comment_id: str, data: Mapping[str, Any]
) -> Any:
raise NotImplementedError
+
+ @abstractmethod
+ def get_merge_commit_sha_from_commit(self, repo: str, sha: str) -> str | None:
+ raise NotImplementedError
diff --git a/src/sentry/integrations/tasks/create_comment.py b/src/sentry/integrations/tasks/create_comment.py
index 2e383b2a26877..e1f82248a0003 100644
--- a/src/sentry/integrations/tasks/create_comment.py
+++ b/src/sentry/integrations/tasks/create_comment.py
@@ -2,12 +2,11 @@
from sentry.integrations.models.external_issue import ExternalIssue
from sentry.integrations.tasks import should_comment_sync
from sentry.models.activity import Activity
-from sentry.silo.base import SiloMode, region_silo_function
+from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task
from sentry.types.activity import ActivityType
-@region_silo_function
@instrumented_task(
name="sentry.integrations.tasks.create_comment",
queue="integrations",
diff --git a/src/sentry/integrations/tasks/sync_assignee_outbound.py b/src/sentry/integrations/tasks/sync_assignee_outbound.py
index 9b68da6c19379..78b24fe9273a2 100644
--- a/src/sentry/integrations/tasks/sync_assignee_outbound.py
+++ b/src/sentry/integrations/tasks/sync_assignee_outbound.py
@@ -1,6 +1,10 @@
+from typing import Any
+
from sentry import analytics, features
+from sentry.constants import ObjectStatus
from sentry.integrations.models.external_issue import ExternalIssue
from sentry.integrations.models.integration import Integration
+from sentry.integrations.services.assignment_source import AssignmentSource
from sentry.integrations.services.integration import integration_service
from sentry.models.organization import Organization
from sentry.silo.base import SiloMode
@@ -24,7 +28,12 @@
Organization.DoesNotExist,
)
)
-def sync_assignee_outbound(external_issue_id: int, user_id: int | None, assign: bool) -> None:
+def sync_assignee_outbound(
+ external_issue_id: int,
+ user_id: int | None,
+ assign: bool,
+ assignment_source_dict: dict[str, Any] | None = None,
+) -> None:
# Sync Sentry assignee to an external issue.
external_issue = ExternalIssue.objects.get(id=external_issue_id)
@@ -32,7 +41,9 @@ def sync_assignee_outbound(external_issue_id: int, user_id: int | None, assign:
has_issue_sync = features.has("organizations:integrations-issue-sync", organization)
if not has_issue_sync:
return
- integration = integration_service.get_integration(integration_id=external_issue.integration_id)
+ integration = integration_service.get_integration(
+ integration_id=external_issue.integration_id, status=ObjectStatus.ACTIVE
+ )
if not integration:
return
@@ -42,10 +53,15 @@ def sync_assignee_outbound(external_issue_id: int, user_id: int | None, assign:
):
return
- if installation.should_sync("outbound_assignee"):
+ parsed_assignment_source = (
+ AssignmentSource.from_dict(assignment_source_dict) if assignment_source_dict else None
+ )
+ if installation.should_sync("outbound_assignee", parsed_assignment_source):
# Assume unassign if None.
user = user_service.get_user(user_id) if user_id else None
- installation.sync_assignee_outbound(external_issue, user, assign=assign)
+ installation.sync_assignee_outbound(
+ external_issue, user, assign=assign, assignment_source=parsed_assignment_source
+ )
analytics.record(
"integration.issue.assignee.synced",
provider=integration.provider,
diff --git a/src/sentry/integrations/tasks/sync_status_inbound.py b/src/sentry/integrations/tasks/sync_status_inbound.py
index e738c09ce888d..729428d511821 100644
--- a/src/sentry/integrations/tasks/sync_status_inbound.py
+++ b/src/sentry/integrations/tasks/sync_status_inbound.py
@@ -7,6 +7,7 @@
from sentry import analytics
from sentry.api.helpers.group_index.update import get_current_release_version_of_group
+from sentry.constants import ObjectStatus
from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
from sentry.models.group import Group, GroupStatus
@@ -181,7 +182,9 @@ def sync_status_inbound(
) -> None:
from sentry.integrations.mixins import ResolveSyncAction
- integration = integration_service.get_integration(integration_id=integration_id)
+ integration = integration_service.get_integration(
+ integration_id=integration_id, status=ObjectStatus.ACTIVE
+ )
if integration is None:
raise Integration.DoesNotExist
diff --git a/src/sentry/integrations/tasks/sync_status_outbound.py b/src/sentry/integrations/tasks/sync_status_outbound.py
index 3ea2807e28ead..7aa1fb3afcc9d 100644
--- a/src/sentry/integrations/tasks/sync_status_outbound.py
+++ b/src/sentry/integrations/tasks/sync_status_outbound.py
@@ -1,4 +1,5 @@
from sentry import analytics, features
+from sentry.constants import ObjectStatus
from sentry.integrations.models.external_issue import ExternalIssue
from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
@@ -34,7 +35,9 @@ def sync_status_outbound(group_id: int, external_issue_id: int) -> bool | None:
# Issue link could have been deleted while sync job was in the queue.
return None
- integration = integration_service.get_integration(integration_id=external_issue.integration_id)
+ integration = integration_service.get_integration(
+ integration_id=external_issue.integration_id, status=ObjectStatus.ACTIVE
+ )
if not integration:
return None
installation = integration.get_installation(organization_id=external_issue.organization_id)
diff --git a/src/sentry/integrations/utils/__init__.py b/src/sentry/integrations/utils/__init__.py
index ca99167c50117..e69de29bb2d1d 100644
--- a/src/sentry/integrations/utils/__init__.py
+++ b/src/sentry/integrations/utils/__init__.py
@@ -1,24 +0,0 @@
-__all__ = (
- "AtlassianConnectValidationError",
- "authenticate_asymmetric_jwt",
- "get_identities_by_user",
- "get_identity_or_404",
- "get_integration_from_jwt",
- "get_integration_from_request",
- "get_query_hash",
- "sync_group_assignee_inbound",
- "sync_group_assignee_outbound",
- "verify_claims",
- "where_should_sync",
-)
-
-from .atlassian_connect import (
- AtlassianConnectValidationError,
- authenticate_asymmetric_jwt,
- get_integration_from_jwt,
- get_integration_from_request,
- get_query_hash,
- verify_claims,
-)
-from .identities import get_identities_by_user, get_identity_or_404
-from .sync import sync_group_assignee_inbound, sync_group_assignee_outbound, where_should_sync
diff --git a/src/sentry/integrations/utils/common.py b/src/sentry/integrations/utils/common.py
index e7caf84a8b9de..59629f4c100d3 100644
--- a/src/sentry/integrations/utils/common.py
+++ b/src/sentry/integrations/utils/common.py
@@ -1,8 +1,8 @@
import logging
+from sentry.constants import ObjectStatus
from sentry.integrations.services.integration import RpcIntegration, integration_service
from sentry.integrations.types import ExternalProviderEnum
-from sentry.models.organization import OrganizationStatus
_default_logger = logging.getLogger(__name__)
@@ -13,7 +13,7 @@ def get_active_integration_for_organization(
try:
return integration_service.get_integration(
organization_id=organization_id,
- status=OrganizationStatus.ACTIVE,
+ status=ObjectStatus.ACTIVE,
provider=provider.value,
)
except Exception as err:
diff --git a/src/sentry/integrations/utils/identities.py b/src/sentry/integrations/utils/identities.py
index 7d93614d5ddc6..e42eb8e09434c 100644
--- a/src/sentry/integrations/utils/identities.py
+++ b/src/sentry/integrations/utils/identities.py
@@ -1,6 +1,7 @@
import logging
from collections.abc import Iterable, Mapping
+from django.contrib.auth.models import AnonymousUser
from django.http import Http404
from sentry.constants import ObjectStatus
@@ -19,17 +20,17 @@
@control_silo_function
def get_identity_or_404(
provider: ExternalProviders,
- user: User,
+ user: User | AnonymousUser,
integration_id: int,
organization_id: int | None = None,
) -> tuple[RpcOrganization, Integration, IdentityProvider]:
+ """For endpoints, short-circuit with a 404 if we cannot find everything we need."""
logger_metadata = {
"integration_provider": provider,
"integration_id": integration_id,
"organization_id": organization_id,
"user_id": user.id,
}
- """For endpoints, short-circuit with a 404 if we cannot find everything we need."""
if provider not in EXTERNAL_PROVIDERS:
_logger.info("provider is not part of supported external providers", extra=logger_metadata)
raise Http404
diff --git a/src/sentry/integrations/utils/metrics.py b/src/sentry/integrations/utils/metrics.py
new file mode 100644
index 0000000000000..83c2bc755017b
--- /dev/null
+++ b/src/sentry/integrations/utils/metrics.py
@@ -0,0 +1,269 @@
+import itertools
+import logging
+from abc import ABC, abstractmethod
+from collections.abc import Mapping
+from dataclasses import dataclass
+from enum import Enum
+from types import TracebackType
+from typing import Any, Self
+
+from django.conf import settings
+
+from sentry.integrations.base import IntegrationDomain
+from sentry.utils import metrics
+
+logger = logging.getLogger(__name__)
+
+
+class EventLifecycleOutcome(Enum):
+ STARTED = "STARTED"
+ HALTED = "HALTED"
+ SUCCESS = "SUCCESS"
+ FAILURE = "FAILURE"
+
+ def __str__(self) -> str:
+ return self.value.lower()
+
+
+class EventLifecycleMetric(ABC):
+ """Information about an event to be measured.
+
+ This class is intended to be used across different integrations that share the
+ same business concern. Generally a subclass would represent one business concern
+ (such as MessagingInteractionEvent, which extends this class and is used in the
+ `slack`, `msteams`, and `discord` integration packages).
+ """
+
+ @abstractmethod
+ def get_key(self, outcome: EventLifecycleOutcome) -> str:
+ """Construct the metrics key that will represent this event.
+
+ It is recommended to implement this method by delegating to a
+ `get_standard_key` call.
+ """
+
+ raise NotImplementedError
+
+ @staticmethod
+ def get_standard_key(
+ domain: str,
+ integration_name: str,
+ interaction_type: str,
+ outcome: EventLifecycleOutcome,
+ *extra_tokens: str,
+ ) -> str:
+ """Construct a key with a standard cross-integration structure.
+
+ Implementations of `get_key` generally should delegate to this method in
+ order to ensure consistency across integrations.
+
+ :param domain: a constant string representing the category of business
+ concern or vertical domain that the integration belongs
+ to (e.g., "messaging" or "source_code_management")
+ :param integration_name: the name of the integration (generally should match a
+ package name from `sentry.integrations`)
+ :param interaction_type: a key representing the category of interaction being
+ captured (generally should come from an Enum class)
+ :param outcome: the object representing the event outcome
+ :param extra_tokens: additional tokens to add extra context, if needed
+ :return: a key to represent the event in metrics or logging
+ """
+
+    # For now, universally include a "slo" token to distinguish from any
+ # previously existing metrics keys.
+ # TODO: Merge with or replace existing keys?
+ root_tokens = ("sentry", "integrations", "slo")
+
+ specific_tokens = (domain, integration_name, interaction_type, str(outcome))
+ return ".".join(itertools.chain(root_tokens, specific_tokens, extra_tokens))
+
+ def get_extras(self) -> Mapping[str, Any]:
+ """Get extra data to log."""
+ return {}
+
+ def capture(self, assume_success: bool = True) -> "EventLifecycle":
+ """Open a context to measure the event."""
+ return EventLifecycle(self, assume_success)
+
+
+class EventLifecycle:
+ """Context object that measures an event that may succeed or fail.
+
+ The `assume_success` attribute can be set to False for events where exiting the
+ context may or may not represent a failure condition. In this state,
+ if the program exits the context without `record_success` or `record_failure`
+ being called first, it will log the outcome "halted" in place of "success" or
+ "failure". "Halted" could mean that we received an ambiguous exception from a
+ remote service that may have been caused either by a bug or user error, or merely
+ that inserting `record_failure` calls is still a dev to-do item.
+ """
+
+ def __init__(self, payload: EventLifecycleMetric, assume_success: bool = True) -> None:
+ self.payload = payload
+ self.assume_success = assume_success
+ self._state: EventLifecycleOutcome | None = None
+ self._extra = dict(self.payload.get_extras())
+
+ def add_extra(self, name: str, value: Any) -> None:
+ """Add a value to logged "extra" data.
+
+ Overwrites the name with a new value if it was previously used.
+ """
+ self._extra[name] = value
+
+ def record_event(
+ self, outcome: EventLifecycleOutcome, exc: BaseException | None = None
+ ) -> None:
+        """Record a lifecycle event (start or terminal outcome).
+
+ This method is public so that unit tests may mock it, but it should be called
+ only by the other "record" methods.
+ """
+
+ key = self.payload.get_key(outcome)
+
+ sample_rate = (
+ 1.0 if outcome == EventLifecycleOutcome.FAILURE else settings.SENTRY_METRICS_SAMPLE_RATE
+ )
+ metrics.incr(key, sample_rate=sample_rate)
+
+ if outcome == EventLifecycleOutcome.FAILURE:
+ logger.error(key, extra=self._extra, exc_info=exc)
+
+ @staticmethod
+ def _report_flow_error(message) -> None:
+ logger.error("EventLifecycle flow error: %s", message)
+
+ def _terminate(
+ self, new_state: EventLifecycleOutcome, exc: BaseException | None = None
+ ) -> None:
+ if self._state is None:
+ self._report_flow_error("The lifecycle has not yet been entered")
+ if self._state != EventLifecycleOutcome.STARTED:
+ self._report_flow_error("The lifecycle has already been exited")
+ self._state = new_state
+ self.record_event(new_state, exc)
+
+ def record_success(self) -> None:
+ """Record that the event halted successfully.
+
+ Exiting the context without raising an exception will call this method
+ automatically, unless the context was initialized with `assume_success` set
+ to False.
+ """
+
+ self._terminate(EventLifecycleOutcome.SUCCESS)
+
+ def record_failure(
+ self, exc: BaseException | None = None, extra: dict[str, Any] | None = None
+ ) -> None:
+ """Record that the event halted in failure. Additional data may be passed
+ to be logged.
+
+ There is no need to call this method directly if an exception is raised from
+ inside the context. It will be called automatically when exiting the context
+ on an exception.
+
+ This method should be called if we return a soft failure from the event. For
+ example, if we receive an error status from a remote service and gracefully
+ display an error response to the user, it would be necessary to manually call
+ `record_failure` on the context object.
+ """
+
+ if extra:
+ self._extra.update(extra)
+ self._terminate(EventLifecycleOutcome.FAILURE, exc)
+
+ def record_halt(self, exc: BaseException | None = None) -> None:
+ """Record that the event halted in an ambiguous state.
+
+ This method can be called in response to a sufficiently ambiguous exception
+ or other error condition, where it may have been caused by a user error or
+ other expected condition, but there is some substantial chance that it
+ represents a bug.
+
+ Such cases usually mean that we want to:
+ (1) document the ambiguity;
+ (2) monitor it for sudden spikes in frequency; and
+ (3) investigate whether more detailed error information is available
+ (but probably later, as a backlog item).
+ """
+
+ self._terminate(EventLifecycleOutcome.HALTED, exc)
+
+ def __enter__(self) -> Self:
+ if self._state is not None:
+ self._report_flow_error("The lifecycle has already been entered")
+ self._state = EventLifecycleOutcome.STARTED
+ self.record_event(EventLifecycleOutcome.STARTED)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType,
+ ) -> None:
+ if self._state != EventLifecycleOutcome.STARTED:
+            # The context called record_success or record_failure before closing,
+ # so we can just exit quietly.
+ return
+
+ if exc_value is not None:
+ # We were forced to exit the context by a raised exception.
+ self.record_failure(exc_value)
+ else:
+ # We exited the context without record_success or record_failure being
+ # called. Assume success if we were told to do so. Else, log a halt
+ # indicating that there is no clear success or failure signal.
+ self._terminate(
+ EventLifecycleOutcome.SUCCESS
+ if self.assume_success
+ else EventLifecycleOutcome.HALTED
+ )
+
+
+class IntegrationPipelineViewType(Enum):
+ """A specific step in an integration's pipeline that is not a static page."""
+
+ # IdentityProviderPipeline
+ IDENTITY_LOGIN = "IDENTITY_LOGIN"
+ IDENTITY_LINK = "IDENTITY_LINK"
+
+ # GitHub
+ OAUTH_LOGIN = "OAUTH_LOGIN"
+ GITHUB_INSTALLATION = "GITHUB_INSTALLATION"
+
+ # Bitbucket
+ VERIFY_INSTALLATION = "VERIFY_INSTALLATION"
+
+ # Bitbucket Server
+ # OAUTH_LOGIN = "OAUTH_LOGIN"
+ OAUTH_CALLBACK = "OAUTH_CALLBACK"
+
+ # Azure DevOps
+ ACCOUNT_CONFIG = "ACCOUNT_CONFIG"
+
+ def __str__(self) -> str:
+ return self.value.lower()
+
+
+@dataclass
+class IntegrationPipelineViewEvent(EventLifecycleMetric):
+ """An instance to be recorded of a user going through an integration pipeline view (step)."""
+
+ interaction_type: IntegrationPipelineViewType
+ domain: IntegrationDomain
+ provider_key: str
+
+ def get_key(self, outcome: EventLifecycleOutcome) -> str:
+ # not reporting as SLOs
+ root_tokens = ("sentry", "integrations", "installation")
+ specific_tokens = (
+ self.domain,
+ self.provider_key,
+ str(self.interaction_type),
+ str(outcome),
+ )
+
+ return ".".join(itertools.chain(root_tokens, specific_tokens))
diff --git a/src/sentry/integrations/utils/stacktrace_link.py b/src/sentry/integrations/utils/stacktrace_link.py
index 9ba5efc4cb08d..5aff6b93e60c9 100644
--- a/src/sentry/integrations/utils/stacktrace_link.py
+++ b/src/sentry/integrations/utils/stacktrace_link.py
@@ -3,6 +3,7 @@
import logging
from typing import TYPE_CHECKING, NotRequired, TypedDict
+from sentry.constants import ObjectStatus
from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig
from sentry.integrations.services.integration import integration_service
from sentry.integrations.source_code_management.repository import RepositoryIntegration
@@ -30,7 +31,7 @@ def get_link(
result: RepositoryLinkOutcome = {}
integration = integration_service.get_integration(
- organization_integration_id=config.organization_integration_id
+ organization_integration_id=config.organization_integration_id, status=ObjectStatus.ACTIVE
)
if not integration:
result["error"] = "integration_not_found"
diff --git a/src/sentry/integrations/utils/sync.py b/src/sentry/integrations/utils/sync.py
index a97c6dd78faca..a2ac81f567035 100644
--- a/src/sentry/integrations/utils/sync.py
+++ b/src/sentry/integrations/utils/sync.py
@@ -5,6 +5,8 @@
from typing import TYPE_CHECKING
from sentry import features
+from sentry.integrations.models.integration import Integration
+from sentry.integrations.services.assignment_source import AssignmentSource
from sentry.integrations.services.integration import integration_service
from sentry.integrations.tasks.sync_assignee_outbound import sync_assignee_outbound
from sentry.models.group import Group
@@ -20,7 +22,7 @@
@region_silo_function
def where_should_sync(
- integration: RpcIntegration,
+ integration: RpcIntegration | Integration,
key: str,
organization_id: int | None = None,
) -> Sequence[Organization]:
@@ -62,9 +64,9 @@ def get_user_id(projects_by_user: Mapping[int, Sequence[int]], group: Group) ->
@region_silo_function
def sync_group_assignee_inbound(
- integration: RpcIntegration,
+ integration: RpcIntegration | Integration,
email: str | None,
- external_issue_key: str,
+ external_issue_key: str | None,
assign: bool = True,
) -> Sequence[Group]:
"""
@@ -92,7 +94,11 @@ def sync_group_assignee_inbound(
if not assign:
for group in affected_groups:
- GroupAssignee.objects.deassign(group)
+ GroupAssignee.objects.deassign(
+ group,
+ assignment_source=AssignmentSource.from_integration(integration),
+ )
+
return affected_groups
users = user_service.get_many_by_email(emails=[email], is_verified=True)
@@ -104,14 +110,23 @@ def sync_group_assignee_inbound(
user_id = get_user_id(projects_by_user, group)
user = users_by_id.get(user_id)
if user:
- GroupAssignee.objects.assign(group, user)
+ GroupAssignee.objects.assign(
+ group,
+ user,
+ assignment_source=AssignmentSource.from_integration(integration),
+ )
groups_assigned.append(group)
else:
logger.info("assignee-not-found-inbound", extra=log_context)
return groups_assigned
-def sync_group_assignee_outbound(group: Group, user_id: int | None, assign: bool = True) -> None:
+def sync_group_assignee_outbound(
+ group: Group,
+ user_id: int | None,
+ assign: bool = True,
+ assignment_source: AssignmentSource | None = None,
+) -> None:
from sentry.models.grouplink import GroupLink
external_issue_ids = GroupLink.objects.filter(
@@ -120,5 +135,12 @@ def sync_group_assignee_outbound(group: Group, user_id: int | None, assign: bool
for external_issue_id in external_issue_ids:
sync_assignee_outbound.apply_async(
- kwargs={"external_issue_id": external_issue_id, "user_id": user_id, "assign": assign}
+ kwargs={
+ "external_issue_id": external_issue_id,
+ "user_id": user_id,
+ "assign": assign,
+ "assignment_source_dict": assignment_source.to_dict()
+ if assignment_source
+ else None,
+ }
)
diff --git a/src/sentry/integrations/vsts/client.py b/src/sentry/integrations/vsts/client.py
index 7bab65d8fda58..598d53f5310c4 100644
--- a/src/sentry/integrations/vsts/client.py
+++ b/src/sentry/integrations/vsts/client.py
@@ -8,6 +8,7 @@
from requests import PreparedRequest
from rest_framework.response import Response
+from sentry.constants import ObjectStatus
from sentry.exceptions import InvalidIdentity
from sentry.integrations.base import IntegrationFeatureNotImplementedError
from sentry.integrations.client import ApiClient
@@ -207,7 +208,7 @@ def _refresh_auth_if_expired(self):
from sentry.integrations.vsts.integration import VstsIntegrationProvider
integration = integration_service.get_integration(
- organization_integration_id=self.org_integration_id
+ organization_integration_id=self.org_integration_id, status=ObjectStatus.ACTIVE
)
# check if integration has migrated to new identity provider
migration_version = integration.metadata.get("integration_migration_version", 0)
diff --git a/src/sentry/integrations/vsts/integration.py b/src/sentry/integrations/vsts/integration.py
index 831f39458bf64..b6b84563f5921 100644
--- a/src/sentry/integrations/vsts/integration.py
+++ b/src/sentry/integrations/vsts/integration.py
@@ -42,6 +42,7 @@
IntegrationProviderError,
)
from sentry.silo.base import SiloMode
+from sentry.utils import metrics
from sentry.utils.http import absolute_uri
from sentry.web.helpers import render_to_response
@@ -527,7 +528,24 @@ def build_integration(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
status=ObjectStatus.ACTIVE,
).exists()
+ metrics.incr(
+ "integrations.migration.vsts_integration_migration",
+ sample_rate=1.0,
+ )
+
except (IntegrationModel.DoesNotExist, AssertionError, KeyError):
+ logger.warning(
+ "vsts.build_integration.error",
+ extra={
+ "organization_id": (
+ self.pipeline.organization.id
+ if self.pipeline and self.pipeline.organization
+ else None
+ ),
+ "user_id": user["id"],
+ "account": account,
+ },
+ )
subscription_id, subscription_secret = self.create_subscription(
base_url=base_url, oauth_data=oauth_data
)
@@ -564,7 +582,9 @@ def create_subscription(
raise IntegrationProviderError(
"Sentry cannot communicate with this Azure DevOps organization.\n"
"Please ensure third-party app access via OAuth is enabled \n"
- "in the organization's security policy."
+ "in the organization's security policy \n"
+ "The user installing the integration must have project administrator permissions. \n"
+ "The user installing might also need admin permissions depending on the organization's security policy."
)
raise
diff --git a/src/sentry/integrations/vsts/issues.py b/src/sentry/integrations/vsts/issues.py
index 7e897f813d13c..9abfd9b3feb86 100644
--- a/src/sentry/integrations/vsts/issues.py
+++ b/src/sentry/integrations/vsts/issues.py
@@ -6,6 +6,7 @@
from mistune import markdown
from rest_framework.response import Response
+from sentry.constants import ObjectStatus
from sentry.integrations.mixins import ResolveSyncAction
from sentry.integrations.mixins.issues import IssueSyncIntegration
from sentry.integrations.services.integration import integration_service
@@ -361,7 +362,7 @@ def search_issues(self, query: str | None, **kwargs) -> dict[str, Any]:
client = self.get_client()
integration = integration_service.get_integration(
- integration_id=self.org_integration.integration_id
+ integration_id=self.org_integration.integration_id, status=ObjectStatus.ACTIVE
)
if not integration:
raise IntegrationError("Azure DevOps integration not found")
diff --git a/src/sentry/integrations/vsts/webhooks.py b/src/sentry/integrations/vsts/webhooks.py
index 43192af80c210..d0d6ea877fd48 100644
--- a/src/sentry/integrations/vsts/webhooks.py
+++ b/src/sentry/integrations/vsts/webhooks.py
@@ -12,9 +12,10 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import Endpoint, region_silo_endpoint
+from sentry.constants import ObjectStatus
from sentry.integrations.mixins.issues import IssueSyncIntegration
from sentry.integrations.services.integration import integration_service
-from sentry.integrations.utils import sync_group_assignee_inbound
+from sentry.integrations.utils.sync import sync_group_assignee_inbound
from sentry.utils.email import parse_email
if TYPE_CHECKING:
@@ -52,7 +53,7 @@ def post(self, request: Request, *args: Any, **kwargs: Any) -> Response:
# https://docs.microsoft.com/en-us/azure/devops/service-hooks/events?view=azure-devops#workitem.updated
if event_type == "workitem.updated":
integration = integration_service.get_integration(
- provider=PROVIDER_KEY, external_id=external_id
+ provider=PROVIDER_KEY, external_id=external_id, status=ObjectStatus.ACTIVE
)
if integration is None:
logger.info(
diff --git a/src/sentry/interfaces/contexts.py b/src/sentry/interfaces/contexts.py
index 35285b823c058..161a1f292365c 100644
--- a/src/sentry/interfaces/contexts.py
+++ b/src/sentry/interfaces/contexts.py
@@ -3,6 +3,7 @@
import string
from typing import Any, ClassVar, TypeVar
+import sentry_sdk
from django.utils.encoding import force_str
from sentry.interfaces.base import Interface
@@ -232,7 +233,12 @@ def to_python(cls, data, **kwargs):
@classmethod
def normalize_context(cls, alias, data):
ctx_type = data.get("type", alias)
- ctx_cls = context_types.get(ctx_type, DefaultContextType)
+ try:
+ ctx_cls = context_types.get(ctx_type, DefaultContextType)
+ except TypeError:
+ # Debugging information for SENTRY-FOR-SENTRY-2NH2.
+ sentry_sdk.set_context("ctx_type", ctx_type)
+ raise
return ctx_cls(alias, data)
def iter_contexts(self):
diff --git a/src/sentry/interfaces/user.py b/src/sentry/interfaces/user.py
index 58b7afe5b5503..0923ddcc6ae1b 100644
--- a/src/sentry/interfaces/user.py
+++ b/src/sentry/interfaces/user.py
@@ -1,12 +1,23 @@
__all__ = ("User",)
+from typing import Any, TypedDict
+
from sentry.interfaces.base import Interface
from sentry.interfaces.geo import Geo
from sentry.utils.json import prune_empty_keys
from sentry.web.helpers import render_to_string
+class EventUserApiContext(TypedDict, total=False):
+ id: str | None
+ email: str | None
+ username: str | None
+ ip_address: str | None
+ name: str | None
+ data: dict[str, Any] | None
+
+
class User(Interface):
"""
An interface which describes the authenticated User for a request.
@@ -51,7 +62,7 @@ def to_json(self):
}
)
- def get_api_context(self, is_public=False, platform=None):
+ def get_api_context(self, is_public=False, platform=None) -> EventUserApiContext:
return {
"id": self.id,
"email": self.email,
diff --git a/src/sentry/issues/attributes.py b/src/sentry/issues/attributes.py
index 22ab70d49eab6..902a113961463 100644
--- a/src/sentry/issues/attributes.py
+++ b/src/sentry/issues/attributes.py
@@ -252,7 +252,7 @@ def process_update_fields(updated_fields) -> set[str]:
# we'll need to assume any of the attributes are updated in that case
updated_fields = {"all"}
else:
- VALID_FIELDS = {"status", "substatus", "num_comments"}
+ VALID_FIELDS = {"status", "substatus", "num_comments", "priority", "first_release"}
updated_fields = VALID_FIELDS.intersection(updated_fields or ())
if updated_fields:
_log_group_attributes_changed(Operation.UPDATED, "group", "-".join(sorted(updated_fields)))
diff --git a/src/sentry/issues/endpoints/__init__.py b/src/sentry/issues/endpoints/__init__.py
index 50e5e852f05c3..36c255daf1830 100644
--- a/src/sentry/issues/endpoints/__init__.py
+++ b/src/sentry/issues/endpoints/__init__.py
@@ -9,6 +9,8 @@
from .group_participants import GroupParticipantsEndpoint
from .group_similar_issues import GroupSimilarIssuesEndpoint
from .group_similar_issues_embeddings import GroupSimilarIssuesEmbeddingsEndpoint
+from .group_tombstone import GroupTombstoneEndpoint
+from .group_tombstone_details import GroupTombstoneDetailsEndpoint
from .organization_eventid import EventIdLookupEndpoint
from .organization_group_index import OrganizationGroupIndexEndpoint
from .organization_group_index_stats import OrganizationGroupIndexStatsEndpoint
@@ -21,6 +23,7 @@
from .project_group_index import ProjectGroupIndexEndpoint
from .project_group_stats import ProjectGroupStatsEndpoint
from .project_stacktrace_link import ProjectStacktraceLinkEndpoint
+from .related_issues import RelatedIssuesEndpoint
from .shared_group_details import SharedGroupDetailsEndpoint
from .source_map_debug import SourceMapDebugEndpoint
from .team_groups_old import TeamGroupsOldEndpoint
@@ -39,6 +42,8 @@
"GroupParticipantsEndpoint",
"GroupSimilarIssuesEmbeddingsEndpoint",
"GroupSimilarIssuesEndpoint",
+ "GroupTombstoneDetailsEndpoint",
+ "GroupTombstoneEndpoint",
"OrganizationGroupIndexEndpoint",
"OrganizationGroupIndexStatsEndpoint",
"OrganizationGroupSearchViewsEndpoint",
@@ -49,6 +54,7 @@
"ProjectGroupIndexEndpoint",
"ProjectGroupStatsEndpoint",
"ProjectStacktraceLinkEndpoint",
+ "RelatedIssuesEndpoint",
"SharedGroupDetailsEndpoint",
"ShortIdLookupEndpoint",
"SourceMapDebugEndpoint",
diff --git a/src/sentry/issues/endpoints/group_details.py b/src/sentry/issues/endpoints/group_details.py
index cfb080d49b60b..40117285948b6 100644
--- a/src/sentry/issues/endpoints/group_details.py
+++ b/src/sentry/issues/endpoints/group_details.py
@@ -22,7 +22,6 @@
)
from sentry.api.serializers import GroupSerializer, GroupSerializerSnuba, serialize
from sentry.api.serializers.models.group_stream import get_actions, get_available_issue_plugins
-from sentry.api.serializers.models.platformexternalissue import PlatformExternalIssueSerializer
from sentry.api.serializers.models.plugin import PluginSerializer
from sentry.api.serializers.models.team import TeamSerializer
from sentry.integrations.api.serializers.models.external_issue import ExternalIssueSerializer
@@ -38,10 +37,13 @@
from sentry.models.groupowner import get_owner_details
from sentry.models.groupseen import GroupSeen
from sentry.models.groupsubscription import GroupSubscriptionManager
-from sentry.models.platformexternalissue import PlatformExternalIssue
from sentry.models.team import Team
from sentry.models.userreport import UserReport
from sentry.plugins.base import plugins
+from sentry.sentry_apps.api.serializers.platform_external_issue import (
+ PlatformExternalIssueSerializer,
+)
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
from sentry.tasks.post_process import fetch_buffered_group_stats
from sentry.types.ratelimit import RateLimit, RateLimitCategory
from sentry.users.services.user.service import user_service
@@ -382,7 +384,7 @@ def put(self, request: Request, group) -> Response:
)
return Response(e.body, status=e.status_code)
- def delete(self, request: Request, group) -> Response:
+ def delete(self, request: Request, group: Group) -> Response:
"""
Remove an Issue
```````````````
@@ -394,7 +396,11 @@ def delete(self, request: Request, group) -> Response:
"""
from sentry.utils import snuba
- if group.issue_category != GroupCategory.ERROR:
+ issue_platform_deletion_allowed = features.has(
+ "organizations:issue-platform-deletion", group.project.organization, actor=request.user
+ )
+
+ if group.issue_category != GroupCategory.ERROR and not issue_platform_deletion_allowed:
raise ValidationError(detail="Only error issues can be deleted.")
try:
diff --git a/src/sentry/issues/endpoints/group_event_details.py b/src/sentry/issues/endpoints/group_event_details.py
index bd8521b6df6c8..fffb47f40ea3b 100644
--- a/src/sentry/issues/endpoints/group_event_details.py
+++ b/src/sentry/issues/endpoints/group_event_details.py
@@ -4,6 +4,8 @@
from collections.abc import Sequence
from django.contrib.auth.models import AnonymousUser
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
from snuba_sdk import Condition, Or
@@ -17,8 +19,20 @@
from sentry.api.helpers.group_index import parse_and_convert_issue_search_query
from sentry.api.helpers.group_index.validators import ValidationError
from sentry.api.serializers import EventSerializer, serialize
+from sentry.apidocs.constants import (
+ RESPONSE_BAD_REQUEST,
+ RESPONSE_FORBIDDEN,
+ RESPONSE_NOT_FOUND,
+ RESPONSE_UNAUTHORIZED,
+)
+from sentry.apidocs.examples.event_examples import EventExamples
+from sentry.apidocs.parameters import GlobalParams, IssueParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.eventstore.models import Event, GroupEvent
-from sentry.issues.endpoints.project_event_details import wrap_event_response
+from sentry.issues.endpoints.project_event_details import (
+ GroupEventDetailsResponse,
+ wrap_event_response,
+)
from sentry.issues.grouptype import GroupCategory
from sentry.models.environment import Environment
from sentry.models.group import Group
@@ -99,10 +113,11 @@ def issue_search_query_to_conditions(
return snql_conditions
+@extend_schema(tags=["Events"])
@region_silo_endpoint
class GroupEventDetailsEndpoint(GroupEndpoint):
publish_status = {
- "GET": ApiPublishStatus.UNKNOWN,
+ "GET": ApiPublishStatus.PUBLIC,
}
enforce_rate_limit = True
rate_limits = {
@@ -113,14 +128,36 @@ class GroupEventDetailsEndpoint(GroupEndpoint):
}
}
+ @extend_schema(
+ operation_id="Retrieve an Issue Event",
+ parameters=[
+ GlobalParams.ORG_ID_OR_SLUG,
+ IssueParams.ISSUES_OR_GROUPS,
+ IssueParams.ISSUE_ID,
+ GlobalParams.ENVIRONMENT,
+ OpenApiParameter(
+ name="event_id",
+ type=OpenApiTypes.STR,
+ location=OpenApiParameter.PATH,
+ description="The ID of the event to retrieve, or 'latest', 'oldest', or 'recommended'.",
+ required=True,
+ enum=["latest", "oldest", "recommended"],
+ ),
+ ],
+ responses={
+ 200: inline_sentry_response_serializer(
+ "IssueEventDetailsResponse", GroupEventDetailsResponse
+ ),
+ 400: RESPONSE_BAD_REQUEST,
+ 401: RESPONSE_UNAUTHORIZED,
+ 403: RESPONSE_FORBIDDEN,
+ 404: RESPONSE_NOT_FOUND,
+ },
+ examples=EventExamples.GROUP_EVENT_DETAILS,
+ )
def get(self, request: Request, group: Group, event_id: str) -> Response:
"""
- Retrieve the latest(most recent), oldest, or most helpful Event for an Issue
- ``````````````````````````````````````
-
- Retrieves the details of the latest/oldest/most-helpful event for an issue.
-
- :pparam string group_id: the ID of the issue
+ Retrieves the details of an issue event.
"""
environments = [e for e in get_environments(request, group.project.organization)]
environment_names = [e.name for e in environments]
@@ -133,7 +170,7 @@ def get(self, request: Request, group: Group, event_id: str) -> Response:
elif event_id == "oldest":
with metrics.timer("api.endpoints.group_event_details.get", tags={"type": "oldest"}):
event = group.get_oldest_event_for_environments(environment_names)
- elif event_id in ("helpful", "recommended"):
+ elif event_id == "recommended":
query = request.GET.get("query")
if query:
with metrics.timer(
diff --git a/src/sentry/issues/endpoints/group_events.py b/src/sentry/issues/endpoints/group_events.py
index ba559d1ea1b9c..dc3e23c1fe8c3 100644
--- a/src/sentry/issues/endpoints/group_events.py
+++ b/src/sentry/issues/endpoints/group_events.py
@@ -5,6 +5,8 @@
from typing import TYPE_CHECKING, Any
from django.utils import timezone
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.exceptions import ParseError
from rest_framework.request import Request
from rest_framework.response import Response
@@ -19,7 +21,17 @@
from sentry.api.helpers.events import get_direct_hit_response, get_query_builder_for_group
from sentry.api.paginator import GenericOffsetPaginator
from sentry.api.serializers import EventSerializer, SimpleEventSerializer, serialize
+from sentry.api.serializers.models.event import SimpleEventSerializerResponse
from sentry.api.utils import get_date_range_from_params
+from sentry.apidocs.constants import (
+ RESPONSE_BAD_REQUEST,
+ RESPONSE_FORBIDDEN,
+ RESPONSE_NOT_FOUND,
+ RESPONSE_UNAUTHORIZED,
+)
+from sentry.apidocs.examples.event_examples import EventExamples
+from sentry.apidocs.parameters import GlobalParams, IssueParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.eventstore.models import Event
from sentry.exceptions import InvalidParams, InvalidSearchQuery
from sentry.search.events.types import ParamsType
@@ -38,29 +50,60 @@ class GroupEventsError(Exception):
pass
+@extend_schema(tags=["Events"])
@region_silo_endpoint
class GroupEventsEndpoint(GroupEndpoint, EnvironmentMixin):
publish_status = {
- "GET": ApiPublishStatus.UNKNOWN,
+ "GET": ApiPublishStatus.PUBLIC,
}
owner = ApiOwner.ISSUES
+ @extend_schema(
+ operation_id="List an Issue's Events",
+ parameters=[
+ GlobalParams.ORG_ID_OR_SLUG,
+ IssueParams.ISSUES_OR_GROUPS,
+ IssueParams.ISSUE_ID,
+ GlobalParams.START,
+ GlobalParams.END,
+ GlobalParams.STATS_PERIOD,
+ GlobalParams.ENVIRONMENT,
+ OpenApiParameter(
+ name="full",
+ type=OpenApiTypes.BOOL,
+ location=OpenApiParameter.QUERY,
+ description="Specify true to include the full event body, including the stacktrace, in the event payload.",
+ required=False,
+ ),
+ OpenApiParameter(
+ name="sample",
+ type=OpenApiTypes.BOOL,
+ location=OpenApiParameter.QUERY,
+ description="Return events in pseudo-random order. This is deterministic so an identical query will always return the same events in the same order.",
+ required=False,
+ ),
+ OpenApiParameter(
+ name="query",
+ location=OpenApiParameter.QUERY,
+ type=OpenApiTypes.STR,
+ description="An optional search query for filtering events.",
+ required=False,
+ ),
+ ],
+ responses={
+ 200: inline_sentry_response_serializer(
+ "GroupEventsResponseDict", list[SimpleEventSerializerResponse]
+ ),
+ 400: RESPONSE_BAD_REQUEST,
+ 401: RESPONSE_UNAUTHORIZED,
+ 403: RESPONSE_FORBIDDEN,
+ 404: RESPONSE_NOT_FOUND,
+ },
+ examples=EventExamples.GROUP_EVENTS_SIMPLE,
+ )
def get(self, request: Request, group: Group) -> Response:
"""
- List an Issue's Events
- ``````````````````````
-
- This endpoint lists an issue's events.
- :qparam bool full: if this is set to true then the event payload will
- include the full event body, including the stacktrace.
- Set to 1 to enable.
-
- :qparam bool sample: return events in pseudo-random order. This is deterministic,
- same query will return the same events in the same order.
-
- :pparam string issue_id: the ID of the issue to retrieve.
-
- :auth: required
+ Return a list of error events bound to an issue
"""
try:
diff --git a/src/sentry/issues/endpoints/group_hashes.py b/src/sentry/issues/endpoints/group_hashes.py
index 3a881243f9c7a..afb6a1d4320f9 100644
--- a/src/sentry/issues/endpoints/group_hashes.py
+++ b/src/sentry/issues/endpoints/group_hashes.py
@@ -18,6 +18,7 @@
@region_silo_endpoint
class GroupHashesEndpoint(GroupEndpoint):
publish_status = {
+ "PUT": ApiPublishStatus.PRIVATE,
"DELETE": ApiPublishStatus.PRIVATE,
"GET": ApiPublishStatus.PRIVATE,
}
@@ -91,6 +92,41 @@ def delete(self, request: Request, group) -> Response:
return Response(status=202)
+ def put(self, request: Request, group) -> Response:
+ """
+ Perform an unmerge by reassigning events with hash values corresponding to the given
+ grouphash ids from being part of the given group to being part of a new group.
+
+ Note that if multiple grouphash ids are given, all their corresponding events will end up in
+ a single new group together, rather than each hash's events ending in their own new group.
+ """
+ grouphash_ids = request.GET.getlist("id")
+ if not grouphash_ids:
+ return Response()
+
+ grouphashes = list(
+ GroupHash.objects.filter(
+ project_id=group.project_id, group=group.id, hash__in=grouphash_ids
+ )
+ .exclude(state=GroupHash.State.LOCKED_IN_MIGRATION)
+ .values_list("hash", flat=True)
+ )
+ if not grouphashes:
+ return Response({"detail": "Already being unmerged"}, status=409)
+
+ metrics.incr(
+ "grouping.unmerge_issues",
+ sample_rate=1.0,
+ # We assume that if someone's merged groups, they were all from the same platform
+ tags={"platform": group.platform or "unknown", "sdk": group.sdk or "unknown"},
+ )
+
+ unmerge.delay(
+ group.project_id, group.id, None, grouphashes, request.user.id if request.user else None
+ )
+
+ return Response(status=202)
+
def __handle_results(self, project_id, group_id, user, results):
return [self.__handle_result(user, project_id, group_id, result) for result in results]
diff --git a/src/sentry/issues/endpoints/group_notes_details.py b/src/sentry/issues/endpoints/group_notes_details.py
index 65fb6012f2eed..7097802a0f0f0 100644
--- a/src/sentry/issues/endpoints/group_notes_details.py
+++ b/src/sentry/issues/endpoints/group_notes_details.py
@@ -84,7 +84,7 @@ def put(self, request: Request, group, note_id) -> Response:
if serializer.is_valid():
payload = serializer.validated_data
- # TODO adding mentions to a note doesn't send notifications. Should it?
+ # TODO: adding mentions to a note doesn't send notifications. Should it?
# Remove mentions as they shouldn't go into the database
payload.pop("mentions", [])
diff --git a/src/sentry/api/endpoints/group_tombstone.py b/src/sentry/issues/endpoints/group_tombstone.py
similarity index 100%
rename from src/sentry/api/endpoints/group_tombstone.py
rename to src/sentry/issues/endpoints/group_tombstone.py
diff --git a/src/sentry/api/endpoints/group_tombstone_details.py b/src/sentry/issues/endpoints/group_tombstone_details.py
similarity index 100%
rename from src/sentry/api/endpoints/group_tombstone_details.py
rename to src/sentry/issues/endpoints/group_tombstone_details.py
diff --git a/src/sentry/issues/endpoints/project_event_details.py b/src/sentry/issues/endpoints/project_event_details.py
index 83a716da203f4..518b20d67083b 100644
--- a/src/sentry/issues/endpoints/project_event_details.py
+++ b/src/sentry/issues/endpoints/project_event_details.py
@@ -10,15 +10,21 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import IssueEventSerializer, serialize
+from sentry.api.serializers.models.event import IssueEventSerializerResponse
from sentry.eventstore.models import Event, GroupEvent
+class GroupEventDetailsResponse(IssueEventSerializerResponse):
+ nextEventID: str | None
+ previousEventID: str | None
+
+
def wrap_event_response(
request_user: Any,
event: Event | GroupEvent,
environments: list[str],
include_full_release_data: bool = False,
-):
+) -> GroupEventDetailsResponse:
event_data = serialize(
event,
request_user,
diff --git a/src/sentry/issues/endpoints/project_events.py b/src/sentry/issues/endpoints/project_events.py
index b236c19271310..63e830ee08fde 100644
--- a/src/sentry/issues/endpoints/project_events.py
+++ b/src/sentry/issues/endpoints/project_events.py
@@ -2,6 +2,7 @@
from functools import partial
from django.utils import timezone
+from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
@@ -11,16 +12,22 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import EventSerializer, SimpleEventSerializer, serialize
+from sentry.api.serializers.models.event import SimpleEventSerializerResponse
+from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND, RESPONSE_UNAUTHORIZED
+from sentry.apidocs.examples.event_examples import EventExamples
+from sentry.apidocs.parameters import CursorQueryParam, GlobalParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.models.project import Project
from sentry.snuba.events import Columns
from sentry.types.ratelimit import RateLimit, RateLimitCategory
+@extend_schema(tags=["Events"])
@region_silo_endpoint
class ProjectEventsEndpoint(ProjectEndpoint):
owner = ApiOwner.ISSUES
publish_status = {
- "GET": ApiPublishStatus.EXPERIMENTAL,
+ "GET": ApiPublishStatus.PUBLIC,
}
enforce_rate_limit = True
rate_limits = {
@@ -31,26 +38,42 @@ class ProjectEventsEndpoint(ProjectEndpoint):
}
}
+ @extend_schema(
+ operation_id="List a Project's Error Events",
+ parameters=[
+ GlobalParams.ORG_ID_OR_SLUG,
+ GlobalParams.PROJECT_ID_OR_SLUG,
+ CursorQueryParam,
+ OpenApiParameter(
+ name="full",
+ description="If this is set to true, the event payload will include the full event body, including the stacktrace. Set to 1 to enable.",
+ required=False,
+ type=bool,
+ location="query",
+ default=False,
+ ),
+ OpenApiParameter(
+ name="sample",
+ description="Return events in pseudo-random order. This is deterministic so an identical query will always return the same events in the same order.",
+ required=False,
+ type=bool,
+ location="query",
+ default=False,
+ ),
+ ],
+ responses={
+ 200: inline_sentry_response_serializer(
+ "ProjectEventsResponseDict", list[SimpleEventSerializerResponse]
+ ),
+ 401: RESPONSE_UNAUTHORIZED,
+ 403: RESPONSE_FORBIDDEN,
+ 404: RESPONSE_NOT_FOUND,
+ },
+ examples=EventExamples.PROJECT_EVENTS_SIMPLE,
+ )
def get(self, request: Request, project: Project) -> Response:
"""
- List a Project's Error Events
- ```````````````````````
-
Return a list of events bound to a project.
-
- Note: This endpoint is experimental and may be removed without notice.
-
- :qparam bool full: if this is set to true then the event payload will
- include the full event body, including the stacktrace.
- Set to 1 to enable.
-
- :qparam bool sample: return events in pseudo-random order. This is deterministic,
- same query will return the same events in the same order.
-
- :pparam string organization_id_or_slug: the id or slug of the organization the
- groups belong to.
- :pparam string project_id_or_slug: the id or slug of the project the groups
- belong to.
"""
from sentry.api.paginator import GenericOffsetPaginator
diff --git a/src/sentry/api/endpoints/issues/related_issues.py b/src/sentry/issues/endpoints/related_issues.py
similarity index 100%
rename from src/sentry/api/endpoints/issues/related_issues.py
rename to src/sentry/issues/endpoints/related_issues.py
diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py
index 91887dceaa23e..37f8ab41eb16e 100644
--- a/src/sentry/issues/grouptype.py
+++ b/src/sentry/issues/grouptype.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import importlib
from collections import defaultdict
from dataclasses import dataclass, field
from datetime import timedelta
@@ -7,6 +8,7 @@
from typing import TYPE_CHECKING, Any
import sentry_sdk
+from django.apps import apps
from redis.client import StrictRedis
from rediscluster import RedisCluster
@@ -20,6 +22,10 @@
from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.users.models.user import User
+ from sentry.workflow_engine.models.detector import DetectorHandler
+import logging
+
+logger = logging.getLogger(__name__)
class GroupCategory(Enum):
@@ -30,6 +36,7 @@ class GroupCategory(Enum):
REPLAY = 5
FEEDBACK = 6
UPTIME = 7
+ METRIC_ALERT = 8
GROUP_CATEGORIES_CUSTOM_EMAIL = (
@@ -147,8 +154,10 @@ class GroupType:
enable_auto_resolve: bool = True
# Allow escalation forecasts and detection
enable_escalation_detection: bool = True
+ # Quota around many of these issue types can be created per project in a given time window
creation_quota: Quota = Quota(3600, 60, 5) # default 5 per hour, sliding window of 60 seconds
notification_config: NotificationConfig = NotificationConfig()
+ detector_handler: type[DetectorHandler] | None = None
def __init_subclass__(cls: type[GroupType], **kwargs: Any) -> None:
super().__init_subclass__(**kwargs)
@@ -627,3 +636,19 @@ def should_create_group(
else:
client.expire(key, noise_config.expiry_seconds)
return False
+
+
+def import_grouptype():
+ """
+ Ensures that grouptype.py is imported in any apps that implement it. We do this to make sure that all implemented
+ grouptypes are loaded and registered.
+ """
+ for app_config in apps.get_app_configs():
+ grouptype_module = f"{app_config.name}.grouptype"
+ try:
+ # Try to import the module
+ importlib.import_module(grouptype_module)
+ logger.debug("Imported module", extra={"module_name": grouptype_module})
+ except ModuleNotFoundError:
+ # If the module is not found, continue without any issues
+ logger.debug("No grouptypes found for app", extra={"app": app_config.name})
diff --git a/src/sentry/issues/highlights.py b/src/sentry/issues/highlights.py
index c55e9971a9b48..2e05a5aabddee 100644
--- a/src/sentry/issues/highlights.py
+++ b/src/sentry/issues/highlights.py
@@ -2,12 +2,15 @@
from collections.abc import Mapping
from typing import TypedDict
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from sentry.models.project import Project
from sentry.utils.platform_categories import BACKEND, FRONTEND, MOBILE
+@extend_schema_field(field=OpenApiTypes.OBJECT)
class HighlightContextField(serializers.Field):
def to_internal_value(self, data):
if not isinstance(data, dict):
diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py
index 5d10c2eba2440..f2b76bf7cf0a3 100644
--- a/src/sentry/issues/occurrence_consumer.py
+++ b/src/sentry/issues/occurrence_consumer.py
@@ -351,7 +351,6 @@ def _process_message(
with sentry_sdk.start_transaction(
op="_process_message",
name="issues.occurrence_consumer",
- sampled=True,
) as txn:
try:
# Messages without payload_type default to an OCCURRENCE payload
diff --git a/src/sentry/issues/ongoing.py b/src/sentry/issues/ongoing.py
index 9e4bad9234342..71318a2b09231 100644
--- a/src/sentry/issues/ongoing.py
+++ b/src/sentry/issues/ongoing.py
@@ -20,7 +20,7 @@ def bulk_transition_group_to_ongoing(
group_ids: list[int],
activity_data: Mapping[str, Any] | None = None,
) -> None:
- with sentry_sdk.start_span(description="groups_to_transistion") as span:
+ with sentry_sdk.start_span(name="groups_to_transistion") as span:
# make sure we don't update the Group when its already updated by conditionally updating the Group
groups_to_transistion = Group.objects.filter(
id__in=group_ids, status=from_status, substatus=from_substatus
@@ -28,7 +28,7 @@ def bulk_transition_group_to_ongoing(
span.set_tag("group_ids", group_ids)
span.set_tag("groups_to_transistion count", len(groups_to_transistion))
- with sentry_sdk.start_span(description="update_group_status"):
+ with sentry_sdk.start_span(name="update_group_status"):
Group.objects.update_group_status(
groups=groups_to_transistion,
status=GroupStatus.UNRESOLVED,
@@ -51,10 +51,10 @@ def bulk_transition_group_to_ongoing(
sender=bulk_transition_group_to_ongoing,
)
- with sentry_sdk.start_span(description="bulk_remove_groups_from_inbox"):
+ with sentry_sdk.start_span(name="bulk_remove_groups_from_inbox"):
bulk_remove_groups_from_inbox(groups_to_transistion)
- with sentry_sdk.start_span(description="post_save_send_robust"):
+ with sentry_sdk.start_span(name="post_save_send_robust"):
if not options.get("groups.enable-post-update-signal"):
for group in groups_to_transistion:
post_save.send_robust(
diff --git a/src/sentry/issues/run.py b/src/sentry/issues/run.py
index 0eac7116a06b1..057a7023436c3 100644
--- a/src/sentry/issues/run.py
+++ b/src/sentry/issues/run.py
@@ -48,7 +48,7 @@ def __init__(
self.pool = MultiprocessingPool(num_processes)
self.worker = None
- def crate_parallel_worker(
+ def create_parallel_worker(
self,
commit: Commit,
) -> ProcessingStrategy[KafkaPayload]:
@@ -63,7 +63,7 @@ def crate_parallel_worker(
output_block_size=self.output_block_size,
)
- def creat_batched_parallel_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]:
+ def create_batched_parallel_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]:
assert self.worker is not None
batch_processor = RunTask(
function=functools.partial(process_batch, self.worker),
@@ -81,9 +81,9 @@ def create_with_partitions(
partitions: Mapping[Partition, int],
) -> ProcessingStrategy[KafkaPayload]:
if self.batched:
- return self.creat_batched_parallel_worker(commit)
+ return self.create_batched_parallel_worker(commit)
else:
- return self.crate_parallel_worker(commit)
+ return self.create_parallel_worker(commit)
def shutdown(self) -> None:
if self.pool:
diff --git a/src/sentry/lang/native/processing.py b/src/sentry/lang/native/processing.py
index 4e6ee42b7989c..70497f056ca24 100644
--- a/src/sentry/lang/native/processing.py
+++ b/src/sentry/lang/native/processing.py
@@ -5,6 +5,7 @@
from collections.abc import Callable, Mapping
from typing import Any
+import sentry_sdk
from symbolic.debuginfo import normalize_debug_id
from symbolic.exceptions import ParseDebugIdError
@@ -287,6 +288,14 @@ def process_minidump(symbolicator: Symbolicator, data: Any) -> Any:
if _handle_response_status(data, response):
_merge_full_response(data, response)
+ # Emit Apple symbol stats
+ apple_symbol_stats = response.get("apple_symbol_stats")
+ if apple_symbol_stats:
+ try:
+ emit_apple_symbol_stats(apple_symbol_stats, data)
+ except Exception as e:
+ sentry_sdk.capture_exception(e)
+
return data
@@ -302,6 +311,14 @@ def process_applecrashreport(symbolicator: Symbolicator, data: Any) -> Any:
if _handle_response_status(data, response):
_merge_full_response(data, response)
+ # Emit Apple symbol stats
+ apple_symbol_stats = response.get("apple_symbol_stats")
+ if apple_symbol_stats:
+ try:
+ emit_apple_symbol_stats(apple_symbol_stats, data)
+ except Exception as e:
+ sentry_sdk.capture_exception(e)
+
return data
@@ -409,6 +426,14 @@ def process_native_stacktraces(symbolicator: Symbolicator, data: Any) -> Any:
if not _handle_response_status(data, response):
return data
+ # Emit Apple symbol stats
+ apple_symbol_stats = response.get("apple_symbol_stats")
+ if apple_symbol_stats:
+ try:
+ emit_apple_symbol_stats(apple_symbol_stats, data)
+ except Exception as e:
+ sentry_sdk.capture_exception(e)
+
assert len(modules) == len(response["modules"]), (modules, response)
os = get_os_from_event(data)
@@ -455,6 +480,52 @@ def process_native_stacktraces(symbolicator: Symbolicator, data: Any) -> Any:
return data
+def emit_apple_symbol_stats(apple_symbol_stats, data):
+ os_name = get_path(data, "contexts", "os", "name") or get_path(
+ data, "contexts", "os", "raw_description"
+ )
+ os_version = get_path(data, "contexts", "os", "version")
+
+ if os_version:
+ os_version = os_version.split(".", 1)[0]
+
+ if neither := apple_symbol_stats.get("neither"):
+ metrics.incr(
+ "apple_symbol_availability_v2",
+ amount=neither,
+ tags={"availability": "neither", "os_name": os_name, "os_version": os_version},
+ sample_rate=1.0,
+ )
+
+ # TODO: This seems to just be wrong
+ # We want mutual exclusion here, since we don't want to double count. E.g., an event has both symbols, so we
+ # count it both in `both` and `old` or `symx` which makes it impossible for us to know the percentage of events
+ # that matched both.
+ if both := apple_symbol_stats.get("both"):
+ metrics.incr(
+ "apple_symbol_availability_v2",
+ amount=both,
+ tags={"availability": "both", "os_name": os_name, "os_version": os_version},
+ sample_rate=1.0,
+ )
+
+ if old := apple_symbol_stats.get("old"):
+ metrics.incr(
+ "apple_symbol_availability_v2",
+ amount=old,
+ tags={"availability": "old", "os_name": os_name, "os_version": os_version},
+ sample_rate=1.0,
+ )
+
+ if symx := apple_symbol_stats.get("symx"):
+ metrics.incr(
+ "apple_symbol_availability_v2",
+ amount=symx,
+ tags={"availability": "symx", "os_name": os_name, "os_version": os_version},
+ sample_rate=1.0,
+ )
+
+
def get_native_symbolication_function(
data: Mapping[str, Any], stacktraces: list[StacktraceInfo]
) -> Callable[[Symbolicator, Any], Any] | None:
diff --git a/src/sentry/lang/native/sources.py b/src/sentry/lang/native/sources.py
index 5a2e35971f2fb..51152337173c4 100644
--- a/src/sentry/lang/native/sources.py
+++ b/src/sentry/lang/native/sources.py
@@ -17,7 +17,7 @@
from sentry import features, options
from sentry.auth.system import get_system_token
from sentry.models.project import Project
-from sentry.utils import metrics, redis, safe
+from sentry.utils import redis, safe
from sentry.utils.http import get_origins
logger = logging.getLogger(__name__)
@@ -684,7 +684,7 @@ def _process_response(json):
just have their IDs.
"""
try:
- capture_apple_symbol_stats(json)
+ collect_apple_symbol_stats(json)
except Exception as e:
sentry_sdk.capture_exception(e)
for module in json.get("modules") or ():
@@ -705,7 +705,7 @@ def _process_response(json):
return (sources, _process_response)
-def capture_apple_symbol_stats(json):
+def collect_apple_symbol_stats(json):
eligible_symbols = 0
neither_has_symbol = 0
both_have_symbol = 0
@@ -748,33 +748,11 @@ def capture_apple_symbol_stats(json):
# now, we are only interested in rough numbers.
if eligible_symbols:
- metrics.incr(
- "apple_symbol_availability_v2",
- amount=neither_has_symbol,
- tags={"availability": "neither"},
- sample_rate=1.0,
- )
-
- # We want mutual exclusion here, since we don't want to double count. E.g., an event has both symbols, so we
- # count it both in `both` and `old` or `symx` which makes it impossible for us to know the percentage of events
- # that matched both.
- if both_have_symbol:
- metrics.incr(
- "apple_symbol_availability_v2",
- amount=both_have_symbol,
- tags={"availability": "both"},
- sample_rate=1.0,
- )
- else:
- metrics.incr(
- "apple_symbol_availability_v2",
- amount=old_has_symbol,
- tags={"availability": "old"},
- sample_rate=1.0,
- )
- metrics.incr(
- "apple_symbol_availability_v2",
- amount=symx_has_symbol,
- tags={"availability": "symx"},
- sample_rate=1.0,
- )
+ apple_symbol_stats = {
+ "both": both_have_symbol,
+ "neither": neither_has_symbol,
+ "symx": symx_has_symbol,
+ "old": old_has_symbol,
+ }
+
+ json["apple_symbol_stats"] = apple_symbol_stats
diff --git a/src/sentry/mail/notifications.py b/src/sentry/mail/notifications.py
index 208741ea872a7..23df661b3c9ba 100644
--- a/src/sentry/mail/notifications.py
+++ b/src/sentry/mail/notifications.py
@@ -2,7 +2,7 @@
import logging
from collections.abc import Iterable, Mapping, MutableMapping
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, TypeVar
import orjson
import sentry_sdk
@@ -17,6 +17,7 @@
from sentry.notifications.notify import register_notification_provider
from sentry.notifications.types import UnsubscribeContext
from sentry.types.actor import Actor
+from sentry.users.models.user import User
from sentry.utils.email import MessageBuilder, group_id_to_email
from sentry.utils.linksign import generate_signed_unsubscribe_link
@@ -79,7 +80,7 @@ def _log_message(notification: BaseNotification, recipient: Actor) -> None:
def get_context(
notification: BaseNotification,
- recipient: Actor | Team | RpcUser,
+ recipient: Actor | Team | RpcUser | User,
shared_context: Mapping[str, Any],
extra_context: Mapping[str, Any],
) -> Mapping[str, Any]:
@@ -113,20 +114,20 @@ def send_notification_as_email(
) -> None:
for recipient in recipients:
recipient_actor = Actor.from_object(recipient)
- with sentry_sdk.start_span(op="notification.send_email", description="one_recipient"):
+ with sentry_sdk.start_span(op="notification.send_email", name="one_recipient"):
if recipient_actor.is_team:
# TODO(mgaeta): MessageBuilder only works with Users so filter out Teams for now.
continue
_log_message(notification, recipient_actor)
- with sentry_sdk.start_span(op="notification.send_email", description="build_message"):
+ with sentry_sdk.start_span(op="notification.send_email", name="build_message"):
msg = MessageBuilder(
**get_builder_args(
notification, recipient_actor, shared_context, extra_context_by_actor
)
)
- with sentry_sdk.start_span(op="notification.send_email", description="send_message"):
+ with sentry_sdk.start_span(op="notification.send_email", name="send_message"):
# TODO: find better way of handling this
add_users_kwargs = {}
if isinstance(notification, ProjectNotification):
@@ -136,11 +137,14 @@ def send_notification_as_email(
notification.record_notification_sent(recipient_actor, ExternalProviders.EMAIL)
+RecipientT = TypeVar("RecipientT", Actor, User)
+
+
def get_builder_args(
notification: BaseNotification,
- recipient: Actor,
+ recipient: RecipientT,
shared_context: Mapping[str, Any] | None = None,
- extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None = None,
+ extra_context_by_actor: Mapping[RecipientT, Mapping[str, Any]] | None = None,
) -> Mapping[str, Any]:
# TODO: move context logic to single notification class method
extra_context = (
diff --git a/src/sentry/mediators/__init__.py b/src/sentry/mediators/__init__.py
index a8ff0caba1639..57874a2ed719f 100644
--- a/src/sentry/mediators/__init__.py
+++ b/src/sentry/mediators/__init__.py
@@ -1,6 +1,4 @@
from .mediator import Mediator # NOQA
from .param import Param # NOQA
-from .sentry_app_installations import * # NOQA
-from .token_exchange.grant_exchanger import GrantExchanger # noqa: F401
from .token_exchange.refresher import Refresher # noqa: F401
from .token_exchange.util import AUTHORIZATION, REFRESH, GrantTypes # noqa: F401
diff --git a/src/sentry/mediators/alert_rule_actions/__init__.py b/src/sentry/mediators/alert_rule_actions/__init__.py
deleted file mode 100644
index 77138e849b32d..0000000000000
--- a/src/sentry/mediators/alert_rule_actions/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .creator import AlertRuleActionCreator
-
-__all__ = ("AlertRuleActionCreator",)
diff --git a/src/sentry/mediators/external_issues/__init__.py b/src/sentry/mediators/external_issues/__init__.py
deleted file mode 100644
index 9b781da85a84f..0000000000000
--- a/src/sentry/mediators/external_issues/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .creator import Creator # NOQA
-from .issue_link_creator import IssueLinkCreator # NOQA
diff --git a/src/sentry/mediators/external_issues/creator.py b/src/sentry/mediators/external_issues/creator.py
deleted file mode 100644
index 8ab273bcbc352..0000000000000
--- a/src/sentry/mediators/external_issues/creator.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from html import escape
-
-from django.db import router
-
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
-from sentry.models.group import Group
-from sentry.models.platformexternalissue import PlatformExternalIssue
-from sentry.sentry_apps.services.app import RpcSentryAppInstallation
-
-
-class Creator(Mediator):
- install = Param(RpcSentryAppInstallation)
- group = Param(Group)
- web_url = Param(str)
- project = Param(str)
- identifier = Param(str)
- using = router.db_for_write(PlatformExternalIssue)
-
- def call(self):
- self._create_external_issue()
- return self.external_issue
-
- def _create_external_issue(self):
- display_name = f"{escape(self.project)}#{escape(self.identifier)}"
- self.external_issue = PlatformExternalIssue.objects.create(
- group_id=self.group.id,
- project_id=self.group.project_id,
- service_type=self.install.sentry_app.slug,
- display_name=display_name,
- web_url=self.web_url,
- )
diff --git a/src/sentry/mediators/external_issues/issue_link_creator.py b/src/sentry/mediators/external_issues/issue_link_creator.py
deleted file mode 100644
index ac23160fc7e81..0000000000000
--- a/src/sentry/mediators/external_issues/issue_link_creator.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from django.db import router
-from django.utils.functional import cached_property
-
-from sentry.coreapi import APIUnauthorized
-from sentry.mediators.external_issues.creator import Creator
-from sentry.mediators.external_requests.issue_link_requester import IssueLinkRequester
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
-from sentry.models.group import Group
-from sentry.models.platformexternalissue import PlatformExternalIssue
-from sentry.sentry_apps.services.app import RpcSentryAppInstallation
-from sentry.users.services.user import RpcUser
-
-
-class IssueLinkCreator(Mediator):
- install = Param(RpcSentryAppInstallation)
- group = Param(Group)
- action = Param(str)
- fields = Param(object)
- uri = Param(str)
- user = Param(RpcUser)
- using = router.db_for_write(PlatformExternalIssue)
-
- def call(self):
- self._verify_action()
- self._make_external_request()
- self._create_external_issue()
- return self.external_issue
-
- def _verify_action(self):
- if self.action not in ["link", "create"]:
- raise APIUnauthorized(f"Invalid action '{self.action}'")
-
- def _make_external_request(self):
- self.response = IssueLinkRequester.run(
- install=self.install,
- uri=self.uri,
- group=self.group,
- fields=self.fields,
- user=self.user,
- action=self.action,
- )
-
- def _create_external_issue(self):
- self.external_issue = Creator.run(
- install=self.install,
- group=self.group,
- web_url=self.response["webUrl"],
- project=self.response["project"],
- identifier=self.response["identifier"],
- )
-
- @cached_property
- def sentry_app(self):
- return self.install.sentry_app
diff --git a/src/sentry/mediators/external_requests/__init__.py b/src/sentry/mediators/external_requests/__init__.py
deleted file mode 100644
index f534bc2263506..0000000000000
--- a/src/sentry/mediators/external_requests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .alert_rule_action_requester import AlertRuleActionRequester # NOQA
-from .issue_link_requester import IssueLinkRequester # NOQA
-from .select_requester import SelectRequester # NOQA
diff --git a/src/sentry/mediators/sentry_app_installations/__init__.py b/src/sentry/mediators/sentry_app_installations/__init__.py
deleted file mode 100644
index 2369e8abea593..0000000000000
--- a/src/sentry/mediators/sentry_app_installations/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .installation_notifier import InstallationNotifier # NOQA
-from .updater import Updater # NOQA
diff --git a/src/sentry/mediators/sentry_app_installations/installation_notifier.py b/src/sentry/mediators/sentry_app_installations/installation_notifier.py
deleted file mode 100644
index bb37880dfcda9..0000000000000
--- a/src/sentry/mediators/sentry_app_installations/installation_notifier.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from django.db import router
-from django.utils.functional import cached_property
-
-from sentry.api.serializers import AppPlatformEvent, SentryAppInstallationSerializer, serialize
-from sentry.coreapi import APIUnauthorized
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
-from sentry.models.apigrant import ApiGrant
-from sentry.sentry_apps.models.sentry_app import SentryApp
-from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-from sentry.users.services.user.model import RpcUser
-from sentry.utils.sentry_apps import send_and_save_webhook_request
-
-
-class InstallationNotifier(Mediator):
- install = Param(SentryAppInstallation)
- user = Param(RpcUser)
- action = Param(str)
- using = router.db_for_write(SentryAppInstallation)
-
- def call(self) -> None:
- self._verify_action()
- self._send_webhook()
-
- def _verify_action(self) -> None:
- if self.action not in ["created", "deleted"]:
- raise APIUnauthorized(f"Invalid action '{self.action}'")
-
- def _send_webhook(self) -> None:
- send_and_save_webhook_request(self.sentry_app, self.request)
-
- @property
- def request(self) -> AppPlatformEvent:
- data = serialize(
- [self.install],
- user=self.user,
- serializer=SentryAppInstallationSerializer(),
- is_webhook=True,
- )[0]
- return AppPlatformEvent(
- resource="installation",
- action=self.action,
- install=self.install,
- data={"installation": data},
- actor=self.user,
- )
-
- @cached_property
- def sentry_app(self) -> SentryApp:
- return self.install.sentry_app
-
- @cached_property
- def api_grant(self) -> ApiGrant | None:
- return self.install.api_grant_id and self.install.api_grant
diff --git a/src/sentry/mediators/sentry_app_installations/updater.py b/src/sentry/mediators/sentry_app_installations/updater.py
deleted file mode 100644
index 4a6fac446b8c2..0000000000000
--- a/src/sentry/mediators/sentry_app_installations/updater.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from django.db import router
-
-from sentry import analytics
-from sentry.constants import SentryAppInstallationStatus
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
-from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-from sentry.sentry_apps.services.app import RpcSentryAppInstallation
-
-
-class Updater(Mediator):
- sentry_app_installation = Param(RpcSentryAppInstallation)
- status = Param(str, required=False)
- using = router.db_for_write(SentryAppInstallation)
-
- def call(self):
- self._update_status()
- return self.sentry_app_installation
-
- def _update_status(self):
- # convert from string to integer
- if self.status == SentryAppInstallationStatus.INSTALLED_STR:
- for install in SentryAppInstallation.objects.filter(id=self.sentry_app_installation.id):
- install.update(status=SentryAppInstallationStatus.INSTALLED)
-
- def record_analytics(self):
- analytics.record(
- "sentry_app_installation.updated",
- sentry_app_installation_id=self.sentry_app_installation.id,
- sentry_app_id=self.sentry_app_installation.sentry_app.id,
- organization_id=self.sentry_app_installation.organization_id,
- )
diff --git a/src/sentry/mediators/token_exchange/__init__.py b/src/sentry/mediators/token_exchange/__init__.py
index 6ce401b0e890f..84bcc14774369 100644
--- a/src/sentry/mediators/token_exchange/__init__.py
+++ b/src/sentry/mediators/token_exchange/__init__.py
@@ -1,4 +1,3 @@
-from .grant_exchanger import GrantExchanger # NOQA
from .refresher import Refresher # NOQA
from .util import AUTHORIZATION, REFRESH, GrantTypes, token_expiration # NOQA
from .validator import Validator # NOQA
diff --git a/src/sentry/middleware/devtoolbar.py b/src/sentry/middleware/devtoolbar.py
new file mode 100644
index 0000000000000..0d1ef60c8e011
--- /dev/null
+++ b/src/sentry/middleware/devtoolbar.py
@@ -0,0 +1,63 @@
+import logging
+
+from django.http import HttpRequest, HttpResponse
+
+from sentry import analytics, options
+from sentry.utils.http import origin_from_request
+from sentry.utils.http import query_string as get_query_string
+from sentry.utils.urls import parse_id_or_slug_param
+
+logger = logging.getLogger(__name__)
+
+
+class DevToolbarAnalyticsMiddleware:
+ def __init__(self, get_response):
+ self.get_response = get_response
+
+ def __call__(self, request):
+ response = self.get_response(request)
+ try:
+ # Note ordering of conditions to reduce option queries. GET contains the query params, regardless of method.
+ if request.GET.get("queryReferrer") == "devtoolbar" and options.get(
+ "devtoolbar.analytics.enabled"
+ ):
+ _record_api_request(request, response)
+ except Exception:
+ logger.exception("devtoolbar: exception while recording api analytics event.")
+
+ return response
+
+
+def _record_api_request(request: HttpRequest, response: HttpResponse) -> None:
+ resolver_match = request.resolver_match
+ if resolver_match is None:
+ raise ValueError(f"Request URL not resolved: {request.path_info}")
+
+ kwargs, route, view_name = (
+ resolver_match.kwargs,
+ resolver_match.route,
+ resolver_match.view_name,
+ )
+
+ org_id_or_slug = kwargs.get("organization_id_or_slug", kwargs.get("organization_slug"))
+ org_id, org_slug = parse_id_or_slug_param(org_id_or_slug)
+ project_id_or_slug = kwargs.get("project_id_or_slug")
+ project_id, project_slug = parse_id_or_slug_param(project_id_or_slug)
+
+ origin = origin_from_request(request)
+ query_string: str = get_query_string(request) # starts with ? if non-empty
+
+ analytics.record(
+ "devtoolbar.api_request",
+ view_name=view_name,
+ route=route,
+ query_string=query_string,
+ origin=origin,
+ method=request.method,
+ status_code=response.status_code,
+ organization_id=org_id or None,
+ organization_slug=org_slug,
+ project_id=project_id or None,
+ project_slug=project_slug,
+ user_id=request.user.id if hasattr(request, "user") and request.user else None,
+ )
diff --git a/src/sentry/middleware/integrations/parsers/slack.py b/src/sentry/middleware/integrations/parsers/slack.py
index 7c39ef9a34e95..e8c12815cbd9e 100644
--- a/src/sentry/middleware/integrations/parsers/slack.py
+++ b/src/sentry/middleware/integrations/parsers/slack.py
@@ -11,7 +11,6 @@
from rest_framework.request import Request
from slack_sdk.errors import SlackApiError
-from sentry import options
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.integrations.middleware.hybrid_cloud.parser import (
BaseRequestParser,
@@ -186,10 +185,7 @@ def get_async_region_response(self, regions: Sequence[Region]) -> HttpResponseBa
# if we are able to send a response to Slack from control itself to beat the 3 second timeout, we should do so
try:
- if (
- options.get("send-slack-response-from-control-silo")
- and self.action_option in CONTROL_RESPONSE_ACTIONS
- ):
+ if self.action_option in CONTROL_RESPONSE_ACTIONS:
CONTROL_RESPONSE_ACTIONS[self.action_option](self.request, self.action_option)
except ValueError:
logger.exception(
diff --git a/src/sentry/middleware/locale.py b/src/sentry/middleware/locale.py
index a4c62acf16d7a..b9b769431cb2c 100644
--- a/src/sentry/middleware/locale.py
+++ b/src/sentry/middleware/locale.py
@@ -8,7 +8,7 @@
class SentryLocaleMiddleware(LocaleMiddleware):
def process_request(self, request: HttpRequest) -> None:
- with sentry_sdk.start_span(op="middleware.locale", description="process_request"):
+ with sentry_sdk.start_span(op="middleware.locale", name="process_request"):
# No locale for static media
# This avoids touching user session, which means we avoid
# setting `Vary: Cookie` as a response header which will
diff --git a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py
index 76ce0088bbaed..5592f4a25a27a 100644
--- a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py
+++ b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py
@@ -26,12 +26,12 @@
import sentry.db.models.fields.text
import sentry.db.models.fields.uuid
import sentry.db.models.indexes
+import sentry.deletions.models.scheduleddeletion
import sentry.models.apiapplication
import sentry.models.apigrant
import sentry.models.apitoken
import sentry.models.broadcast
import sentry.models.groupshare
-import sentry.models.scheduledeletion
import sentry.sentry_apps.models.sentry_app
import sentry.sentry_apps.models.sentry_app_installation
import sentry.sentry_apps.models.servicehook
@@ -2162,7 +2162,7 @@ class Migration(CheckedMigration):
(
"guid",
models.CharField(
- default=sentry.models.scheduledeletion.default_guid,
+ default=sentry.deletions.models.scheduleddeletion.default_guid,
max_length=32,
unique=True,
),
@@ -2174,7 +2174,7 @@ class Migration(CheckedMigration):
(
"date_scheduled",
models.DateTimeField(
- default=sentry.models.scheduledeletion.default_date_schedule
+ default=sentry.deletions.models.scheduleddeletion.default_date_schedule
),
),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
@@ -8993,7 +8993,7 @@ class Migration(CheckedMigration):
(
"guid",
models.CharField(
- default=sentry.models.scheduledeletion.default_guid,
+ default=sentry.deletions.models.scheduleddeletion.default_guid,
max_length=32,
unique=True,
),
@@ -9005,7 +9005,7 @@ class Migration(CheckedMigration):
(
"date_scheduled",
models.DateTimeField(
- default=sentry.models.scheduledeletion.default_date_schedule
+ default=sentry.deletions.models.scheduleddeletion.default_date_schedule
),
),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
diff --git a/src/sentry/migrations/0507_delete_pending_deletion_rules.py b/src/sentry/migrations/0507_delete_pending_deletion_rules.py
index 03920d2d91164..3fce0584bc82e 100644
--- a/src/sentry/migrations/0507_delete_pending_deletion_rules.py
+++ b/src/sentry/migrations/0507_delete_pending_deletion_rules.py
@@ -11,8 +11,8 @@
from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
if TYPE_CHECKING:
+ from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.models.rule import Rule
- from sentry.models.scheduledeletion import RegionScheduledDeletion
class ObjectStatus:
diff --git a/src/sentry/migrations/0515_slugify_invalid_monitors.py b/src/sentry/migrations/0515_slugify_invalid_monitors.py
index 693701cf440b8..556d126238907 100644
--- a/src/sentry/migrations/0515_slugify_invalid_monitors.py
+++ b/src/sentry/migrations/0515_slugify_invalid_monitors.py
@@ -13,8 +13,8 @@
from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
if TYPE_CHECKING:
+ from sentry.deletions.models.scheduleddeletion import BaseScheduledDeletion
from sentry.models.rule import Rule
- from sentry.models.scheduledeletion import BaseScheduledDeletion
from sentry.monitors.models import Monitor
diff --git a/src/sentry/migrations/0766_fix_substatus_for_pending_merge.py b/src/sentry/migrations/0766_fix_substatus_for_pending_merge.py
new file mode 100644
index 0000000000000..b134194b7683f
--- /dev/null
+++ b/src/sentry/migrations/0766_fix_substatus_for_pending_merge.py
@@ -0,0 +1,53 @@
+# Generated by Django 5.1.1 on 2024-09-24 17:28
+
+from django.apps.registry import Apps
+from django.db import migrations
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.utils.query import RangeQuerySetWrapperWithProgressBarApprox
+
+
+class GroupStatus:
+ PENDING_MERGE = 5
+
+
+# End copy
+
+
+def fix_substatus_for_pending_merge(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
+ Group = apps.get_model("sentry", "Group")
+
+ for group in RangeQuerySetWrapperWithProgressBarApprox(
+ Group.objects.filter(status=GroupStatus.PENDING_MERGE, substatus__isnull=False)
+ ):
+ group.substatus = None
+ group.save(update_fields=["substatus"])
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = True
+
+ dependencies = [
+ ("sentry", "0765_add_org_to_api_auth"),
+ ]
+
+ operations = [
+ migrations.RunPython(
+ fix_substatus_for_pending_merge,
+ migrations.RunPython.noop,
+ hints={"tables": ["sentry_groupedmessage", "sentry_grouphistory"]},
+ ),
+ ]
diff --git a/src/sentry/migrations/0767_add_selected_aggregate_to_dashboards_widget_query.py b/src/sentry/migrations/0767_add_selected_aggregate_to_dashboards_widget_query.py
new file mode 100644
index 0000000000000..84c2064cefac4
--- /dev/null
+++ b/src/sentry/migrations/0767_add_selected_aggregate_to_dashboards_widget_query.py
@@ -0,0 +1,33 @@
+# Generated by Django 5.1.1 on 2024-09-24 19:39
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0766_fix_substatus_for_pending_merge"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="dashboardwidgetquery",
+ name="selected_aggregate",
+ field=models.IntegerField(null=True),
+ ),
+ ]
diff --git a/src/sentry/migrations/0768_fix_old_group_first_seen_dates.py b/src/sentry/migrations/0768_fix_old_group_first_seen_dates.py
new file mode 100644
index 0000000000000..5f2637e9dfd88
--- /dev/null
+++ b/src/sentry/migrations/0768_fix_old_group_first_seen_dates.py
@@ -0,0 +1,47 @@
+# Generated by Django 5.1.1 on 2024-09-24 20:28
+
+from datetime import datetime, timezone
+
+from django.apps.registry import Apps
+from django.db import migrations
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+OLD_FIRST_SEEN_CUTOFF = datetime(2000, 1, 1, tzinfo=timezone.utc)
+
+
+def update_old_first_seen_dates(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
+ Group = apps.get_model("sentry", "Group")
+
+ for group in Group.objects.filter(first_seen__lt=OLD_FIRST_SEEN_CUTOFF):
+ group.first_seen = group.active_at
+ group.save(update_fields=["first_seen"])
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = True
+
+ dependencies = [
+ ("sentry", "0767_add_selected_aggregate_to_dashboards_widget_query"),
+ ]
+
+ operations = [
+ migrations.RunPython(
+ update_old_first_seen_dates,
+ migrations.RunPython.noop,
+ hints={"tables": ["sentry_groupedmessage"]},
+ ),
+ ]
diff --git a/src/sentry/migrations/0769_add_seer_fields_to_grouphash_metadata.py b/src/sentry/migrations/0769_add_seer_fields_to_grouphash_metadata.py
new file mode 100644
index 0000000000000..b623da7f2fa36
--- /dev/null
+++ b/src/sentry/migrations/0769_add_seer_fields_to_grouphash_metadata.py
@@ -0,0 +1,60 @@
+# Generated by Django 5.1.1 on 2024-09-27 21:29
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+import sentry.db.models.fields.foreignkey
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0768_fix_old_group_first_seen_dates"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="grouphashmetadata",
+ name="seer_date_sent",
+ field=models.DateTimeField(null=True),
+ ),
+ migrations.AddField(
+ model_name="grouphashmetadata",
+ name="seer_event_sent",
+ field=models.CharField(max_length=32, null=True),
+ ),
+ migrations.AddField(
+ model_name="grouphashmetadata",
+ name="seer_match_distance",
+ field=models.FloatField(null=True),
+ ),
+ migrations.AddField(
+ model_name="grouphashmetadata",
+ name="seer_matched_grouphash",
+ field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.DO_NOTHING,
+ related_name="seer_matchees",
+ to="sentry.grouphash",
+ ),
+ ),
+ migrations.AddField(
+ model_name="grouphashmetadata",
+ name="seer_model",
+ field=models.CharField(null=True),
+ ),
+ ]
diff --git a/src/sentry/migrations/0770_increase_project_slug_max_length.py b/src/sentry/migrations/0770_increase_project_slug_max_length.py
new file mode 100644
index 0000000000000..c131b8b6fe76a
--- /dev/null
+++ b/src/sentry/migrations/0770_increase_project_slug_max_length.py
@@ -0,0 +1,34 @@
+# Generated by Django 5.1.1 on 2024-09-30 19:46
+
+from django.db import migrations
+
+import sentry.db.models.fields.slug
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = True
+
+ dependencies = [
+ ("sentry", "0769_add_seer_fields_to_grouphash_metadata"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="project",
+ name="slug",
+ field=sentry.db.models.fields.slug.SentrySlugField(max_length=100, null=True),
+ ),
+ ]
diff --git a/src/sentry/migrations/0771_add_grouping_config_to_grouphash_metadata.py b/src/sentry/migrations/0771_add_grouping_config_to_grouphash_metadata.py
new file mode 100644
index 0000000000000..48bfb3c9b48d4
--- /dev/null
+++ b/src/sentry/migrations/0771_add_grouping_config_to_grouphash_metadata.py
@@ -0,0 +1,33 @@
+# Generated by Django 5.1.1 on 2024-10-01 02:06
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0770_increase_project_slug_max_length"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="grouphashmetadata",
+ name="latest_grouping_config",
+ field=models.CharField(null=True),
+ ),
+ ]
diff --git a/src/sentry/migrations/0772_backfill_grouphash_metadata_grouping_config.py b/src/sentry/migrations/0772_backfill_grouphash_metadata_grouping_config.py
new file mode 100644
index 0000000000000..6e2474e507c8f
--- /dev/null
+++ b/src/sentry/migrations/0772_backfill_grouphash_metadata_grouping_config.py
@@ -0,0 +1,45 @@
+# Generated by Django 5.1.1 on 2024-10-01 00:47
+
+from django.apps.registry import Apps
+from django.db import migrations
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+DEFAULT_GROUPING_CONFIG = "newstyle:2023-01-11"
+
+
+def fill_in_missing_grouping_config(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
+ GroupHashMetadata = apps.get_model("sentry", "GroupHashMetadata")
+
+ for gh_metadata in GroupHashMetadata.objects.filter(latest_grouping_config=None):
+ gh_metadata.latest_grouping_config = DEFAULT_GROUPING_CONFIG
+ gh_metadata.save(update_fields=["latest_grouping_config"])
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0771_add_grouping_config_to_grouphash_metadata"),
+ ]
+
+ operations = [
+ migrations.RunPython(
+ fill_in_missing_grouping_config,
+ migrations.RunPython.noop,
+            hints={"tables": ["sentry_grouphashmetadata"]},
+ ),
+ ]
diff --git a/src/sentry/migrations/0773_make_group_score_nullable.py b/src/sentry/migrations/0773_make_group_score_nullable.py
new file mode 100644
index 0000000000000..7bad3299992d0
--- /dev/null
+++ b/src/sentry/migrations/0773_make_group_score_nullable.py
@@ -0,0 +1,34 @@
+# Generated by Django 5.1.1 on 2024-10-08 16:00
+
+from django.db import migrations
+
+import sentry.db.models.fields.bounded
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0772_backfill_grouphash_metadata_grouping_config"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="group",
+ name="score",
+ field=sentry.db.models.fields.bounded.BoundedIntegerField(default=0, null=True),
+ ),
+ ]
diff --git a/src/sentry/migrations/0774_drop_group_score_in_state_only.py b/src/sentry/migrations/0774_drop_group_score_in_state_only.py
new file mode 100644
index 0000000000000..1a4295bcf78b8
--- /dev/null
+++ b/src/sentry/migrations/0774_drop_group_score_in_state_only.py
@@ -0,0 +1,34 @@
+# Generated by Django 5.1.1 on 2024-10-09 15:39
+
+from django.db import migrations
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0773_make_group_score_nullable"),
+ ]
+
+ operations = [
+ migrations.SeparateDatabaseAndState(
+ database_operations=[],
+ state_operations=[
+ migrations.RemoveField(model_name="group", name="score"),
+ ],
+ )
+ ]
diff --git a/src/sentry/migrations/0775_add_dashboard_permissions_model.py b/src/sentry/migrations/0775_add_dashboard_permissions_model.py
new file mode 100644
index 0000000000000..ac35c3644f555
--- /dev/null
+++ b/src/sentry/migrations/0775_add_dashboard_permissions_model.py
@@ -0,0 +1,51 @@
+# Generated by Django 5.1.1 on 2024-10-10 18:10
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+import sentry.db.models.fields.bounded
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0774_drop_group_score_in_state_only"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="DashboardPermissions",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("is_creator_only_editable", models.BooleanField(default=False)),
+ (
+ "dashboard",
+ models.OneToOneField(
+ on_delete=django.db.models.deletion.CASCADE, to="sentry.dashboard"
+ ),
+ ),
+ ],
+ options={
+ "db_table": "sentry_dashboardpermissions",
+ },
+ ),
+ ]
diff --git a/src/sentry/migrations/0776_drop_group_score_in_database.py b/src/sentry/migrations/0776_drop_group_score_in_database.py
new file mode 100644
index 0000000000000..53ed5f75b6f0d
--- /dev/null
+++ b/src/sentry/migrations/0776_drop_group_score_in_database.py
@@ -0,0 +1,42 @@
+# Generated by Django 5.1.1 on 2024-10-10 20:34
+
+from django.db import migrations
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = True
+
+ dependencies = [
+ ("sentry", "0775_add_dashboard_permissions_model"),
+ ]
+
+ operations = [
+ migrations.SeparateDatabaseAndState(
+ database_operations=[
+ migrations.RunSQL(
+ """
+ ALTER TABLE "sentry_groupedmessage" DROP COLUMN "score";
+ """,
+ reverse_sql="""
+ ALTER TABLE "sentry_groupedmessage" ADD COLUMN "score" int NULL;
+ """,
+ hints={"tables": ["sentry_groupedmessage"]},
+ )
+ ],
+ state_operations=[],
+ )
+ ]
diff --git a/src/sentry/migrations/0777_add_related_name_to_dashboard_permissions.py b/src/sentry/migrations/0777_add_related_name_to_dashboard_permissions.py
new file mode 100644
index 0000000000000..4617ea31099ea
--- /dev/null
+++ b/src/sentry/migrations/0777_add_related_name_to_dashboard_permissions.py
@@ -0,0 +1,38 @@
+# Generated by Django 5.1.1 on 2024-10-15 18:09
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0776_drop_group_score_in_database"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="dashboardpermissions",
+ name="dashboard",
+ field=models.OneToOneField(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="permissions",
+ to="sentry.dashboard",
+ ),
+ ),
+ ]
diff --git a/src/sentry/models/__init__.py b/src/sentry/models/__init__.py
index f300d344c9a82..cf171644ad67a 100644
--- a/src/sentry/models/__init__.py
+++ b/src/sentry/models/__init__.py
@@ -1,3 +1,4 @@
+from ..sentry_apps.models.platformexternalissue import * # NOQA
from .activity import * # NOQA
from .apiapplication import * # NOQA
from .apiauthorization import * # NOQA
@@ -19,6 +20,7 @@
from .commitfilechange import CommitFileChange # noqa
from .counter import * # NOQA
from .dashboard import * # NOQA
+from .dashboard_permissions import * # NOQA
from .dashboard_widget import * # NOQA
from .debugfile import * # NOQA
from .deletedentry import * # NOQA
@@ -75,7 +77,6 @@
from .organizationslugreservation import * # NOQA
from .organizationslugreservationreplica import * # NOQA
from .orgauthtoken import * # NOQA
-from .platformexternalissue import * # NOQA
from .project import * # NOQA
from .projectbookmark import * # NOQA
from .projectcodeowners import * # NOQA
@@ -104,7 +105,6 @@
from .rulefirehistory import RuleFireHistory # NOQA
from .rulesnooze import RuleSnooze # NOQA
from .savedsearch import * # NOQA
-from .scheduledeletion import * # NOQA
from .search_common import * # NOQA
from .sentryshot import * # NOQA
from .sourcemapprocessingissue import * # NOQA
diff --git a/src/sentry/models/dashboard.py b/src/sentry/models/dashboard.py
index 80c35e5709831..5252a0321d277 100644
--- a/src/sentry/models/dashboard.py
+++ b/src/sentry/models/dashboard.py
@@ -186,9 +186,11 @@ def get_prebuilt_dashboards(organization, user) -> list[dict[str, Any]]:
"queries": [
{
"name": "Known Users",
- "conditions": "has:user.email"
- if has_discover_split
- else "has:user.email !event.type:transaction",
+ "conditions": (
+ "has:user.email"
+ if has_discover_split
+ else "has:user.email !event.type:transaction"
+ ),
"fields": ["count_unique(user)"],
"aggregates": ["count_unique(user)"],
"columns": [],
@@ -196,9 +198,11 @@ def get_prebuilt_dashboards(organization, user) -> list[dict[str, Any]]:
},
{
"name": "Anonymous Users",
- "conditions": "!has:user.email"
- if has_discover_split
- else "!has:user.email !event.type:transaction",
+ "conditions": (
+ "!has:user.email"
+ if has_discover_split
+ else "!has:user.email !event.type:transaction"
+ ),
"fields": ["count_unique(user)"],
"aggregates": ["count_unique(user)"],
"columns": [],
@@ -238,9 +242,11 @@ def get_prebuilt_dashboards(organization, user) -> list[dict[str, Any]]:
"queries": [
{
"name": "Error counts",
- "conditions": "has:geo.country_code"
- if has_discover_split
- else "has:geo.country_code !event.type:transaction",
+ "conditions": (
+ "has:geo.country_code"
+ if has_discover_split
+ else "has:geo.country_code !event.type:transaction"
+ ),
"fields": ["geo.country_code", "geo.region", "count()"],
"aggregates": ["count()"],
"columns": ["geo.country_code", "geo.region"],
@@ -256,9 +262,11 @@ def get_prebuilt_dashboards(organization, user) -> list[dict[str, Any]]:
"queries": [
{
"name": "",
- "conditions": "has:browser.name"
- if has_discover_split
- else "has:browser.name !event.type:transaction",
+ "conditions": (
+ "has:browser.name"
+ if has_discover_split
+ else "has:browser.name !event.type:transaction"
+ ),
"fields": ["browser.name", "count()"],
"aggregates": ["count()"],
"columns": ["browser.name"],
diff --git a/src/sentry/models/dashboard_permissions.py b/src/sentry/models/dashboard_permissions.py
new file mode 100644
index 0000000000000..56d98180b9963
--- /dev/null
+++ b/src/sentry/models/dashboard_permissions.py
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+from django.db import models
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import Model, region_silo_model
+from sentry.db.models.base import sane_repr
+
+
+@region_silo_model
+class DashboardPermissions(Model):
+ """
+ Edit permissions for a Dashboard.
+ """
+
+ __relocation_scope__ = RelocationScope.Organization
+
+ is_creator_only_editable = models.BooleanField(default=False)
+ dashboard = models.OneToOneField(
+ "sentry.Dashboard", on_delete=models.CASCADE, related_name="permissions"
+ )
+
+ class Meta:
+ app_label = "sentry"
+ db_table = "sentry_dashboardpermissions"
+
+ __repr__ = sane_repr("is_creator_only_editable")
diff --git a/src/sentry/models/dashboard_widget.py b/src/sentry/models/dashboard_widget.py
index bb375e7cfc79a..0d8fe3d21c3ae 100644
--- a/src/sentry/models/dashboard_widget.py
+++ b/src/sentry/models/dashboard_widget.py
@@ -96,6 +96,14 @@ class DatasetSourcesTypes(Enum):
Was an ambiguous dataset forced to split (i.e. we picked a default)
"""
FORCED = 3
+ """
+ Dataset inferred by split script, version 1
+ """
+ SPLIT_VERSION_1 = 4
+ """
+ Dataset inferred by split script, version 2
+ """
+ SPLIT_VERSION_2 = 5
@classmethod
def as_choices(cls):
@@ -161,6 +169,8 @@ class DashboardWidgetQuery(Model):
date_modified = models.DateTimeField(default=timezone.now)
# Whether this query is hidden from the UI, used by metric widgets
is_hidden = models.BooleanField(default=False)
+    # Used by Big Number to select the aggregate displayed
+ selected_aggregate = models.IntegerField(null=True)
class Meta:
app_label = "sentry"
diff --git a/src/sentry/models/files/abstractfile.py b/src/sentry/models/files/abstractfile.py
index 2bca88fc73448..e05ded4949002 100644
--- a/src/sentry/models/files/abstractfile.py
+++ b/src/sentry/models/files/abstractfile.py
@@ -204,7 +204,7 @@ def read(self, n=-1):
# Django doesn't permit models to have parent classes that are Generic
# this kludge lets satisfy both mypy and django
class _Parent(Generic[BlobIndexType, BlobType]):
- ...
+ pass
else:
diff --git a/src/sentry/models/files/abstractfileblob.py b/src/sentry/models/files/abstractfileblob.py
index 48cd7371faefd..f53a4dbb84dbf 100644
--- a/src/sentry/models/files/abstractfileblob.py
+++ b/src/sentry/models/files/abstractfileblob.py
@@ -31,7 +31,7 @@
# Django doesn't permit models to have parent classes that are Generic
# this kludge lets satisfy both mypy and django
class _Parent(Generic[BlobOwnerType]):
- ...
+ pass
else:
diff --git a/src/sentry/models/group.py b/src/sentry/models/group.py
index 1cb427a26837a..0e5ff5f5005f4 100644
--- a/src/sentry/models/group.py
+++ b/src/sentry/models/group.py
@@ -1,7 +1,6 @@
from __future__ import annotations
import logging
-import math
import re
import warnings
from collections import defaultdict, namedtuple
@@ -397,7 +396,7 @@ def get_groups_by_external_issue(
self,
integration: RpcIntegration,
organizations: Iterable[Organization],
- external_issue_key: str,
+ external_issue_key: str | None,
) -> QuerySet[Group]:
from sentry.integrations.models.external_issue import ExternalIssue
from sentry.integrations.services.integration import integration_service
@@ -570,7 +569,6 @@ class Group(Model):
active_at = models.DateTimeField(null=True, db_index=True)
time_spent_total = BoundedIntegerField(default=0)
time_spent_count = BoundedIntegerField(default=0)
- score = BoundedIntegerField(default=0)
# deprecated, do not use. GroupShare has superseded
is_public = models.BooleanField(default=False, null=True)
data: models.Field[dict[str, Any] | None, dict[str, Any]] = GzippedDictField(
@@ -624,9 +622,6 @@ def save(self, *args, **kwargs):
self.message = truncatechars(self.message.splitlines()[0], 255)
if self.times_seen is None:
self.times_seen = 1
- self.score = type(self).calculate_score(
- times_seen=self.times_seen, last_seen=self.last_seen
- )
super().save(*args, **kwargs)
def get_absolute_url(
@@ -769,9 +764,6 @@ def get_share_id(self):
# Otherwise it has not been shared yet.
return None
- def get_score(self):
- return type(self).calculate_score(self.times_seen, self.last_seen)
-
def get_latest_event(self) -> GroupEvent | None:
if not hasattr(self, "_latest_event"):
self._latest_event = self.get_latest_event_for_environments()
@@ -922,10 +914,6 @@ def count_users_seen(self, referrer=Referrer.TAGSTORE_GET_GROUPS_USER_COUNTS.val
referrer=referrer,
)[self.id]
- @classmethod
- def calculate_score(cls, times_seen, last_seen):
- return math.log(float(times_seen or 1)) * 600 + float(last_seen.strftime("%s"))
-
def get_assignee(self) -> Team | RpcUser | None:
from sentry.models.groupassignee import GroupAssignee
diff --git a/src/sentry/models/groupassignee.py b/src/sentry/models/groupassignee.py
index e3c979eb3eb56..eeea257073f4a 100644
--- a/src/sentry/models/groupassignee.py
+++ b/src/sentry/models/groupassignee.py
@@ -12,6 +12,7 @@
from sentry.db.models import FlexibleForeignKey, Model, region_silo_model, sane_repr
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
from sentry.db.models.manager.base import BaseManager
+from sentry.integrations.services.assignment_source import AssignmentSource
from sentry.models.grouphistory import GroupHistoryStatus, record_group_history
from sentry.models.groupowner import GroupOwner
from sentry.models.groupsubscription import GroupSubscription
@@ -134,8 +135,9 @@ def assign(
create_only: bool = False,
extra: dict[str, str] | None = None,
force_autoassign: bool = False,
+ assignment_source: AssignmentSource | None = None,
):
- from sentry.integrations.utils import sync_group_assignee_outbound
+ from sentry.integrations.utils.sync import sync_group_assignee_outbound
from sentry.models.activity import Activity
from sentry.models.groupsubscription import GroupSubscription
@@ -187,7 +189,9 @@ def assign(
if assignee_type == "user" and features.has(
"organizations:integrations-issue-sync", group.organization, actor=acting_user
):
- sync_group_assignee_outbound(group, assigned_to.id, assign=True)
+ sync_group_assignee_outbound(
+ group, assigned_to.id, assign=True, assignment_source=assignment_source
+ )
if not created: # aka re-assignment
self.remove_old_assignees(group, assignee, assigned_to_id, assignee_type)
@@ -200,8 +204,9 @@ def deassign(
acting_user: User | RpcUser | None = None,
assigned_to: Team | RpcUser | None = None,
extra: dict[str, str] | None = None,
+ assignment_source: AssignmentSource | None = None,
) -> None:
- from sentry.integrations.utils import sync_group_assignee_outbound
+ from sentry.integrations.utils.sync import sync_group_assignee_outbound
from sentry.models.activity import Activity
from sentry.models.projectownership import ProjectOwnership
@@ -230,7 +235,9 @@ def deassign(
if features.has(
"organizations:integrations-issue-sync", group.organization, actor=acting_user
):
- sync_group_assignee_outbound(group, None, assign=False)
+ sync_group_assignee_outbound(
+ group, None, assign=False, assignment_source=assignment_source
+ )
issue_unassigned.send_robust(
project=group.project, group=group, user=acting_user, sender=self.__class__
diff --git a/src/sentry/models/grouphashmetadata.py b/src/sentry/models/grouphashmetadata.py
index b661e2178c0c2..10bbb9bd2c09d 100644
--- a/src/sentry/models/grouphashmetadata.py
+++ b/src/sentry/models/grouphashmetadata.py
@@ -4,6 +4,7 @@
from sentry.backup.scopes import RelocationScope
from sentry.db.models import Model, region_silo_model
from sentry.db.models.base import sane_repr
+from sentry.db.models.fields.foreignkey import FlexibleForeignKey
@region_silo_model
@@ -11,11 +12,33 @@ class GroupHashMetadata(Model):
__relocation_scope__ = RelocationScope.Excluded
# GENERAL
+
grouphash = models.OneToOneField(
"sentry.GroupHash", related_name="_metadata", on_delete=models.CASCADE
)
date_added = models.DateTimeField(default=timezone.now)
+ # HASHING
+
+ # Most recent config to produce this hash
+ latest_grouping_config = models.CharField(null=True)
+
+ # SEER
+
+ # When this hash was sent to Seer. This will be different than `date_added` if we send it to
+ # Seer as part of a backfill rather than during ingest.
+ seer_date_sent = models.DateTimeField(null=True)
+ # Id of the event whose stacktrace was sent to Seer
+ seer_event_sent = models.CharField(max_length=32, null=True)
+ # The version of the Seer model used to process this hash value
+ seer_model = models.CharField(null=True)
+    # The `GroupHash` record for the match Seer sent back (if any)
+ seer_matched_grouphash = FlexibleForeignKey(
+ "sentry.GroupHash", related_name="seer_matchees", on_delete=models.DO_NOTHING, null=True
+ )
+ # The similarity between this hash's stacktrace and the parent (matched) hash's stacktrace
+ seer_match_distance = models.FloatField(null=True)
+
class Meta:
app_label = "sentry"
db_table = "sentry_grouphashmetadata"
diff --git a/src/sentry/models/groupinbox.py b/src/sentry/models/groupinbox.py
index d4ab7737c13a2..9f91b1218b79a 100644
--- a/src/sentry/models/groupinbox.py
+++ b/src/sentry/models/groupinbox.py
@@ -111,7 +111,7 @@ def remove_group_from_inbox(group, action=None, user=None, referrer=None):
def bulk_remove_groups_from_inbox(groups, action=None, user=None, referrer=None):
- with sentry_sdk.start_span(description="bulk_remove_groups_from_inbox"):
+ with sentry_sdk.start_span(name="bulk_remove_groups_from_inbox"):
try:
group_inbox = GroupInbox.objects.filter(group__in=groups)
group_inbox.delete()
diff --git a/src/sentry/models/project.py b/src/sentry/models/project.py
index 6a1a43cd28046..8cadf8fad3a5a 100644
--- a/src/sentry/models/project.py
+++ b/src/sentry/models/project.py
@@ -54,6 +54,7 @@
from sentry.users.models.user import User
SENTRY_USE_SNOWFLAKE = getattr(settings, "SENTRY_USE_SNOWFLAKE", False)
+PROJECT_SLUG_MAX_LENGTH = 100
# NOTE:
# - When you modify this list, ensure that the platform IDs listed in "sentry/static/app/data/platforms.tsx" match.
@@ -232,7 +233,7 @@ class Project(Model, PendingDeletionMixin):
__relocation_scope__ = RelocationScope.Organization
- slug = SentrySlugField(null=True)
+ slug = SentrySlugField(null=True, max_length=PROJECT_SLUG_MAX_LENGTH)
# DEPRECATED do not use, prefer slug
name = models.CharField(max_length=200)
forced_color = models.CharField(max_length=6, null=True, blank=True)
@@ -469,6 +470,7 @@ def get_full_name(self):
return self.slug
def transfer_to(self, organization):
+ from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.incidents.models.alert_rule import AlertRule
from sentry.integrations.models.external_issue import ExternalIssue
from sentry.models.environment import Environment, EnvironmentProject
@@ -476,7 +478,6 @@ def transfer_to(self, organization):
from sentry.models.releaseprojectenvironment import ReleaseProjectEnvironment
from sentry.models.releases.release_project import ReleaseProject
from sentry.models.rule import Rule
- from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.monitors.models import Monitor
old_org_id = self.organization_id
diff --git a/src/sentry/models/projectownership.py b/src/sentry/models/projectownership.py
index 6dbb55cd2fef0..456d51ae6c77c 100644
--- a/src/sentry/models/projectownership.py
+++ b/src/sentry/models/projectownership.py
@@ -5,6 +5,7 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any
+import sentry_sdk
from django.db import models
from django.db.models.signals import post_delete, post_save
from django.utils import timezone
@@ -61,7 +62,7 @@ class Meta:
__repr__ = sane_repr("project_id", "is_active")
@classmethod
- def get_cache_key(self, project_id):
+ def get_cache_key(self, project_id) -> str:
return f"projectownership_project_id:1:{project_id}"
@classmethod
@@ -134,17 +135,21 @@ def get_owners(
owners = {o for rule in rules for o in rule.owners}
owners_to_actors = resolve_actors(owners, project_id)
- ordered_actors = []
+ ordered_actors: list[Actor] = []
for rule in rules:
for o in rule.owners:
- if o in owners and owners_to_actors.get(o) is not None:
- ordered_actors.append(owners_to_actors[o])
- owners.remove(o)
+ if o in owners:
+ actor = owners_to_actors.get(o)
+ if actor is not None:
+ ordered_actors.append(actor)
+ owners.remove(o)
return ordered_actors, rules
@classmethod
- def _hydrate_rules(cls, project_id, rules, type: str = OwnerRuleType.OWNERSHIP_RULE.value):
+ def _hydrate_rules(
+ cls, project_id: int, rules: Sequence[Rule], type: str = OwnerRuleType.OWNERSHIP_RULE.value
+ ):
"""
Get the last matching rule to take the most precedence.
"""
@@ -165,8 +170,10 @@ def _hydrate_rules(cls, project_id, rules, type: str = OwnerRuleType.OWNERSHIP_R
return result
@classmethod
+ @metrics.wraps("projectownership.get_issue_owners")
+ @sentry_sdk.trace
def get_issue_owners(
- cls, project_id, data, limit=2
+ cls, project_id: int, data: Mapping[str, Any], limit: int = 2
) -> Sequence[tuple[Rule, Sequence[Team | RpcUser], str]]:
"""
Get the issue owners for a project if there are any.
@@ -177,41 +184,44 @@ def get_issue_owners(
"""
from sentry.models.projectcodeowners import ProjectCodeOwners
- with metrics.timer("projectownership.get_autoassign_owners"):
- ownership = cls.get_ownership_cached(project_id)
- codeowners = ProjectCodeOwners.get_codeowners_cached(project_id)
- if not (ownership or codeowners):
- return []
-
- if not ownership:
- ownership = cls(project_id=project_id)
+ ownership = cls.get_ownership_cached(project_id)
+ codeowners = ProjectCodeOwners.get_codeowners_cached(project_id)
+ if not (ownership or codeowners):
+ return []
- ownership_rules = cls._matching_ownership_rules(ownership, data)
- codeowners_rules = cls._matching_ownership_rules(codeowners, data) if codeowners else []
+ if not ownership:
+ ownership = cls(project_id=project_id)
- if not (codeowners_rules or ownership_rules):
- return []
+ # rules_with_owners is ordered by priority, descending, see also:
+ # https://docs.sentry.io/product/issues/ownership-rules/#evaluation-flow
+ rules_with_owners = []
+ with metrics.timer("projectownership.get_issue_owners_ownership_rules"):
+ ownership_rules = list(reversed(cls._matching_ownership_rules(ownership, data)))
hydrated_ownership_rules = cls._hydrate_rules(
project_id, ownership_rules, OwnerRuleType.OWNERSHIP_RULE.value
)
- hydrated_codeowners_rules = cls._hydrate_rules(
- project_id, codeowners_rules, OwnerRuleType.CODEOWNERS.value
- )
+ for item in hydrated_ownership_rules:
+ if item[1]: # actors
+ rules_with_owners.append(item)
+ if len(rules_with_owners) == limit:
+ return rules_with_owners
- rules_in_evaluation_order = [
- *hydrated_ownership_rules[::-1],
- *hydrated_codeowners_rules[::-1],
- ]
+ if not codeowners:
+ return rules_with_owners
- rules_with_owners = list(
- filter(
- lambda item: len(item[1]) > 0,
- rules_in_evaluation_order,
- )
+ with metrics.timer("projectownership.get_issue_owners_codeowners_rules"):
+ codeowners_rules = list(reversed(cls._matching_ownership_rules(codeowners, data)))
+ hydrated_codeowners_rules = cls._hydrate_rules(
+ project_id, codeowners_rules, OwnerRuleType.CODEOWNERS.value
)
+ for item in hydrated_codeowners_rules:
+ if item[1]: # actors
+ rules_with_owners.append(item)
+ if len(rules_with_owners) == limit:
+ return rules_with_owners
- return rules_with_owners[:limit]
+ return rules_with_owners
@classmethod
def _get_autoassignment_types(cls, ownership):
@@ -236,7 +246,7 @@ def handle_auto_assignment(
organization_id: int | None = None,
force_autoassign: bool = False,
logging_extra: dict[str, str | bool | int] | None = None,
- ):
+ ) -> None:
"""
Get the auto-assign owner for a project if there are any.
We combine the schemas from IssueOwners and CodeOwners.
@@ -358,12 +368,10 @@ def _matching_ownership_rules(
cls,
ownership: ProjectOwnership | ProjectCodeOwners,
data: Mapping[str, Any],
- ) -> Sequence[Rule]:
+ ) -> list[Rule]:
rules = []
if ownership.schema is not None:
- munged_data = None
- if options.get("ownership.munge_data_for_performance"):
- munged_data = Matcher.munge_if_needed(data)
+ munged_data = Matcher.munge_if_needed(data)
for rule in load_schema(ownership.schema):
if rule.test(data, munged_data):
rules.append(rule)
diff --git a/src/sentry/monitors/constants.py b/src/sentry/monitors/constants.py
index 8f06d3589dff4..fb19678754fc4 100644
--- a/src/sentry/monitors/constants.py
+++ b/src/sentry/monitors/constants.py
@@ -16,9 +16,6 @@
# being marked as missed
DEFAULT_CHECKIN_MARGIN = 1
-# Enforced maximum length of the monitor slug
-MAX_SLUG_LENGTH = 50
-
class PermitCheckInStatus(Enum):
ACCEPT = 0
diff --git a/src/sentry/monitors/endpoints/base_monitor_details.py b/src/sentry/monitors/endpoints/base_monitor_details.py
index f220997994ad8..8f9a2c366f333 100644
--- a/src/sentry/monitors/endpoints/base_monitor_details.py
+++ b/src/sentry/monitors/endpoints/base_monitor_details.py
@@ -13,10 +13,10 @@
from sentry.api.helpers.environments import get_environments
from sentry.api.serializers import serialize
from sentry.constants import ObjectStatus
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.models.environment import Environment
from sentry.models.project import Project
from sentry.models.rule import Rule, RuleActivity, RuleActivityType
-from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.monitors.models import (
CheckInStatus,
Monitor,
diff --git a/src/sentry/monitors/endpoints/base_monitor_environment_details.py b/src/sentry/monitors/endpoints/base_monitor_environment_details.py
index b090001a5cbc8..bbdb5364a5a76 100644
--- a/src/sentry/monitors/endpoints/base_monitor_environment_details.py
+++ b/src/sentry/monitors/endpoints/base_monitor_environment_details.py
@@ -7,7 +7,7 @@
from sentry.api.base import BaseEndpointMixin
from sentry.api.serializers import serialize
from sentry.constants import ObjectStatus
-from sentry.models.scheduledeletion import RegionScheduledDeletion
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.monitors.models import MonitorEnvironment, MonitorStatus
diff --git a/src/sentry/monitors/endpoints/organization_monitor_index.py b/src/sentry/monitors/endpoints/organization_monitor_index.py
index b7dc52b928598..585a37cf26d5c 100644
--- a/src/sentry/monitors/endpoints/organization_monitor_index.py
+++ b/src/sentry/monitors/endpoints/organization_monitor_index.py
@@ -34,6 +34,8 @@
from sentry.models.environment import Environment
from sentry.models.organization import Organization
from sentry.monitors.models import (
+ DEFAULT_STATUS_ORDER,
+ MONITOR_ENVIRONMENT_ORDERING,
Monitor,
MonitorEnvironment,
MonitorLimitsExceeded,
@@ -66,18 +68,6 @@ def map_value_to_constant(constant, value):
from rest_framework.request import Request
from rest_framework.response import Response
-DEFAULT_ORDERING = [
- MonitorStatus.ERROR,
- MonitorStatus.OK,
- MonitorStatus.ACTIVE,
- MonitorStatus.DISABLED,
-]
-
-MONITOR_ENVIRONMENT_ORDERING = Case(
- *[When(status=s, then=Value(i)) for i, s in enumerate(DEFAULT_ORDERING)],
- output_field=IntegerField(),
-)
-
def flip_sort_direction(sort_field: str) -> str:
if sort_field[0] == "-":
@@ -163,8 +153,8 @@ def get(self, request: Request, organization: Organization) -> Response:
queryset = queryset.annotate(
environment_status_ordering=Case(
# Sort DISABLED and is_muted monitors to the bottom of the list
- When(status=ObjectStatus.DISABLED, then=Value(len(DEFAULT_ORDERING) + 1)),
- When(is_muted=True, then=Value(len(DEFAULT_ORDERING))),
+ When(status=ObjectStatus.DISABLED, then=Value(len(DEFAULT_STATUS_ORDER) + 1)),
+ When(is_muted=True, then=Value(len(DEFAULT_STATUS_ORDER))),
default=Subquery(
monitor_environments_query.annotate(
status_ordering=MONITOR_ENVIRONMENT_ORDERING
diff --git a/src/sentry/monitors/models.py b/src/sentry/monitors/models.py
index 0690ee7089152..966e707819308 100644
--- a/src/sentry/monitors/models.py
+++ b/src/sentry/monitors/models.py
@@ -11,7 +11,7 @@
import jsonschema
from django.conf import settings
from django.db import models
-from django.db.models import Q
+from django.db.models import Case, IntegerField, Q, Value, When
from django.db.models.signals import post_delete, pre_save
from django.dispatch import receiver
from django.utils import timezone
@@ -31,13 +31,12 @@
sane_repr,
)
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
-from sentry.db.models.fields.slug import SentrySlugField
+from sentry.db.models.fields.slug import DEFAULT_SLUG_MAX_LENGTH, SentrySlugField
from sentry.db.models.manager.base import BaseManager
from sentry.db.models.utils import slugify_instance
from sentry.locks import locks
from sentry.models.environment import Environment
from sentry.models.rule import Rule, RuleSource
-from sentry.monitors.constants import MAX_SLUG_LENGTH
from sentry.monitors.types import CrontabSchedule, IntervalSchedule
from sentry.types.actor import Actor
from sentry.utils.retries import TimedRetryPolicy
@@ -170,6 +169,20 @@ def as_choices(cls):
)
+DEFAULT_STATUS_ORDER = [
+ MonitorStatus.ERROR,
+ MonitorStatus.OK,
+ MonitorStatus.ACTIVE,
+ MonitorStatus.DISABLED,
+]
+
+MONITOR_ENVIRONMENT_ORDERING = Case(
+ When(is_muted=True, then=Value(len(DEFAULT_STATUS_ORDER) + 1)),
+ *[When(status=s, then=Value(i)) for i, s in enumerate(DEFAULT_STATUS_ORDER)],
+ output_field=IntegerField(),
+)
+
+
class MonitorType:
# In the future we may have other types of monitors such as health check
# monitors. But for now we just have CRON_JOB style monitors.
@@ -296,14 +309,12 @@ def save(self, *args, **kwargs):
self,
self.name,
organization_id=self.organization_id,
- max_length=MAX_SLUG_LENGTH,
+ max_length=DEFAULT_SLUG_MAX_LENGTH,
)
return super().save(*args, **kwargs)
@property
def owner_actor(self) -> Actor | None:
- if not (self.owner_user_id or self.owner_team_id):
- return None
return Actor.from_id(user_id=self.owner_user_id, team_id=self.owner_team_id)
@property
diff --git a/src/sentry/monitors/processing_errors/manager.py b/src/sentry/monitors/processing_errors/manager.py
index 0d6e5c08fdad6..af4d1e839710a 100644
--- a/src/sentry/monitors/processing_errors/manager.py
+++ b/src/sentry/monitors/processing_errors/manager.py
@@ -10,7 +10,7 @@
from redis.client import StrictRedis
from rediscluster import RedisCluster
-from sentry import analytics, features
+from sentry import analytics
from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.monitors.models import Monitor
@@ -180,8 +180,6 @@ def handle_processing_errors(item: CheckinItem, error: ProcessingErrorsException
try:
project = Project.objects.get_from_cache(id=item.message["project_id"])
organization = Organization.objects.get_from_cache(id=project.organization_id)
- if not features.has("organizations:crons-write-user-feedback", organization):
- return
metrics.incr(
"monitors.checkin.handle_processing_error",
diff --git a/src/sentry/monitors/serializers.py b/src/sentry/monitors/serializers.py
index bb213eb73150a..78d04987dbf19 100644
--- a/src/sentry/monitors/serializers.py
+++ b/src/sentry/monitors/serializers.py
@@ -10,6 +10,7 @@
from sentry.models.environment import Environment
from sentry.models.project import Project
from sentry.monitors.models import (
+ MONITOR_ENVIRONMENT_ORDERING,
Monitor,
MonitorCheckIn,
MonitorEnvBrokenDetection,
@@ -198,7 +199,8 @@ def get_attrs(self, item_list, user, **kwargs):
monitor_environments_qs = (
MonitorEnvironment.objects.filter(monitor__in=item_list)
- .order_by("-last_checkin")
+ .annotate(status_ordering=MONITOR_ENVIRONMENT_ORDERING)
+ .order_by("status_ordering", "-last_checkin", "environment_id")
.exclude(
status__in=[MonitorStatus.PENDING_DELETION, MonitorStatus.DELETION_IN_PROGRESS]
)
diff --git a/src/sentry/monitors/types.py b/src/sentry/monitors/types.py
index 7a1832983c490..50f17140da855 100644
--- a/src/sentry/monitors/types.py
+++ b/src/sentry/monitors/types.py
@@ -8,7 +8,7 @@
from django.utils.text import slugify
from sentry_kafka_schemas.schema_types.ingest_monitors_v1 import CheckIn
-from sentry.monitors.constants import MAX_SLUG_LENGTH
+from sentry.db.models.fields.slug import DEFAULT_SLUG_MAX_LENGTH
class CheckinTrace(TypedDict):
@@ -70,7 +70,7 @@ class CheckinItem:
@cached_property
def valid_monitor_slug(self):
- return slugify(self.payload["monitor_slug"])[:MAX_SLUG_LENGTH].strip("-")
+ return slugify(self.payload["monitor_slug"])[:DEFAULT_SLUG_MAX_LENGTH].strip("-")
@property
def processing_key(self):
diff --git a/src/sentry/monitors/validators.py b/src/sentry/monitors/validators.py
index 931c497e6cead..417540964f9fb 100644
--- a/src/sentry/monitors/validators.py
+++ b/src/sentry/monitors/validators.py
@@ -16,7 +16,8 @@
from sentry.api.serializers.rest_framework.project import ProjectField
from sentry.constants import ObjectStatus
from sentry.db.models import BoundedPositiveIntegerField
-from sentry.monitors.constants import MAX_SLUG_LENGTH, MAX_THRESHOLD, MAX_TIMEOUT
+from sentry.db.models.fields.slug import DEFAULT_SLUG_MAX_LENGTH
+from sentry.monitors.constants import MAX_THRESHOLD, MAX_TIMEOUT
from sentry.monitors.models import CheckInStatus, Monitor, MonitorType, ScheduleType
from sentry.monitors.schedule import get_next_schedule, get_prev_schedule
from sentry.monitors.types import CrontabSchedule
@@ -246,7 +247,7 @@ class MonitorValidator(CamelSnakeSerializer):
help_text="Name of the monitor. Used for notifications.",
)
slug = SentrySerializerSlugField(
- max_length=MAX_SLUG_LENGTH,
+ max_length=DEFAULT_SLUG_MAX_LENGTH,
required=False,
help_text="Uniquely identifies your monitor within your organization. Changing this slug will require updates to any instrumented check-in calls.",
)
diff --git a/src/sentry/nodestore/bigtable/backend.py b/src/sentry/nodestore/bigtable/backend.py
index aa7580bd9403f..fc45086f21712 100644
--- a/src/sentry/nodestore/bigtable/backend.py
+++ b/src/sentry/nodestore/bigtable/backend.py
@@ -63,6 +63,7 @@ def __init__(
self.automatic_expiry = automatic_expiry
self.skip_deletes = automatic_expiry and "_SENTRY_CLEANUP" in os.environ
+ @sentry_sdk.tracing.trace
def _get_bytes(self, id: str) -> bytes | None:
return self.store.get(id)
diff --git a/src/sentry/notifications/notifications/base.py b/src/sentry/notifications/notifications/base.py
index d7c5d023c93ad..f752f9f5e2a93 100644
--- a/src/sentry/notifications/notifications/base.py
+++ b/src/sentry/notifications/notifications/base.py
@@ -171,7 +171,7 @@ def record_analytics(self, event_name: str, *args: Any, **kwargs: Any) -> None:
analytics.record(event_name, *args, **kwargs)
def record_notification_sent(self, recipient: Actor, provider: ExternalProviders) -> None:
- with sentry_sdk.start_span(op="notification.send", description="record_notification_sent"):
+ with sentry_sdk.start_span(op="notification.send", name="record_notification_sent"):
# may want to explicitly pass in the parameters for this event
self.record_analytics(
f"integrations.{provider.name}.notification_sent",
@@ -284,14 +284,14 @@ def send(self) -> None:
"""The default way to send notifications that respects Notification Settings."""
from sentry.notifications.notify import notify
- with sentry_sdk.start_span(op="notification.send", description="get_participants"):
+ with sentry_sdk.start_span(op="notification.send", name="get_participants"):
participants_by_provider = self.get_participants()
if not participants_by_provider:
return
context = self.get_context()
for provider, recipients in participants_by_provider.items():
- with sentry_sdk.start_span(op="notification.send", description=f"send_for_{provider}"):
+ with sentry_sdk.start_span(op="notification.send", name=f"send_for_{provider}"):
safe_execute(notify, provider, self, recipients, context)
diff --git a/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py b/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py
index b14f3e33a6415..809e79a1ccf48 100644
--- a/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py
+++ b/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py
@@ -2,7 +2,9 @@
from django.db.models import Q
from django.utils import timezone
+from sentry import analytics
from sentry.models.options.organization_option import OrganizationOption
+from sentry.models.organization import Organization
from sentry.models.organizationonboardingtask import (
OnboardingTaskStatus,
OrganizationOnboardingTask,
@@ -45,5 +47,13 @@ def try_mark_onboarding_complete(self, organization_id):
key="onboarding:complete",
value={"updated": json.datetime_to_str(timezone.now())},
)
+
+ organization = Organization.objects.get(id=organization_id)
+ analytics.record(
+ "onboarding.complete",
+ user_id=organization.default_owner_id,
+ organization_id=organization_id,
+ referrer="onboarding_tasks",
+ )
except IntegrityError:
pass
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 0df79019e0ee9..18550131c90cd 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -424,30 +424,32 @@
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
-# Replay Options
-#
-# Replay storage backend configuration (only applicable if the direct-storage driver is used)
+# Flag Options
register(
- "replay.storage.backend",
- default=None,
+ "flags:options-audit-log-is-enabled",
+ default=True,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
+ type=Bool,
)
register(
- "replay.storage.options",
- type=Dict,
+ "flags:options-audit-log-organization-id",
default=None,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
+ type=Int,
)
-# Replay Analyzer service.
+
+# Replay Options
+#
+# Replay storage backend configuration (only applicable if the direct-storage driver is used)
register(
- "replay.analyzer_service_url",
+ "replay.storage.backend",
default=None,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
register(
- "organizations:session-replay-accessibility-issues-enabled",
- type=Bool,
- default=True,
+ "replay.storage.options",
+ type=Dict,
+ default=None,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
# Globally disables replay-video.
@@ -473,6 +475,14 @@
flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
)
+# Dev Toolbar Options
+register(
+ "devtoolbar.analytics.enabled",
+ type=Bool,
+ default=False,
+ flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
+)
+
# Extract spans only from a random fraction of transactions.
#
@@ -502,9 +512,6 @@
register("slack.signing-secret", flags=FLAG_CREDENTIAL | FLAG_PRIORITIZE_DISK)
-# Slack Middleware Parser
-register("send-slack-response-from-control-silo", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE)
-
# Codecov Integration
register("codecov.client-secret", flags=FLAG_CREDENTIAL | FLAG_PRIORITIZE_DISK)
@@ -581,6 +588,14 @@
register("vsts-limited.client-id", flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE)
register("vsts-limited.client-secret", flags=FLAG_CREDENTIAL | FLAG_PRIORITIZE_DISK)
+# Azure DevOps Integration Social Login Flow
+register(
+ "vsts.social-auth-migration",
+ default=False,
+ type=Bool,
+ flags=FLAG_MODIFIABLE_BOOL | FLAG_AUTOMATOR_MODIFIABLE,
+)
+
# PagerDuty Integration
register("pagerduty.app-id", default="", flags=FLAG_AUTOMATOR_MODIFIABLE)
@@ -1785,6 +1800,12 @@
default=3,
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
+register(
+ "performance.traces.trace-explorer-skip-recent-seconds",
+ type=Int,
+ default=0,
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
register(
"performance.traces.span_query_minimum_spans",
type=Int,
@@ -1846,6 +1867,13 @@
default=[],
flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
)
+# Used for the z-score when calculating the margin of error in performance
+register(
+ "performance.extrapolation.confidence.z-score",
+ type=Float,
+ default=1.96,
+ flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
+)
# Used for enabling flags in ST. Should be removed once Flagpole works in all STs.
register("performance.use_metrics.enabled", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE)
@@ -2390,12 +2418,6 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
-register(
- "grouping.config_transition.killswitch_enabled",
- type=Bool,
- default=False,
- flags=FLAG_AUTOMATOR_MODIFIABLE,
-)
# Sample rate for double writing to experimental dsn
register(
@@ -2696,6 +2718,11 @@
default=10000,
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
+register(
+ "celery_split_queue_task_rollout",
+ default={},
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
register(
"grouping.grouphash_metadata.ingestion_writes_enabled",
@@ -2715,13 +2742,6 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
-register(
- "ownership.munge_data_for_performance",
- type=Bool,
- default=False,
- flags=FLAG_AUTOMATOR_MODIFIABLE,
-)
-
# Restrict uptime issue creation for specific host provider identifiers. Items
# in this list map to the `host_provider_id` column in the UptimeSubscription
# table.
@@ -2741,3 +2761,23 @@
default=False,
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
+
+register(
+ "celery_split_queue_legacy_mode",
+ default=["post_process_transactions"],
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
+
+register(
+ "celery_split_queue_rollout",
+ default={"post_process_transactions": 1.0},
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
+
+# Secret Scanning. Allows to temporarily disable signature verification.
+register(
+ "secret-scanning.github.enable-signature-verification",
+ type=Bool,
+ default=True,
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
diff --git a/src/sentry/options/manager.py b/src/sentry/options/manager.py
index f8d2b98df824b..aeaf12ff78594 100644
--- a/src/sentry/options/manager.py
+++ b/src/sentry/options/manager.py
@@ -298,11 +298,6 @@ def get(self, key: str, silent=False):
if not (opt.flags & FLAG_NOSTORE):
result = self.store.get(opt, silent=silent)
if result is not None:
- # HACK(mattrobenolt): SENTRY_URL_PREFIX must be kept in sync
- # when reading values from the database. This should
- # be replaced by a signal.
- if key == "system.url-prefix":
- settings.SENTRY_URL_PREFIX = result
return result
# Some values we don't want to allow them to be configured through
diff --git a/src/sentry/organizations/services/organization/impl.py b/src/sentry/organizations/services/organization/impl.py
index 2d6d44cd5f685..136ec177bad66 100644
--- a/src/sentry/organizations/services/organization/impl.py
+++ b/src/sentry/organizations/services/organization/impl.py
@@ -12,6 +12,7 @@
from sentry.api.serializers import serialize
from sentry.backup.dependencies import merge_users_for_model_in_org
from sentry.db.postgres.transactions import enforce_constraints
+from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.hybridcloud.models.outbox import ControlOutbox, outbox_context
from sentry.hybridcloud.outbox.category import OutboxCategory, OutboxScope
from sentry.hybridcloud.rpc import OptionValue, logger
@@ -37,7 +38,6 @@
from sentry.models.rule import Rule, RuleActivity
from sentry.models.rulesnooze import RuleSnooze
from sentry.models.savedsearch import SavedSearch
-from sentry.models.scheduledeletion import RegionScheduledDeletion
from sentry.models.team import Team, TeamStatus
from sentry.monitors.models import Monitor
from sentry.organizations.services.organization import (
diff --git a/src/sentry/ownership/grammar.py b/src/sentry/ownership/grammar.py
index c1fb8d32fff08..d31f3b11f1707 100644
--- a/src/sentry/ownership/grammar.py
+++ b/src/sentry/ownership/grammar.py
@@ -3,11 +3,12 @@
import re
from collections import namedtuple
from collections.abc import Callable, Iterable, Mapping, Sequence
-from typing import Any, NamedTuple
+from typing import TYPE_CHECKING, Any, NamedTuple
from parsimonious.exceptions import ParseError
from parsimonious.grammar import Grammar
-from parsimonious.nodes import Node, NodeVisitor
+from parsimonious.nodes import Node
+from parsimonious.nodes import NodeVisitor as BaseNodeVisitor
from rest_framework.serializers import ValidationError
from sentry.eventstore.models import EventSubjectTemplateData
@@ -18,10 +19,15 @@
from sentry.utils.codeowners import codeowners_match
from sentry.utils.event_frames import find_stack_frames, get_sdk_name, munged_filename_and_frames
from sentry.utils.glob import glob_match
-from sentry.utils.safe import PathSearchable, get_path
+from sentry.utils.safe import get_path
__all__ = ("parse_rules", "dump_schema", "load_schema")
+if TYPE_CHECKING:
+ NodeVisitor = BaseNodeVisitor[str]
+else:
+ NodeVisitor = BaseNodeVisitor
+
VERSION = 1
URL = "url"
@@ -90,12 +96,9 @@ def load(cls, data: Mapping[str, Any]) -> Rule:
def test(
self,
data: Mapping[str, Any],
- munged_data: tuple[Sequence[Mapping[str, Any]], Sequence[str]] | None,
+ munged_data: tuple[Sequence[Mapping[str, Any]], Sequence[str]],
) -> bool | Any:
- if munged_data:
- return self.matcher.test_with_munged(data, munged_data)
- else:
- return self.matcher.test(data)
+ return self.matcher.test(data, munged_data)
class Matcher(namedtuple("Matcher", "type pattern")):
@@ -124,7 +127,9 @@ def load(cls, data: Mapping[str, str]) -> Matcher:
return cls(data["type"], data["pattern"])
@staticmethod
- def munge_if_needed(data: PathSearchable) -> tuple[Sequence[Mapping[str, Any]], Sequence[str]]:
+ def munge_if_needed(
+ data: Mapping[str, Any]
+ ) -> tuple[Sequence[Mapping[str, Any]], Sequence[str]]:
keys = ["filename", "abs_path"]
platform = data.get("platform")
sdk_name = get_sdk_name(data)
@@ -137,13 +142,11 @@ def munge_if_needed(data: PathSearchable) -> tuple[Sequence[Mapping[str, Any]],
return frames, keys
- def test_with_munged(
- self, data: PathSearchable, munged_data: tuple[Sequence[Mapping[str, Any]], Sequence[str]]
+ def test(
+ self,
+ data: Mapping[str, Any],
+ munged_data: tuple[Sequence[Mapping[str, Any]], Sequence[str]],
) -> bool:
- """
- Temporary function to test pre-munging data performance in production. will remove
- and combine with test if prod deployment goes well.
- """
if self.type == URL:
return self.test_url(data)
elif self.type == PATH:
@@ -164,31 +167,7 @@ def test_with_munged(
)
return False
- def test(self, data: PathSearchable) -> bool:
- if self.type == URL:
- return self.test_url(data)
- elif self.type == PATH:
- return self.test_frames(*self.munge_if_needed(data))
- elif self.type == MODULE:
- return self.test_frames(find_stack_frames(data), ["module"])
- elif self.type.startswith("tags."):
- return self.test_tag(data)
- elif self.type == CODEOWNERS:
- return self.test_frames(
- *self.munge_if_needed(data),
- # Codeowners has a slightly different syntax compared to issue owners
- # As such we need to match it using gitignore logic.
- # See syntax documentation here:
- # https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/creating-a-repository-on-github/about-code-owners
- match_frame_value_func=lambda val, pattern: bool(codeowners_match(val, pattern)),
- match_frame_func=lambda frame: frame.get("in_app") is not False,
- )
- return False
-
- def test_url(self, data: PathSearchable) -> bool:
- if not isinstance(data, Mapping):
- return False
-
+ def test_url(self, data: Mapping[str, Any]) -> bool:
url = get_path(data, "request", "url")
return url and bool(glob_match(url, self.pattern, ignorecase=True))
@@ -201,7 +180,7 @@ def test_frames(
),
match_frame_func: Callable[[Mapping[str, Any]], bool] = lambda _: True,
) -> bool:
- for frame in (f for f in frames if isinstance(f, Mapping)):
+ for frame in frames:
if not match_frame_func(frame):
continue
@@ -215,7 +194,7 @@ def test_frames(
return False
- def test_tag(self, data: PathSearchable) -> bool:
+ def test_tag(self, data: Mapping[str, Any]) -> bool:
tag = self.type[5:]
# inspect the event-payload User interface first before checking tags.user
@@ -296,7 +275,7 @@ def visit_matcher_tag(self, node: Node, children: Sequence[Any]) -> str:
def visit_owners(self, node: Node, children: tuple[Any, Sequence[Owner]]) -> list[Owner]:
_, owners = children
- return owners
+ return list(owners)
def visit_owner(self, node: Node, children: tuple[Node, bool, str]) -> Owner:
_, is_team, pattern = children
@@ -320,7 +299,7 @@ def visit_quoted_identifier(self, node: Node, children: Sequence[Any]) -> str:
return str(node.text[1:-1].encode("ascii", "backslashreplace").decode("unicode-escape"))
def generic_visit(self, node: Node, children: Sequence[Any]) -> list[Node] | Node:
- return children or node
+ return list(children) or node
def parse_rules(data: str) -> Any:
@@ -462,7 +441,7 @@ def convert_codeowners_syntax(
return result
-def resolve_actors(owners: Iterable[Owner], project_id: int) -> dict[Owner, Actor]:
+def resolve_actors(owners: Iterable[Owner], project_id: int) -> dict[Owner, Actor | None]:
"""Convert a list of Owner objects into a dictionary
of {Owner: Actor} pairs. Actors not identified are returned
as None."""
@@ -563,8 +542,9 @@ def create_schema_from_issue_owners(
try:
rules = parse_rules(issue_owners)
except ParseError as e:
+ rule_name = e.expr.name if e.expr else str(e.expr)
raise ValidationError(
- {"raw": f"Parse error: {e.expr.name} (line {e.line()}, column {e.column()})"}
+ {"raw": f"Parse error: {rule_name} (line {e.line()}, column {e.column()})"}
)
schema = dump_schema(rules)
diff --git a/src/sentry/plugins/__init__.py b/src/sentry/plugins/__init__.py
index 1a7d079a76166..0632b0da1e3c0 100644
--- a/src/sentry/plugins/__init__.py
+++ b/src/sentry/plugins/__init__.py
@@ -6,4 +6,5 @@
"jira",
"pagerduty",
"opsgenie",
+ "phabricator",
)
diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py
index f888cf2bda68d..0149b9159366d 100644
--- a/src/sentry/profiles/task.py
+++ b/src/sentry/profiles/task.py
@@ -187,7 +187,10 @@ def process_profile_task(
if not project.flags.has_profiles:
first_profile_received.send_robust(project=project, sender=Project)
try:
- _track_duration_outcome(profile=profile, project=project)
+ if quotas.backend.should_emit_profile_duration_outcome(
+ organization=organization, profile=profile
+ ):
+ _track_duration_outcome(profile=profile, project=project)
except Exception as e:
sentry_sdk.capture_exception(e)
if profile.get("version") != "2":
diff --git a/src/sentry/projectoptions/manager.py b/src/sentry/projectoptions/manager.py
index 7256eab87904f..af5b3e572722e 100644
--- a/src/sentry/projectoptions/manager.py
+++ b/src/sentry/projectoptions/manager.py
@@ -15,8 +15,11 @@ def get_default(self, project=None, epoch=None):
epoch = 1
else:
epoch = project.get_option("sentry:option-epoch") or 1
+ # Find where in the ordered epoch list the project's epoch would go
idx = bisect.bisect(self._epoch_default_list, epoch)
if idx > 0:
+ # Return the value corresponding to the highest epoch which doesn't exceed the
+ # project epoch
return self.epoch_defaults[self._epoch_default_list[idx - 1]]
return self.default
diff --git a/src/sentry/projects/services/project/impl.py b/src/sentry/projects/services/project/impl.py
index 8ca8c2a44af67..b5d73ae6e2815 100644
--- a/src/sentry/projects/services/project/impl.py
+++ b/src/sentry/projects/services/project/impl.py
@@ -35,6 +35,14 @@ def get_by_id(self, *, organization_id: int, id: int) -> RpcProject | None:
return serialize_project(project)
return None
+ def get_by_slug(self, *, organization_id: int, slug: str) -> RpcProject | None:
+ project: Project | None = Project.objects.filter(
+ slug=slug, organization=organization_id
+ ).first()
+ if project:
+ return serialize_project(project)
+ return None
+
def get_many_by_organizations(
self,
*,
diff --git a/src/sentry/projects/services/project/service.py b/src/sentry/projects/services/project/service.py
index a5d2928d1ca5b..a4626db47b8f6 100644
--- a/src/sentry/projects/services/project/service.py
+++ b/src/sentry/projects/services/project/service.py
@@ -59,6 +59,11 @@ def delete_option(self, *, project: RpcProject, key: str) -> None:
def get_by_id(self, *, organization_id: int, id: int) -> RpcProject | None:
pass
+ @regional_rpc_method(resolve=ByOrganizationId())
+ @abstractmethod
+ def get_by_slug(self, *, organization_id: int, slug: str) -> RpcProject | None:
+ pass
+
@regional_rpc_method(resolve=ByOrganizationId())
@abstractmethod
def serialize_many(
diff --git a/src/sentry/queue/routers.py b/src/sentry/queue/routers.py
new file mode 100644
index 0000000000000..b1ad6081ec9f5
--- /dev/null
+++ b/src/sentry/queue/routers.py
@@ -0,0 +1,140 @@
+import logging
+import random
+from collections.abc import Iterator, Mapping, Sequence
+from itertools import cycle
+from typing import Any, NamedTuple
+
+from django.conf import settings
+
+from sentry import options
+from sentry.celery import app
+from sentry.conf.types.celery import SplitQueueSize
+from sentry.utils.celery import build_queue_names
+
+logger = logging.getLogger(__name__)
+
+
+def _get_known_queues() -> set[str]:
+ return {c_queue.name for c_queue in app.conf.CELERY_QUEUES}
+
+
+def _validate_destinations(destinations: Sequence[str]) -> None:
+ for dest in destinations:
+ assert dest in _get_known_queues(), f"Queue {dest} in split queue config is not declared."
+
+
+class TaskRoute(NamedTuple):
+ default_queue: str
+ queues: Iterator[str]
+
+
+def _build_destination_names(default_queue: str, queue_size_conf: SplitQueueSize) -> Sequence[str]:
+ """
+ Validates the configurations and builds the list of queues to cycle through.
+
+ If no valid configuration is provided it returns an empty Sequence.
+ It is up to the callsite to decide what to do with that to properly route
+ messages.
+ """
+
+ known_queues = _get_known_queues()
+
+ assert (
+ default_queue in known_queues
+ ), f"Queue {default_queue} in split queue config is not declared."
+
+ assert queue_size_conf["in_use"] <= queue_size_conf["total"]
+ if queue_size_conf["in_use"] >= 2:
+ destinations = build_queue_names(default_queue, queue_size_conf["in_use"])
+ _validate_destinations(destinations)
+ return destinations
+ else:
+ logger.error(
+ "Invalid configuration for queue %s. In use is not greater than 1: %d. Fall back to source",
+ default_queue,
+ queue_size_conf["in_use"],
+ )
+ return []
+
+
+class SplitQueueTaskRouter:
+ """
+ Routes tasks to split queues.
+
+ As for `SplitQueueRouter` this is meant to spread the load of a queue
+ to a number of split queues.
+
+ The main difference is that this is a router used directly by Celery.
+ It is configured as the main router via the `CELERY_ROUTES` setting.
+ Every time a task is scheduled that does not define a queue this router
+ is used and it maps a task to a queue.
+
+ Split queues can be rolled out individually via options.
+ """
+
+ def __init__(self) -> None:
+ self.__task_routers = {}
+ for task, dest_config in settings.CELERY_SPLIT_QUEUE_TASK_ROUTES.items():
+ default_destination = dest_config["default_queue"]
+ destinations: Sequence[str] = []
+ if "queues_config" in dest_config:
+ destinations = _build_destination_names(
+ dest_config["default_queue"], dest_config["queues_config"]
+ )
+
+ if not destinations:
+ destinations = [dest_config["default_queue"]]
+
+ # It is critical to add a TaskRoute even if the configuration is invalid
+ # or if the setting does not contain queues spec. This is because
+ # the task, in this case does not define the queue name, so the router
+ # has to provide the default one.
+ self.__task_routers[task] = TaskRoute(default_destination, cycle(destinations))
+
+ def route_for_task(self, task: str, *args: Any, **kwargs: Any) -> Mapping[str, str] | None:
+ route = self.__task_routers.get(task)
+
+ if route is None:
+ return None
+
+ rollout_rate = options.get("celery_split_queue_task_rollout").get(task, 0.0)
+ if random.random() >= rollout_rate:
+ return {"queue": route.default_queue}
+
+ return {"queue": next(route.queues)}
+
+
+class SplitQueueRouter:
+ """
+ Returns the split queue to use for a Celery queue.
+ Split queues allow us to spread the load of a queue to multiple ones.
+ This takes in input a queue name and returns the split. It is supposed
+ to be used by the code that schedules the task.
+ Each split queue can be individually rolled out via options.
+ WARNING: Do not forget to configure your workers to listen to the
+ queues appropriately before you start routing messages.
+ """
+
+ def __init__(self) -> None:
+ self.__queue_routers = {}
+ for source, dest_config in settings.CELERY_SPLIT_QUEUE_ROUTES.items():
+ destinations = _build_destination_names(source, dest_config)
+ if destinations:
+ self.__queue_routers[source] = cycle(destinations)
+
+ def route_for_queue(self, queue: str) -> str:
+ rollout_rate = options.get("celery_split_queue_rollout").get(queue, 0.0)
+ if random.random() >= rollout_rate:
+ return queue
+
+ if queue in set(options.get("celery_split_queue_legacy_mode")):
+ # Use legacy route
+ # This router required to define the routing logic inside the
+ # settings file.
+ return settings.SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER.get(queue, lambda: queue)()
+ else:
+ router = self.__queue_routers.get(queue)
+ if router is not None:
+ return next(router)
+ else:
+ return queue
diff --git a/src/sentry/quotas/base.py b/src/sentry/quotas/base.py
index 4b00ff1496582..376a69acfa3a6 100644
--- a/src/sentry/quotas/base.py
+++ b/src/sentry/quotas/base.py
@@ -15,9 +15,11 @@
from sentry.utils.services import Service
if TYPE_CHECKING:
+ from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.models.projectkey import ProjectKey
from sentry.monitors.models import Monitor
+ from sentry.profiles.task import Profile
@unique
@@ -653,3 +655,11 @@ def update_monitor_slug(self, previous_slug: str, new_slug: str, project_id: int
"""
Updates a monitor seat assignment's slug.
"""
+
+ def should_emit_profile_duration_outcome(
+ self, organization: Organization, profile: Profile
+ ) -> bool:
+ """
+ Determines if the profile duration outcome should be emitted.
+ """
+ return True
diff --git a/src/sentry/receivers/features.py b/src/sentry/receivers/features.py
index c4183c0838991..7eb48ad997171 100644
--- a/src/sentry/receivers/features.py
+++ b/src/sentry/receivers/features.py
@@ -306,6 +306,7 @@ def record_alert_rule_created(
alert_rule_ui_component=None,
duplicate_rule=None,
wizard_v3=None,
+ query_type=None,
**kwargs,
):
# NOTE: This intentionally does not fire for the default issue alert rule
@@ -334,6 +335,7 @@ def record_alert_rule_created(
alert_rule_ui_component=alert_rule_ui_component,
duplicate_rule=duplicate_rule,
wizard_v3=wizard_v3,
+ query_type=query_type,
)
diff --git a/src/sentry/receivers/outbox/control.py b/src/sentry/receivers/outbox/control.py
index 46931fe263dbc..a9b7d9c3f7db4 100644
--- a/src/sentry/receivers/outbox/control.py
+++ b/src/sentry/receivers/outbox/control.py
@@ -24,7 +24,7 @@
from sentry.receivers.outbox import maybe_process_tombstone
from sentry.relocation.services.relocation_export.service import region_relocation_export_service
from sentry.sentry_apps.models.sentry_app import SentryApp
-from sentry.tasks.sentry_apps import clear_region_cache
+from sentry.sentry_apps.tasks.sentry_apps import clear_region_cache
logger = logging.getLogger(__name__)
diff --git a/src/sentry/receivers/sentry_apps.py b/src/sentry/receivers/sentry_apps.py
index 8bb125988d960..5a46a0878eeea 100644
--- a/src/sentry/receivers/sentry_apps.py
+++ b/src/sentry/receivers/sentry_apps.py
@@ -12,6 +12,7 @@
from sentry.models.team import Team
from sentry.sentry_apps.logic import consolidate_events
from sentry.sentry_apps.services.app import RpcSentryAppInstallation, app_service
+from sentry.sentry_apps.tasks.sentry_apps import build_comment_webhook, workflow_notification
from sentry.signals import (
comment_created,
comment_deleted,
@@ -22,7 +23,6 @@
issue_resolved,
issue_unresolved,
)
-from sentry.tasks.sentry_apps import build_comment_webhook, workflow_notification
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
diff --git a/src/sentry/relay/config/__init__.py b/src/sentry/relay/config/__init__.py
index ed7a4c6cce0fe..bd7c9a003de89 100644
--- a/src/sentry/relay/config/__init__.py
+++ b/src/sentry/relay/config/__init__.py
@@ -60,9 +60,6 @@
"organizations:session-replay-video-disabled",
"organizations:session-replay",
"organizations:standalone-span-ingestion",
- "organizations:transaction-name-mark-scrubbed-as-sanitized",
- "organizations:transaction-name-normalize",
- "organizations:user-feedback-ingest",
"projects:discard-transaction",
"projects:profiling-ingest-unsampled-profiles",
"projects:span-metrics-extraction",
diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py
index fa0b987964a8f..168f67824c316 100644
--- a/src/sentry/relay/config/metric_extraction.py
+++ b/src/sentry/relay/config/metric_extraction.py
@@ -258,7 +258,10 @@ def _get_widget_metric_specs(
widget_queries = (
DashboardWidgetQuery.objects.filter(
widget__dashboard__organization=project.organization,
- widget__widget_type=DashboardWidgetTypes.DISCOVER,
+ widget__widget_type__in=[
+ DashboardWidgetTypes.DISCOVER,
+ DashboardWidgetTypes.TRANSACTION_LIKE,
+ ],
)
.prefetch_related("dashboardwidgetqueryondemand_set", "widget")
.order_by("-widget__dashboard__last_visited", "widget__order")
diff --git a/src/sentry/relay/globalconfig.py b/src/sentry/relay/globalconfig.py
index d9d1e81c8a004..6e58750ec3bdf 100644
--- a/src/sentry/relay/globalconfig.py
+++ b/src/sentry/relay/globalconfig.py
@@ -7,7 +7,7 @@
MetricExtractionGroups,
global_metric_extraction_groups,
)
-from sentry.relay.types import GenericFiltersConfig
+from sentry.relay.types import GenericFiltersConfig, RuleCondition
from sentry.utils import metrics
# List of options to include in the global config.
@@ -28,11 +28,21 @@
]
+class SpanOpDefaultRule(TypedDict):
+ condition: RuleCondition
+ value: str
+
+
+class SpanOpDefaults(TypedDict):
+ rules: list[SpanOpDefaultRule]
+
+
class GlobalConfig(TypedDict, total=False):
measurements: MeasurementsConfig
aiModelCosts: AIModelCosts
metricExtraction: MetricExtractionGroups
filters: GenericFiltersConfig | None
+ spanOpDefaults: SpanOpDefaults
options: dict[str, Any]
@@ -43,6 +53,25 @@ def get_global_generic_filters() -> GenericFiltersConfig:
}
+def span_op_defaults() -> SpanOpDefaults:
+ return {
+ "rules": [
+ {
+ # If span.data[messaging.system] is set, use span.op "message":
+ "condition": {
+ "op": "not",
+ "inner": {
+ "op": "eq",
+ "name": "span.data.messaging\\.system",
+ "value": None,
+ },
+ },
+ "value": "message",
+ }
+ ]
+ }
+
+
@metrics.wraps("relay.globalconfig.get")
def get_global_config():
"""Return the global configuration for Relay."""
@@ -51,6 +80,7 @@ def get_global_config():
"measurements": get_measurements_config(),
"aiModelCosts": ai_model_costs_config(),
"metricExtraction": global_metric_extraction_groups(),
+ "spanOpDefaults": span_op_defaults(),
}
filters = get_global_generic_filters()
diff --git a/src/sentry/remote_config/README.md b/src/sentry/remote_config/README.md
deleted file mode 100644
index db945aa508a00..0000000000000
--- a/src/sentry/remote_config/README.md
+++ /dev/null
@@ -1 +0,0 @@
-# Remote Configuration Product
diff --git a/src/sentry/remote_config/docs/api.md b/src/sentry/remote_config/docs/api.md
deleted file mode 100644
index fc05ee85c44d6..0000000000000
--- a/src/sentry/remote_config/docs/api.md
+++ /dev/null
@@ -1,157 +0,0 @@
-# Configurations API
-
-Host: https://sentry.io/api/0
-
-**Authors.**
-
-@cmanallen
-
-## Configuration [/projects///configuration/]
-
-### Get Configuration [GET]
-
-Retrieve the project's configuration.
-
-**Attributes**
-
-| Column | Type | Description |
-| -------- | -------------- | --------------------------------------------- |
-| features | array[Feature] | Custom, user-defined configuration container. |
-| options | Option | Sentry SDK options container. |
-
-**Feature Object**
-
-| Field | Type | Description |
-| ----- | ------ | ---------------------------------- |
-| key | string | The name used to lookup a feature. |
-| value | any | A JSON value. |
-
-**Option Object**
-
-| Field | Type | Description |
-| ------------------ | ----- | --------------------------------------------------- |
-| sample_rate | float | Error sample rate. A numeric value between 0 and 1. |
-| traces_sample_rate | float | Trace sample rate. A numeric value between 0 and 1. |
-
-**If an existing configuration exists**
-
-- Response 200
-
- ```json
- {
- "data": {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- }
- }
- }
- ```
-
-**If no existing configuration exists**
-
-- Response 404
-
-### Set Configuration [POST]
-
-Set the project's configuration.
-
-- Request
-
- ```json
- {
- "data": {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- }
- }
- }
- ```
-
-- Response 201
-
- ```json
- {
- "data": {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- }
- }
- }
- ```
-
-### Delete Configuration [DELETE]
-
-Delete the project's configuration.
-
-- Response 204
-
-## Configuration Proxy [/remote-config/projects//]
-
-Temporary configuration proxy resource.
-
-### Get Configuration [GET]
-
-Fetch a project's configuration. Responses should be proxied exactly to the SDK.
-
-- Response 200
-
- - Headers
-
- Cache-Control: public, max-age=3600
- Content-Type: application/json
- ETag: a7966bf58e23583c9a5a4059383ff850
-
- - Body
-
- ```json
- {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- },
- "version": 1
- }
- ```
diff --git a/src/sentry/remote_config/docs/protocol.md b/src/sentry/remote_config/docs/protocol.md
deleted file mode 100644
index 30885911c3167..0000000000000
--- a/src/sentry/remote_config/docs/protocol.md
+++ /dev/null
@@ -1,106 +0,0 @@
-# Remote Configuration Protocol
-
-Host: https://o1300299.ingest.us.sentry.io
-
-**Authors.**
-
-@cmanallen
-
-## Configuration [/api//configuration/]
-
-### Get Configuration [GET]
-
-Retrieve a project's configuration.
-
-**Attributes**
-
-| Field | Type | Description |
-| -------- | -------------- | --------------------------------------------- |
-| features | array[Feature] | Custom, user-defined configuration container. |
-| options | Option | Sentry SDK options container. |
-| version | number | The version of the protocol. |
-
-**Feature Object**
-
-| Field | Type | Description |
-| ----- | ------ | ---------------------------------- |
-| key | string | The name used to lookup a feature. |
-| value | any | A JSON value. |
-
-**Option Object**
-
-| Field | Type | Description |
-| ------------------ | ----- | ------------------ |
-| sample_rate | float | Error sample rate. |
-| traces_sample_rate | float | Trace sample rate. |
-
-**Server ETag Matches**
-
-If the server's ETag matches the request's a 304 (NOT MODIFIED) response is returned.
-
-- Request
-
- - Headers
-
- Accept: application/json
- If-None-Match: 8832040536272351350
-
-- Response 304
-
- - Headers
-
- Cache-Control: public, max-age=60
- Content-Type: application/json
- ETag: 8832040536272351350
-
-**Server ETag Does Not Match or If-None-Match Omitted**
-
-If the server's ETag does not match the request's a 200 response is returned.
-
-- Request
-
- - Headers
-
- Accept: application/json
- If-None-Match: ABC
-
-- Response 200
-
- - Headers
-
- Cache-Control: public, max-age=60
- Content-Type: application/json
- ETag: 8832040536272351350
-
- - Body
-
- ```json
- {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- },
- "version": 1
- }
- ```
-
-**No Configuration Exists for the Project**
-
-- Request
-
- - Headers
-
- Accept: application/json
- If-None-Match: ABC
-
-- Response 404
diff --git a/src/sentry/remote_config/endpoints.py b/src/sentry/remote_config/endpoints.py
deleted file mode 100644
index e2d8fe8c29730..0000000000000
--- a/src/sentry/remote_config/endpoints.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import hashlib
-
-from django.contrib.auth.models import AnonymousUser
-from rest_framework import serializers
-from rest_framework.authentication import BasicAuthentication
-from rest_framework.request import Request
-from rest_framework.response import Response
-from rest_framework.serializers import Serializer
-
-from sentry import features
-from sentry.api.api_owners import ApiOwner
-from sentry.api.api_publish_status import ApiPublishStatus
-from sentry.api.authentication import AuthenticationSiloLimit
-from sentry.api.base import Endpoint, region_silo_endpoint
-from sentry.api.bases.project import ProjectEndpoint, ProjectEventPermission
-from sentry.api.permissions import RelayPermission
-from sentry.models.project import Project
-from sentry.remote_config.storage import make_api_backend, make_configuration_backend
-from sentry.silo.base import SiloMode
-from sentry.utils import json, metrics
-
-
-class OptionsValidator(Serializer):
- sample_rate = serializers.FloatField(max_value=1.0, min_value=0, required=True)
- traces_sample_rate = serializers.FloatField(max_value=1.0, min_value=0, required=True)
-
-
-class FeatureValidator(Serializer):
- key = serializers.CharField(required=True)
- value = serializers.JSONField(required=True, allow_null=True)
-
-
-class ConfigurationValidator(Serializer):
- id = serializers.UUIDField(read_only=True)
- features: serializers.ListSerializer = serializers.ListSerializer(
- child=FeatureValidator(), required=True
- )
- options = OptionsValidator(required=True)
-
-
-class ConfigurationContainerValidator(Serializer):
- data = ConfigurationValidator(required=True) # type: ignore[assignment]
-
-
-@region_silo_endpoint
-class ProjectConfigurationEndpoint(ProjectEndpoint):
- owner = ApiOwner.REMOTE_CONFIG
- permission_classes = (ProjectEventPermission,)
- publish_status = {
- "GET": ApiPublishStatus.EXPERIMENTAL,
- "POST": ApiPublishStatus.EXPERIMENTAL,
- "DELETE": ApiPublishStatus.EXPERIMENTAL,
- }
-
- def get(self, request: Request, project: Project) -> Response:
- """Get remote configuration from project options."""
- if not features.has(
- "organizations:remote-config", project.organization, actor=request.user
- ):
- return Response("Disabled", status=404)
-
- remote_config, source = make_api_backend(project).get()
- if remote_config is None:
- return Response("Not found.", status=404)
-
- return Response(
- {"data": remote_config},
- status=200,
- headers={"X-Sentry-Data-Source": source},
- )
-
- def post(self, request: Request, project: Project) -> Response:
- """Set remote configuration in project options."""
- if not features.has(
- "organizations:remote-config", project.organization, actor=request.user
- ):
- return Response("Disabled", status=404)
-
- validator = ConfigurationContainerValidator(data=request.data)
- if not validator.is_valid():
- return self.respond(validator.errors, status=400)
-
- result = validator.validated_data["data"]
-
- make_api_backend(project).set(result)
- metrics.incr("remote_config.configuration.write")
- return Response({"data": result}, status=201)
-
- def delete(self, request: Request, project: Project) -> Response:
- """Delete remote configuration from project options."""
- if not features.has(
- "organizations:remote-config", project.organization, actor=request.user
- ):
- return Response("Disabled", status=404)
-
- make_api_backend(project).pop()
- metrics.incr("remote_config.configuration.delete")
- return Response("", status=204)
-
-
-@AuthenticationSiloLimit(SiloMode.REGION)
-class RelayAuthentication(BasicAuthentication):
- """Same as default Relay authentication except without body signing."""
-
- def authenticate(self, request: Request):
- return (AnonymousUser(), None)
-
-
-class RemoteConfigRelayPermission(RelayPermission):
- def has_permission(self, request: Request, view: object) -> bool:
- # Relay has permission to do everything! Except the only thing we expose is a simple
- # read endpoint full of public data...
- return True
-
-
-@region_silo_endpoint
-class ProjectConfigurationProxyEndpoint(Endpoint):
- publish_status = {
- "GET": ApiPublishStatus.EXPERIMENTAL,
- }
- owner = ApiOwner.REMOTE_CONFIG
- authentication_classes = (RelayAuthentication,)
- permission_classes = (RemoteConfigRelayPermission,)
- enforce_rate_limit = False
-
- def get(self, request: Request, project_id: int) -> Response:
- metrics.incr("remote_config.configuration.requested")
-
- project = Project.objects.select_related("organization").get(pk=project_id)
- if not features.has("organizations:remote-config", project.organization, actor=None):
- metrics.incr("remote_config.configuration.flag_disabled")
- return Response("Disabled", status=404)
-
- result, source = make_configuration_backend(project).get()
- if result is None:
- metrics.incr("remote_config.configuration.not_found")
- return Response("Not found", status=404)
-
- result_str = json.dumps(result)
- metrics.incr("remote_config.configuration.returned")
- metrics.distribution("remote_config.configuration.size", value=len(result_str))
-
- # Emulating cache headers just because.
- return Response(
- result,
- status=200,
- headers={
- "Cache-Control": "public, max-age=3600",
- "ETag": hashlib.sha1(result_str.encode()).hexdigest(),
- "X-Sentry-Data-Source": source,
- },
- )
diff --git a/src/sentry/remote_config/storage.py b/src/sentry/remote_config/storage.py
deleted file mode 100644
index 86a74da327445..0000000000000
--- a/src/sentry/remote_config/storage.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from io import BytesIO
-from typing import TypedDict
-
-from sentry import options
-from sentry.cache import default_cache
-from sentry.models.files.utils import get_storage
-from sentry.models.project import Project
-from sentry.utils import json, metrics
-
-JSONValue = str | int | float | bool | None | list["JSONValue"] | dict[str, "JSONValue"]
-
-
-class Options(TypedDict):
- sample_rate: float
- traces_sample_rate: float
-
-
-class Feature(TypedDict):
- key: str
- value: JSONValue
-
-
-class StorageFormat(TypedDict):
- features: list[Feature]
- options: Options
- version: int
-
-
-class APIFormat(TypedDict):
- features: list[Feature]
- options: Options
-
-
-class ConfigurationCache:
- def __init__(self, key: str) -> None:
- self.key = key
-
- def get(self) -> StorageFormat | None:
- cache_result = default_cache.get(self.key)
-
- if cache_result is None:
- metrics.incr("remote_config.configuration.cache_miss")
- else:
- metrics.incr("remote_config.configuration.cache_hit")
-
- return cache_result
-
- def set(self, value: StorageFormat) -> None:
- default_cache.set(self.key, value=value, timeout=None)
-
- def pop(self) -> None:
- try:
- default_cache.delete(self.key)
- except Exception:
- pass
-
-
-class ConfigurationStorage:
- def __init__(self, key: str) -> None:
- self.key = key
-
- @property
- def storage(self):
- return get_storage(self._make_storage_config())
-
- def get(self) -> StorageFormat | None:
- try:
- blob = self.storage.open(self.key)
- result = blob.read()
- blob.close()
- except Exception:
- return None
-
- if result is None:
- return None
- return json.loads(result)
-
- def set(self, value: StorageFormat) -> None:
- self.storage.save(self.key, BytesIO(json.dumps(value).encode()))
-
- def pop(self) -> None:
- try:
- self.storage.delete(self.key)
- except Exception:
- return None
-
- def _make_storage_config(self) -> dict | None:
- backend = options.get("configurations.storage.backend")
- if backend:
- return {
- "backend": backend,
- "options": options.get("configurations.storage.options"),
- }
- else:
- return None
-
-
-class ConfigurationBackend:
- def __init__(self, project: Project) -> None:
- self.project = project
- self.key = f"configurations/{self.project.id}/production"
-
- self.cache = ConfigurationCache(self.key)
- self.storage = ConfigurationStorage(self.key)
-
- def get(self) -> tuple[StorageFormat | None, str]:
- cache_result = self.cache.get()
- if cache_result is not None:
- return (cache_result, "cache")
-
- storage_result = self.storage.get()
- if storage_result:
- self.cache.set(storage_result)
-
- return (storage_result, "store")
-
- def set(self, value: StorageFormat) -> None:
- self.storage.set(value)
- self.cache.set(value)
-
- def pop(self) -> None:
- self.cache.pop()
- self.storage.pop()
-
-
-class APIBackendDecorator:
- def __init__(self, backend: ConfigurationBackend) -> None:
- self.driver = backend
-
- def get(self) -> tuple[APIFormat | None, str]:
- result, source = self.driver.get()
- return self._deserialize(result), source
-
- def set(self, value: APIFormat) -> None:
- self.driver.set(self._serialize(value))
-
- def pop(self) -> None:
- self.driver.pop()
-
- def _deserialize(self, result: StorageFormat | None) -> APIFormat | None:
- if result is None:
- return None
-
- return {
- "features": result["features"],
- "options": result["options"],
- }
-
- def _serialize(self, result: APIFormat) -> StorageFormat:
- return {
- "features": result["features"],
- "options": result["options"],
- "version": 1,
- }
-
-
-def make_configuration_backend(project: Project):
- return ConfigurationBackend(project)
-
-
-def make_api_backend(project: Project):
- return APIBackendDecorator(make_configuration_backend(project))
diff --git a/src/sentry/remote_subscriptions/consumers/result_consumer.py b/src/sentry/remote_subscriptions/consumers/result_consumer.py
index bffccf80e371e..6e7ddcdf99b6c 100644
--- a/src/sentry/remote_subscriptions/consumers/result_consumer.py
+++ b/src/sentry/remote_subscriptions/consumers/result_consumer.py
@@ -9,7 +9,7 @@
from arroyo.processing.strategies.abstract import ProcessingStrategy, ProcessingStrategyFactory
from arroyo.processing.strategies.commit import CommitOffsets
from arroyo.processing.strategies.run_task import RunTask
-from arroyo.types import BrokerValue, Commit, FilteredPayload, Message, Partition
+from arroyo.types import Commit, FilteredPayload, Message, Partition
from sentry.conf.types.kafka_definition import Topic, get_topic_codec
from sentry.remote_subscriptions.models import BaseRemoteSubscription
@@ -23,30 +23,12 @@
class ResultProcessor(abc.ABC, Generic[T, U]):
- def __init__(self):
- self.codec = get_topic_codec(self.topic_for_codec)
-
@property
@abc.abstractmethod
def subscription_model(self) -> type[U]:
pass
- @property
- @abc.abstractmethod
- def topic_for_codec(self) -> Topic:
- pass
-
- def __call__(self, message: Message[KafkaPayload | FilteredPayload]):
- assert not isinstance(message.payload, FilteredPayload)
- assert isinstance(message.value, BrokerValue)
-
- try:
- result = self.codec.decode(message.payload.value)
- except Exception:
- logger.exception(
- "Failed to decode message payload",
- extra={"payload": message.payload.value},
- )
+ def __call__(self, result: T):
try:
# TODO: Handle subscription not existing - we should remove the subscription from
# the remote system in that case.
@@ -74,18 +56,43 @@ def handle_result(self, subscription: U | None, result: T):
class ResultsStrategyFactory(ProcessingStrategyFactory[KafkaPayload], Generic[T, U]):
def __init__(self) -> None:
self.result_processor = self.result_processor_cls()
+ self.codec = get_topic_codec(self.topic_for_codec)
+
+ @property
+ @abc.abstractmethod
+ def topic_for_codec(self) -> Topic:
+ pass
@property
@abc.abstractmethod
def result_processor_cls(self) -> type[ResultProcessor[T, U]]:
pass
+ def decode_payload(self, payload: KafkaPayload | FilteredPayload) -> T | None:
+ assert not isinstance(payload, FilteredPayload)
+ try:
+ return self.codec.decode(payload.value)
+ except Exception:
+ logger.exception(
+ "Failed to decode message payload",
+ extra={"payload": payload.value},
+ )
+ return None
+
+ def process_single(self, message: Message[KafkaPayload | FilteredPayload]):
+ result = self.decode_payload(message.payload)
+ if result is not None:
+ self.result_processor(result)
+
+ def create_serial_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]:
+ return RunTask(
+ function=self.process_single,
+ next_step=CommitOffsets(commit),
+ )
+
def create_with_partitions(
self,
commit: Commit,
partitions: Mapping[Partition, int],
) -> ProcessingStrategy[KafkaPayload]:
- return RunTask(
- function=self.result_processor,
- next_step=CommitOffsets(commit),
- )
+ return self.create_serial_worker(commit)
diff --git a/src/sentry/replays/blueprints/api.md b/src/sentry/replays/blueprints/api.md
index 1e58fa793cca5..3df4cbaa5396c 100644
--- a/src/sentry/replays/blueprints/api.md
+++ b/src/sentry/replays/blueprints/api.md
@@ -244,98 +244,6 @@ Deletes a replay instance.
- Response 204
-## Replay Accessibility Issues [/projects///replays//accessibility-issues]
-
-This resource does not accept any URI parameters and is not paginated. Responses are ingested whole.
-
-### Fetch Replay Accessibility Issues [GET]
-
-- Parameters
-
- - timestamp (optional, number) - A UNIX timestamp (seconds since epoch) marking the last moment to render a replay for accessibility analysis.
-
-Retrieve a collection of accessibility issues.
-
-**Attributes**
-
-Issue Type:
-
-| Column | Type | Description |
-| --------- | ---------------------- | --------------------------------------------------- |
-| elements | array[IssueElement] | Array of elements matching the accessibility issue. |
-| help | string | - |
-| help_url | string | - |
-| id | string | - |
-| impact | Optional[enum[string]] | One of: 'minor', 'moderate', 'serious', 'critical' |
-| timestamp | number | - |
-
-IssueElement Type:
-
-| Column | Type | Description |
-| ------------ | ------------------------------ | --------------------------------------------------- |
-| alternatives | array[IssueElementAlternative] | Array of solutions which could solve the problem. |
-| element | string | Array of elements matching the accessibility issue. |
-| target | array[string] | Array of elements matching the accessibility issue. |
-
-IssueElementAlternative Type:
-
-| Column | Type | Description |
-| ------- | ------ | ------------------------------------- |
-| id | string | String ID of the accessibility issue. |
-| message | string | Message explaining the problem. |
-
-- Response 200
-
- - Headers
-
- - X-Hits=1
-
- - Body
-
- ```json
- {
- "data": [
- [
- {
- "elements": [
- {
- "alternatives": [
- {
- "id": "button-has-visible-text",
- "message": "Element does not have inner text that is visible to screen readers"
- },
- {
- "id": "aria-label",
- "message": "aria-label attribute does not exist or is empty"
- },
- {
- "id": "aria-labelledby",
- "message": "aria-labelledby attribute does not exist, references elements that do not exist or references elements that are empty"
- },
- {
- "id": "non-empty-title",
- "message": "Element has no title attribute"
- },
- {
- "id": "presentational-role",
- "message": "Element's default semantics were not overridden with role=\"none\" or role=\"presentation\""
- }
- ],
- "element": "",
- "target": ["button:nth-child(1)"]
- }
- ],
- "help_url": "https://dequeuniversity.com/rules/axe/4.8/button-name?application=playwright",
- "help": "Buttons must have discernible text",
- "id": "button-name",
- "impact": "critical",
- "timestamp": 1695967678108
- }
- ]
- ]
- }
- ```
-
## Replay Selectors [/organizations//replay-selectors/]
- Parameters
diff --git a/src/sentry/replays/consumers/recording_buffered.py b/src/sentry/replays/consumers/recording_buffered.py
index 05021dbe51f73..a2dd7982c1747 100644
--- a/src/sentry/replays/consumers/recording_buffered.py
+++ b/src/sentry/replays/consumers/recording_buffered.py
@@ -101,7 +101,7 @@ def cast_payload_from_bytes(x: bytes) -> Any:
class BufferCommitFailed(Exception):
- ...
+ pass
class RecordingBufferedStrategyFactory(ProcessingStrategyFactory[KafkaPayload]):
diff --git a/src/sentry/replays/endpoints/project_replay_accessibility_issues.py b/src/sentry/replays/endpoints/project_replay_accessibility_issues.py
deleted file mode 100644
index 0b347464e91a4..0000000000000
--- a/src/sentry/replays/endpoints/project_replay_accessibility_issues.py
+++ /dev/null
@@ -1,164 +0,0 @@
-from __future__ import annotations
-
-import logging
-import uuid
-from typing import Any
-
-import requests
-from rest_framework.exceptions import ParseError
-from rest_framework.request import Request
-from rest_framework.response import Response
-
-from sentry import features, options
-from sentry.api.api_owners import ApiOwner
-from sentry.api.api_publish_status import ApiPublishStatus
-from sentry.api.base import region_silo_endpoint
-from sentry.api.bases.project import ProjectEndpoint
-from sentry.models.project import Project
-from sentry.replays.lib.storage import make_recording_filename
-from sentry.replays.usecases.reader import (
- fetch_direct_storage_segments_meta,
- segment_row_to_storage_meta,
-)
-from sentry.replays.usecases.segment import query_segment_storage_meta_by_timestamp
-from sentry.types.ratelimit import RateLimit, RateLimitCategory
-from sentry.utils import metrics
-from sentry.utils.cursors import Cursor, CursorResult
-
-REFERRER = "replays.query.query_replay_clicks_dataset"
-
-logger = logging.getLogger()
-
-
-@region_silo_endpoint
-class ProjectReplayAccessibilityIssuesEndpoint(ProjectEndpoint):
- # Internal API maintenance decoration.
- owner = ApiOwner.REPLAY
- publish_status = {
- "GET": ApiPublishStatus.EXPERIMENTAL,
- }
-
- # Rate Limits
- enforce_rate_limit = True
- rate_limits = {
- "GET": {
- RateLimitCategory.IP: RateLimit(limit=5, window=1),
- RateLimitCategory.USER: RateLimit(limit=5, window=1),
- RateLimitCategory.ORGANIZATION: RateLimit(limit=5, window=1),
- }
- }
-
- def get(self, request: Request, project: Project, replay_id: str) -> Response:
- if not features.has(
- "organizations:session-replay", project.organization, actor=request.user
- ):
- return Response(status=404)
-
- if not features.has(
- "organizations:session-replay-accessibility-issues",
- project.organization,
- actor=request.user,
- ):
- metrics.incr("session-replay-accessibility-issues-flag-disabled")
- return Response(status=404)
-
- if options.get("organizations:session-replay-accessibility-issues-enabled") is False:
- metrics.incr("session-replay-accessibility-issues-option-disabled")
- return Response(status=404)
-
- try:
- replay_id = str(uuid.UUID(replay_id)).replace("-", "")
- except ValueError:
- return Response(status=404)
-
- timestamp_param = request.GET.get("timestamp", None)
- if timestamp_param is None:
- timestamp = None
- else:
- try:
- timestamp = float(timestamp_param)
- except TypeError:
- timestamp = None
- except ValueError:
- raise ParseError("Invalid timestamp value specified.")
-
- def data_fn(offset, limit):
- # Increment a counter for every call to the accessibility service.
- metrics.incr("session-replay-accessibility-issues-count")
-
- # We only support direct-storage. Filestore is deprecated and should be removed
- # from the driver.
- if timestamp is None:
- # If no timestamp is provided we render 5 segments by convention.
- segments = fetch_direct_storage_segments_meta(
- project.id,
- replay_id,
- offset,
- limit=5,
- )
- else:
- # If a timestamp was provided we fetch every segment that started prior to the
- # timestamp value.
- results = query_segment_storage_meta_by_timestamp(
- project.organization.id,
- project.id,
- replay_id,
- timestamp,
- )
- segments = [
- segment_row_to_storage_meta(project.id, replay_id, row)
- for row in results["data"]
- ]
-
- if len(segments) == 0:
- return {"meta": {"total": 0}, "data": []}
-
- # Make a POST request to the replay-analyzer service. The files will be downloaded
- # and evaluated on the remote system. The accessibility output is then redirected to
- # the client.
- return request_accessibility_issues(
- [make_recording_filename(segment) for segment in segments]
- )
-
- return self.paginate(
- request=request,
- paginator=ReplayAccessibilityPaginator(data_fn=data_fn),
- )
-
-
-class ReplayAccessibilityPaginator:
- """Replay Analyzer service paginator class."""
-
- def __init__(self, data_fn):
- self.data_fn = data_fn
-
- def get_result(self, limit, cursor=None):
- offset = cursor.offset if cursor is not None else 0
-
- data = self.data_fn(offset=offset, limit=limit)
-
- return CursorResult(
- data,
- hits=data.pop("meta")["total"],
- prev=Cursor(0, max(0, offset - limit), True, offset > 0),
- next=Cursor(0, max(0, offset + limit), False, False),
- )
-
-
-def request_accessibility_issues(filenames: list[str]) -> Any:
- try:
- response = requests.post(
- f"{options.get('replay.analyzer_service_url')}/api/0/analyze/accessibility",
- json={"data": {"filenames": filenames}},
- )
-
- content = response.content
- status_code = response.status_code
-
- if status_code == 201:
- return response.json()
- else:
- raise ValueError(f"An error occurred: {content.decode('utf-8')}")
- except Exception:
- logger.exception("replay accessibility analysis failed")
- raise ParseError("Could not analyze accessibility issues at this time.")
diff --git a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
index 74c4bccfe41a8..d10b64cd2f7f8 100644
--- a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
+++ b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
@@ -81,7 +81,7 @@ def get(self, request: Request, project, replay_id, segment_id) -> HttpResponseB
def download(self, segment: RecordingSegmentStorageMeta) -> StreamingHttpResponse:
with sentry_sdk.start_span(
op="download_segment",
- description="ProjectReplayRecordingSegmentDetailsEndpoint.download_segment",
+ name="ProjectReplayRecordingSegmentDetailsEndpoint.download_segment",
) as child_span:
segment_bytes = download_segment(segment, span=child_span)
segment_reader = BytesIO(segment_bytes)
diff --git a/src/sentry/replays/lib/http.py b/src/sentry/replays/lib/http.py
index 236e968e99825..83b2887ce0e37 100644
--- a/src/sentry/replays/lib/http.py
+++ b/src/sentry/replays/lib/http.py
@@ -4,11 +4,11 @@
class MalformedRangeHeader(Exception):
- ...
+ pass
class UnsatisfiableRange(Exception):
- ...
+ pass
class RangeProtocol(Protocol):
diff --git a/src/sentry/replays/lib/new_query/conditions.py b/src/sentry/replays/lib/new_query/conditions.py
index 55f51f56578e5..1d3db63a5a09d 100644
--- a/src/sentry/replays/lib/new_query/conditions.py
+++ b/src/sentry/replays/lib/new_query/conditions.py
@@ -20,7 +20,7 @@
from typing import Any, NoReturn, TypeVar
from uuid import UUID
-from snuba_sdk import And, Condition, Function, Identifier, Lambda, Op
+from snuba_sdk import And, Condition, Function, Identifier, Lambda, Op, Or
from snuba_sdk.expressions import Expression
from sentry.replays.lib.new_query.errors import OperatorNotSupported
@@ -211,21 +211,39 @@ class IPv4Scalar(GenericBase):
"""IPv4 scalar condition class."""
@staticmethod
- def visit_eq(expression: Expression, value: str) -> Condition:
+ def visit_eq(expression: Expression, value: str | None) -> Condition:
+ if value is None:
+ return Condition(Function("isNull", parameters=[expression]), Op.EQ, 1)
return Condition(expression, Op.EQ, Function("toIPv4", parameters=[value]))
@staticmethod
- def visit_neq(expression: Expression, value: str) -> Condition:
+ def visit_neq(expression: Expression, value: str | None) -> Condition:
+ if value is None:
+ return Condition(Function("isNull", parameters=[expression]), Op.EQ, 0)
return Condition(expression, Op.NEQ, Function("toIPv4", parameters=[value]))
@staticmethod
- def visit_in(expression: Expression, value: list[str]) -> Condition:
- values = [Function("toIPv4", parameters=[v]) for v in value]
+ def visit_in(expression: Expression, value_list: list[str | None]) -> Condition:
+ values = [Function("toIPv4", parameters=[v]) for v in value_list if v is not None]
+ if None in value_list:
+ return Or(
+ conditions=[
+ Condition(expression, Op.IN, values),
+ Condition(Function("isNull", parameters=[expression]), Op.EQ, 1),
+ ]
+ )
return Condition(expression, Op.IN, values)
@staticmethod
- def visit_not_in(expression: Expression, value: list[str]) -> Condition:
- values = [Function("toIPv4", parameters=[v]) for v in value]
+ def visit_not_in(expression: Expression, value_list: list[str | None]) -> Condition:
+ values = [Function("toIPv4", parameters=[v]) for v in value_list if v is not None]
+ if None in value_list:
+ return And(
+ conditions=[
+ Condition(expression, Op.NOT_IN, values),
+ Condition(Function("isNull", parameters=[expression]), Op.EQ, 0),
+ ]
+ )
return Condition(expression, Op.NOT_IN, values)
diff --git a/src/sentry/replays/lib/new_query/fields.py b/src/sentry/replays/lib/new_query/fields.py
index ccd05f9ce7f52..fc499fb9db5fe 100644
--- a/src/sentry/replays/lib/new_query/fields.py
+++ b/src/sentry/replays/lib/new_query/fields.py
@@ -154,6 +154,10 @@ class StringColumnField(ColumnField[str]):
"""String-type condition column field."""
+class NullableStringColumnField(ColumnField[str | None]):
+ """Null or string-type condition column field."""
+
+
class IntegerColumnField(ColumnField[int]):
"""Integer-type condition column field."""
diff --git a/src/sentry/replays/lib/new_query/parsers.py b/src/sentry/replays/lib/new_query/parsers.py
index 1406ff0b8b865..1b665392fec9e 100644
--- a/src/sentry/replays/lib/new_query/parsers.py
+++ b/src/sentry/replays/lib/new_query/parsers.py
@@ -26,8 +26,10 @@ def parse_str(value: str) -> str:
return value
-def parse_ipv4(value: str) -> str:
+def parse_ipv4(value: str) -> str | None:
"""Validates an IPv4 address"""
+ if value == "":
+ return None
try:
ipaddress.IPv4Address(value)
return value
diff --git a/src/sentry/replays/lib/new_query/utils.py b/src/sentry/replays/lib/new_query/utils.py
index 1458236a99a97..8e39201c1ed7b 100644
--- a/src/sentry/replays/lib/new_query/utils.py
+++ b/src/sentry/replays/lib/new_query/utils.py
@@ -4,6 +4,7 @@
from uuid import UUID
from snuba_sdk import Condition, Function, Op
+from snuba_sdk.conditions import And, Or
def to_uuid(value: UUID) -> Function:
@@ -29,7 +30,7 @@ def does_not_contain(condition: Condition) -> Condition:
# Work-around for https://github.com/getsentry/snuba-sdk/issues/115
-def translate_condition_to_function(condition: Condition) -> Function:
+def translate_condition_to_function(condition: Condition | And | Or) -> Function:
"""Transforms infix operations to prefix operations."""
if condition.op == Op.EQ:
return Function("equals", parameters=[condition.lhs, condition.rhs])
diff --git a/src/sentry/replays/usecases/ingest/__init__.py b/src/sentry/replays/usecases/ingest/__init__.py
index 137e832b4df85..95c5522060f12 100644
--- a/src/sentry/replays/usecases/ingest/__init__.py
+++ b/src/sentry/replays/usecases/ingest/__init__.py
@@ -74,7 +74,7 @@ def ingest_recording(
with sentry_sdk.scope.use_isolation_scope(isolation_scope):
with transaction.start_child(
op="replays.usecases.ingest.ingest_recording",
- description="ingest_recording",
+ name="ingest_recording",
):
message = RecordingIngestMessage(
replay_id=message_dict["replay_id"],
@@ -260,7 +260,7 @@ def recording_post_processor(
# Emit DOM search metadata to Clickhouse.
with transaction.start_child(
op="replays.usecases.ingest.parse_and_emit_replay_actions",
- description="parse_and_emit_replay_actions",
+ name="parse_and_emit_replay_actions",
):
project = Project.objects.get_from_cache(id=message.project_id)
parse_and_emit_replay_actions(
diff --git a/src/sentry/replays/usecases/query/conditions/aggregate.py b/src/sentry/replays/usecases/query/conditions/aggregate.py
index 7ab334b103148..ccbfd43b899b9 100644
--- a/src/sentry/replays/usecases/query/conditions/aggregate.py
+++ b/src/sentry/replays/usecases/query/conditions/aggregate.py
@@ -25,7 +25,7 @@
from uuid import UUID
-from snuba_sdk import Condition
+from snuba_sdk import And, Condition, Or
from snuba_sdk.expressions import Expression
from sentry.replays.lib.new_query.conditions import (
@@ -40,6 +40,14 @@
from sentry.replays.lib.new_query.utils import contains, does_not_contain
+def _nonempty_str(expression: Expression) -> Condition:
+ return StringScalar.visit_neq(expression, "")
+
+
+def _nonnull_ipv4(expression: Expression) -> Condition:
+ return IPv4Scalar.visit_neq(expression, None)
+
+
class SumOfIntegerIdScalar(GenericBase):
@staticmethod
def visit_eq(expression: Expression, value: int) -> Condition:
@@ -60,46 +68,76 @@ def visit_not_in(expression: Expression, value: list[int]) -> Condition:
class SumOfIPv4Scalar(GenericBase):
@staticmethod
- def visit_eq(expression: Expression, value: str) -> Condition:
+ def visit_eq(expression: Expression, value: str | None) -> Condition:
+ if value is None:
+ return does_not_contain(_nonnull_ipv4(expression))
return contains(IPv4Scalar.visit_eq(expression, value))
@staticmethod
- def visit_neq(expression: Expression, value: str) -> Condition:
+ def visit_neq(expression: Expression, value: str | None) -> Condition:
+ if value is None:
+ return contains(_nonnull_ipv4(expression))
return does_not_contain(IPv4Scalar.visit_eq(expression, value))
@staticmethod
- def visit_in(expression: Expression, value: list[str]) -> Condition:
- return contains(IPv4Scalar.visit_in(expression, value))
+ def visit_in(expression: Expression, value_list: list[str | None]) -> Condition:
+ nonempty_case = contains(
+ IPv4Scalar.visit_in(expression, [v for v in value_list if v is not None])
+ )
+ if None in value_list:
+ return Or(conditions=[SumOfIPv4Scalar.visit_eq(expression, None), nonempty_case])
+ return nonempty_case
@staticmethod
- def visit_not_in(expression: Expression, value: list[str]) -> Condition:
- return does_not_contain(IPv4Scalar.visit_in(expression, value))
+ def visit_not_in(expression: Expression, value_list: list[str | None]) -> Condition:
+ nonempty_case = does_not_contain(
+ IPv4Scalar.visit_in(expression, [v for v in value_list if v is not None])
+ )
+ if None in value_list:
+ return And(conditions=[SumOfIPv4Scalar.visit_neq(expression, None), nonempty_case])
+ return nonempty_case
class SumOfStringScalar(GenericBase):
@staticmethod
def visit_eq(expression: Expression, value: str) -> Condition:
+ if value == "":
+ return does_not_contain(_nonempty_str(expression))
return contains(StringScalar.visit_eq(expression, value))
@staticmethod
def visit_neq(expression: Expression, value: str) -> Condition:
+ if value == "":
+ return contains(_nonempty_str(expression))
return does_not_contain(StringScalar.visit_eq(expression, value))
@staticmethod
def visit_match(expression: Expression, value: str) -> Condition:
+ # Assumes this is only called on wildcard strings, so `value` is non-empty.
return contains(StringScalar.visit_match(expression, value))
@staticmethod
def visit_not_match(expression: Expression, value: str) -> Condition:
+ # Assumes this is only called on wildcard strings, so `value` is non-empty.
return does_not_contain(StringScalar.visit_match(expression, value))
@staticmethod
- def visit_in(expression: Expression, value: list[str]) -> Condition:
- return contains(StringScalar.visit_in(expression, value))
+ def visit_in(expression: Expression, value_list: list[str]) -> Condition:
+ nonempty_case = contains(
+ StringScalar.visit_in(expression, [v for v in value_list if v != ""])
+ )
+ if "" in value_list:
+ return Or(conditions=[SumOfStringScalar.visit_eq(expression, ""), nonempty_case])
+ return nonempty_case
@staticmethod
- def visit_not_in(expression: Expression, value: list[str]) -> Condition:
- return does_not_contain(StringScalar.visit_in(expression, value))
+ def visit_not_in(expression: Expression, value_list: list[str]) -> Condition:
+ nonempty_case = does_not_contain(
+ StringScalar.visit_in(expression, [v for v in value_list if v != ""])
+ )
+ if "" in value_list:
+ return And(conditions=[SumOfStringScalar.visit_neq(expression, ""), nonempty_case])
+ return nonempty_case
class SumOfStringArray(GenericBase):
diff --git a/src/sentry/replays/usecases/query/configs/aggregate.py b/src/sentry/replays/usecases/query/configs/aggregate.py
index 740a9e333528e..2a133d1972951 100644
--- a/src/sentry/replays/usecases/query/configs/aggregate.py
+++ b/src/sentry/replays/usecases/query/configs/aggregate.py
@@ -19,6 +19,7 @@
CountField,
FieldProtocol,
IntegerColumnField,
+ NullableStringColumnField,
StringColumnField,
SumField,
SumLengthField,
@@ -121,7 +122,7 @@ def array_string_field(column_name: str) -> StringColumnField:
"urls": array_string_field("urls"),
"user.email": string_field("user_email"),
"user.id": string_field("user_id"),
- "user.ip_address": StringColumnField("ip_address_v4", parse_ipv4, SumOfIPv4Scalar),
+ "user.ip_address": NullableStringColumnField("ip_address_v4", parse_ipv4, SumOfIPv4Scalar),
"user.username": string_field("user_name"),
"viewed_by_id": IntegerColumnField("viewed_by_id", parse_int, SumOfIntegerIdScalar),
"warning_ids": UUIDColumnField("warning_id", parse_uuid, SumOfUUIDScalar),
diff --git a/src/sentry/replays/usecases/query/configs/scalar.py b/src/sentry/replays/usecases/query/configs/scalar.py
index efb3a022589e0..ecacafd47cecf 100644
--- a/src/sentry/replays/usecases/query/configs/scalar.py
+++ b/src/sentry/replays/usecases/query/configs/scalar.py
@@ -7,14 +7,13 @@
from sentry.api.event_search import ParenExpression, SearchFilter
from sentry.replays.lib.new_query.conditions import (
- IPv4Scalar,
NonEmptyStringScalar,
StringArray,
StringScalar,
UUIDArray,
)
from sentry.replays.lib.new_query.fields import FieldProtocol, StringColumnField, UUIDColumnField
-from sentry.replays.lib.new_query.parsers import parse_ipv4, parse_str, parse_uuid
+from sentry.replays.lib.new_query.parsers import parse_str, parse_uuid
from sentry.replays.lib.selector.parse import parse_selector
from sentry.replays.usecases.query.conditions import (
ClickSelectorComposite,
@@ -63,10 +62,6 @@ def string_field(column_name: str) -> StringColumnField:
"error_ids": ComputedField(parse_uuid, ErrorIdScalar),
"trace_ids": UUIDColumnField("trace_ids", parse_uuid, UUIDArray),
"urls": StringColumnField("urls", parse_str, StringArray),
- "user.email": StringColumnField("user_email", parse_str, NonEmptyStringScalar),
- "user.id": StringColumnField("user_id", parse_str, NonEmptyStringScalar),
- "user.ip_address": StringColumnField("ip_address_v4", parse_ipv4, IPv4Scalar),
- "user.username": StringColumnField("user_name", parse_str, NonEmptyStringScalar),
}
# Aliases
@@ -74,7 +69,6 @@ def string_field(column_name: str) -> StringColumnField:
varying_search_config["trace_id"] = varying_search_config["trace_ids"]
varying_search_config["trace"] = varying_search_config["trace_ids"]
varying_search_config["url"] = varying_search_config["urls"]
-varying_search_config["user.ip"] = varying_search_config["user.ip_address"]
varying_search_config["*"] = TagField(query=TagScalar)
diff --git a/src/sentry/roles/__init__.py b/src/sentry/roles/__init__.py
index d079de62b831a..9786b2bb15776 100644
--- a/src/sentry/roles/__init__.py
+++ b/src/sentry/roles/__init__.py
@@ -17,5 +17,5 @@
get_choices = default_manager.get_choices
get_default = default_manager.get_default
get_top_dog = default_manager.get_top_dog
-with_scope = default_manager.with_scope
with_any_scope = default_manager.with_any_scope
+with_scope = default_manager.with_scope
diff --git a/src/sentry/rules/actions/integrations/create_ticket/utils.py b/src/sentry/rules/actions/integrations/create_ticket/utils.py
index 7f025a2aeafb3..4d9476c8cb377 100644
--- a/src/sentry/rules/actions/integrations/create_ticket/utils.py
+++ b/src/sentry/rules/actions/integrations/create_ticket/utils.py
@@ -125,6 +125,7 @@ def create_issue(event: GroupEvent, futures: Sequence[RuleFuture]) -> None:
"provider": provider,
"integration_id": integration.id,
"error_message": str(e),
+ "exception_type": type(e).__name__,
},
)
metrics.incr(
@@ -136,5 +137,4 @@ def create_issue(event: GroupEvent, futures: Sequence[RuleFuture]) -> None:
raise
- if not event.get_tag("sample_event") == "yes":
- create_link(integration, installation, event, response)
+ create_link(integration, installation, event, response)
diff --git a/src/sentry/rules/actions/notify_event_service.py b/src/sentry/rules/actions/notify_event_service.py
index 91e9079dd9095..4180216674610 100644
--- a/src/sentry/rules/actions/notify_event_service.py
+++ b/src/sentry/rules/actions/notify_event_service.py
@@ -7,7 +7,6 @@
from django import forms
from sentry.api.serializers import serialize
-from sentry.api.serializers.models.app_platform_event import AppPlatformEvent
from sentry.eventstore.models import GroupEvent
from sentry.incidents.endpoints.serializers.incident import IncidentSerializer
from sentry.incidents.models.alert_rule import AlertRuleTriggerAction
@@ -19,8 +18,9 @@
from sentry.rules.actions.base import EventAction
from sentry.rules.actions.services import PluginService
from sentry.rules.base import CallbackFuture
+from sentry.sentry_apps.api.serializers.app_platform_event import AppPlatformEvent
from sentry.sentry_apps.services.app import RpcSentryAppService, app_service
-from sentry.tasks.sentry_apps import notify_sentry_app
+from sentry.sentry_apps.tasks.sentry_apps import notify_sentry_app
from sentry.utils import json, metrics
from sentry.utils.forms import set_field_choices
diff --git a/src/sentry/rules/actions/sentry_apps/notify_event.py b/src/sentry/rules/actions/sentry_apps/notify_event.py
index 4fad36d35a71f..4ddb8e7c049e0 100644
--- a/src/sentry/rules/actions/sentry_apps/notify_event.py
+++ b/src/sentry/rules/actions/sentry_apps/notify_event.py
@@ -15,7 +15,7 @@
RpcSentryAppEventData,
app_service,
)
-from sentry.tasks.sentry_apps import notify_sentry_app
+from sentry.sentry_apps.tasks.sentry_apps import notify_sentry_app
ValidationError = serializers.ValidationError
diff --git a/src/sentry/rules/filters/latest_adopted_release_filter.py b/src/sentry/rules/filters/latest_adopted_release_filter.py
index a695653fd85d9..b354c082f310c 100644
--- a/src/sentry/rules/filters/latest_adopted_release_filter.py
+++ b/src/sentry/rules/filters/latest_adopted_release_filter.py
@@ -52,7 +52,7 @@ def clean_environment(self):
class LatestAdoptedReleaseFilter(EventFilter):
id = "sentry.rules.filters.latest_adopted_release_filter.LatestAdoptedReleaseFilter"
form_cls = LatestAdoptedReleaseForm
- label = "The {oldest_or_newest} release associated with the event's issue is {older_or_newer} than the latest release in {environment}"
+ label = "The {oldest_or_newest} adopted release associated with the event's issue is {older_or_newer} than the latest adopted release in {environment}"
form_fields = {
"oldest_or_newest": {"type": "choice", "choices": list(model_age_choices)},
diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py
index e85b7b2f3b80f..c3fa9fead1c41 100644
--- a/src/sentry/rules/processing/delayed_processing.py
+++ b/src/sentry/rules/processing/delayed_processing.py
@@ -7,7 +7,6 @@
from itertools import islice
from typing import Any, DefaultDict, NamedTuple
-import sentry_sdk
from django.db.models import OuterRef, Subquery
from sentry import buffer, nodestore, options
@@ -359,12 +358,8 @@ def passes_comparison(
query_values = [
condition_group_results[unique_query][group_id] for unique_query in unique_queries
]
- except KeyError as exception:
- sentry_sdk.capture_exception(exception)
- logger.exception(
- "delayed_processing.missing_query_results",
- extra={"exception": exception, "group_id": group_id, "project_id": project_id},
- )
+ except KeyError:
+ metrics.incr("delayed_processing.missing_query_result")
return False
calculated_value = query_values[0]
diff --git a/src/sentry/runner/commands/backup.py b/src/sentry/runner/commands/backup.py
index 040078a5f08d4..40463dc6ac656 100644
--- a/src/sentry/runner/commands/backup.py
+++ b/src/sentry/runner/commands/backup.py
@@ -185,7 +185,7 @@ def get_printer(silent: bool, no_prompt: bool) -> Printer:
return InputOutputPrinter()
-def get_filter_arg(name: str, from_cmd_line: str, from_file: IO[str] | None) -> str:
+def get_filter_arg(name: str, from_cmd_line: str | None, from_file: IO[str] | None) -> str | None:
"""
Helper function to load `--filter-...`-style arguments from a file or the command line.
"""
@@ -199,10 +199,13 @@ def get_filter_arg(name: str, from_cmd_line: str, from_file: IO[str] | None) ->
return from_file.read() if from_file is not None else from_cmd_line
-def parse_filter_arg(filter_arg: str) -> set[str] | None:
+def parse_filter_arg(filter_arg: str | None) -> set[str] | None:
+ if filter_arg is None:
+ return None
+
filter_by = None
if filter_arg:
- filter_by = set(filter_arg.split(","))
+ filter_by = {arg.strip() for arg in filter_arg.split(",") if not arg.isspace()}
return filter_by
@@ -627,23 +630,27 @@ def import_() -> None:
type=click.File("rb"),
help=DECRYPT_WITH_GCP_KMS_HELP,
)
-@click.option(
- "--filter-usernames",
- default="",
- type=str,
- help="An optional comma-separated list of users to include. "
- "If this option is not set, all encountered users are imported.",
-)
@click.option(
"--findings-file",
type=click.File("w"),
required=False,
help=FINDINGS_FILE_HELP,
)
+@click.option(
+ "--filter-usernames",
+ default=None,
+ type=str,
+ required=False,
+ help="An optional comma-separated list of users to include. "
+ "If this option is not set, all encountered users are imported.",
+)
@click.option(
"--filter-usernames-file",
type=click.File("r"),
- help="Like `--filter-usernames`, except it pulls from a comma-separated file.",
+ required=False,
+    help="Like `--filter-usernames`, except it pulls from a comma-separated file. An empty file "
+    "equates to no usernames being compared. If you'd like to compare all usernames with no filter, "
+    "omit the `--filter-usernames[-file]` flag instead.",
)
@click.option(
"--merge-users",
@@ -682,14 +689,13 @@ def import_users(
from sentry.backup.imports import import_in_user_scope
printer = get_printer(silent=silent, no_prompt=no_prompt)
+ user_filter_arg = get_filter_arg("filter-usernames", filter_usernames, filter_usernames_file)
with write_import_findings(findings_file, printer):
import_in_user_scope(
src,
decryptor=get_decryptor_from_flags(decrypt_with, decrypt_with_gcp_kms),
flags=ImportFlags(merge_users=merge_users),
- user_filter=parse_filter_arg(
- get_filter_arg("filter-usernames", filter_usernames, filter_usernames_file)
- ),
+ user_filter=parse_filter_arg(user_filter_arg),
printer=printer,
)
@@ -708,7 +714,7 @@ def import_users(
)
@click.option(
"--filter-org-slugs",
- default="",
+ default=None,
type=str,
help="An optional comma-separated list of organization slugs to include. "
"If this option is not set, all encountered organizations are imported. "
@@ -918,14 +924,16 @@ def export() -> None:
)
@click.option(
"--filter-usernames",
- default="",
+ default=None,
type=str,
+ required=False,
help="An optional comma-separated list of users to include. "
"If this option is not set, all encountered users are imported.",
)
@click.option(
"--filter-usernames-file",
type=click.File("r"),
+ required=False,
help="Like `--filter-usernames`, except it pulls from a comma-separated file.",
)
@click.option(
@@ -971,14 +979,13 @@ def export_users(
from sentry.backup.exports import export_in_user_scope
printer = get_printer(silent=silent, no_prompt=no_prompt)
+ user_filter_arg = get_filter_arg("filter-usernames", filter_usernames, filter_usernames_file)
with write_export_findings(findings_file, printer):
export_in_user_scope(
dest,
encryptor=get_encryptor_from_flags(encrypt_with, encrypt_with_gcp_kms),
indent=indent,
- user_filter=parse_filter_arg(
- get_filter_arg("filter-usernames", filter_usernames, filter_usernames_file)
- ),
+ user_filter=parse_filter_arg(user_filter_arg),
printer=printer,
)
@@ -997,7 +1004,7 @@ def export_users(
)
@click.option(
"--filter-org-slugs",
- default="",
+ default=None,
type=str,
help="An optional comma-separated list of organization slugs to include. "
"If this option is not set, all encountered organizations are exported. "
diff --git a/src/sentry/runner/commands/configoptions.py b/src/sentry/runner/commands/configoptions.py
index 74fbb471d56d4..62ef19abff733 100644
--- a/src/sentry/runner/commands/configoptions.py
+++ b/src/sentry/runner/commands/configoptions.py
@@ -126,7 +126,7 @@ def configoptions(ctx: click.Context, dry_run: bool, file: str | None, hide_drif
drifted_options = set()
invalid_options = set()
- presenter_delegator = PresenterDelegator("options-automator")
+ presenter_delegator = PresenterDelegator("options-automator", dry_run=dry_run)
ctx.obj["presenter_delegator"] = presenter_delegator
for key, value in options_to_update.items():
diff --git a/src/sentry/runner/commands/presenters/audit_log_presenter.py b/src/sentry/runner/commands/presenters/audit_log_presenter.py
new file mode 100644
index 0000000000000..184db5480fd45
--- /dev/null
+++ b/src/sentry/runner/commands/presenters/audit_log_presenter.py
@@ -0,0 +1,53 @@
+import itertools
+import logging
+from datetime import datetime, timezone
+
+from sentry import options
+from sentry.flags.providers import FlagAuditLogItem, handle_flag_pole_event_internal
+from sentry.runner.commands.presenters.webhookpresenter import WebhookPresenter
+
+logger = logging.getLogger()
+
+
+class AuditLogPresenter(WebhookPresenter):
+ def __init__(self, source: str, dry_run: bool = False) -> None:
+ self.dry_run = dry_run
+ super().__init__(source)
+
+ @staticmethod
+ def is_webhook_enabled() -> bool:
+ return (
+ options.get("flags:options-audit-log-is-enabled") is True
+ and options.get("flags:options-audit-log-organization-id") is not None
+ )
+
+ def flush(self) -> None:
+ if self.dry_run:
+ logger.warning("Dry run. Skipping audit-log process.")
+ return None
+
+ if not self.is_webhook_enabled():
+ logger.warning("Options audit log webhook is disabled.")
+ return None
+
+ items = self._create_audit_log_items()
+ handle_flag_pole_event_internal(
+ items, organization_id=options.get("flags:options-audit-log-organization-id")
+ )
+
+ def _create_audit_log_items(self) -> list[FlagAuditLogItem]:
+ return [
+ {
+ "action": action,
+ "created_at": datetime.now(tz=timezone.utc),
+ "created_by": "internal",
+ "flag": flag,
+ "tags": tags,
+ }
+ for flag, action, tags in itertools.chain(
+ ((flag, "created", {"value": v}) for flag, v in self.set_options),
+ ((flag, "deleted", {}) for flag in self.unset_options),
+ ((flag, "updated", {"value": v}) for flag, _, v in self.updated_options),
+ ((flag, "updated", {}) for flag, _ in self.drifted_options),
+ )
+ ]
diff --git a/src/sentry/runner/commands/presenters/presenterdelegator.py b/src/sentry/runner/commands/presenters/presenterdelegator.py
index 18161346b4b90..7071ff0c9eab7 100644
--- a/src/sentry/runner/commands/presenters/presenterdelegator.py
+++ b/src/sentry/runner/commands/presenters/presenterdelegator.py
@@ -5,17 +5,23 @@
class PresenterDelegator:
- def __init__(self, source: str) -> None:
+ def __init__(self, source: str, dry_run: bool) -> None:
+ from sentry.runner.commands.presenters.audit_log_presenter import AuditLogPresenter
+
self._consolepresenter = ConsolePresenter()
self._slackpresenter = None
if WebhookPresenter.is_webhook_enabled():
self._slackpresenter = WebhookPresenter(source)
+ if AuditLogPresenter.is_webhook_enabled():
+ self._auditlogpresenter = AuditLogPresenter(source, dry_run)
def __getattr__(self, attr: str) -> Any:
def wrapper(*args: Any, **kwargs: Any) -> None:
getattr(self._consolepresenter, attr)(*args, **kwargs)
if self._slackpresenter:
getattr(self._slackpresenter, attr)(*args, **kwargs)
+ if self._auditlogpresenter:
+ getattr(self._auditlogpresenter, attr)(*args, **kwargs)
return wrapper
diff --git a/src/sentry/runner/initializer.py b/src/sentry/runner/initializer.py
index 80d46552305cf..d94b8433d1fb0 100644
--- a/src/sentry/runner/initializer.py
+++ b/src/sentry/runner/initializer.py
@@ -203,15 +203,6 @@ def bootstrap_options(settings: Any, config: str | None = None) -> None:
# these will be validated later after bootstrapping
for k, v in options.items():
settings.SENTRY_OPTIONS[k] = v
- # If SENTRY_URL_PREFIX is used in config, show deprecation warning and
- # set the newer SENTRY_OPTIONS['system.url-prefix']. Needs to be here
- # to check from the config file directly before the django setup is done.
- # TODO: delete when SENTRY_URL_PREFIX is removed
- if k == "SENTRY_URL_PREFIX":
- warnings.warn(
- DeprecatedSettingWarning("SENTRY_URL_PREFIX", "SENTRY_OPTIONS['system.url-prefix']")
- )
- settings.SENTRY_OPTIONS["system.url-prefix"] = v
# Now go back through all of SENTRY_OPTIONS and promote
# back into settings. This catches the case when values are defined
@@ -389,6 +380,8 @@ def initialize_app(config: dict[str, Any], skip_service_validation: bool = False
setup_services(validate=not skip_service_validation)
+ import_grouptype()
+
from django.utils import timezone
from sentry.app import env
@@ -580,13 +573,6 @@ def apply_legacy_settings(settings: Any) -> None:
# option.)
settings.SENTRY_REDIS_OPTIONS = options.get("redis.clusters")["default"]
- if not hasattr(settings, "SENTRY_URL_PREFIX"):
- url_prefix = options.get("system.url-prefix", silent=True)
- if not url_prefix:
- # HACK: We need to have some value here for backwards compatibility
- url_prefix = "http://sentry.example.com"
- settings.SENTRY_URL_PREFIX = url_prefix
-
if settings.TIME_ZONE != "UTC":
# non-UTC timezones are not supported
show_big_error("TIME_ZONE should be set to UTC")
@@ -711,3 +697,9 @@ def validate_outbox_config() -> None:
for outbox_name in settings.SENTRY_OUTBOX_MODELS["REGION"]:
RegionOutboxBase.from_outbox_name(outbox_name)
+
+
+def import_grouptype() -> None:
+ from sentry.issues.grouptype import import_grouptype
+
+ import_grouptype()
diff --git a/src/sentry/scim/endpoints/teams.py b/src/sentry/scim/endpoints/teams.py
index 28dedbdb04445..195b9fab0948d 100644
--- a/src/sentry/scim/endpoints/teams.py
+++ b/src/sentry/scim/endpoints/teams.py
@@ -302,6 +302,7 @@ class OrganizationSCIMTeamDetails(SCIMEndpoint, TeamDetailsEndpoint):
"PATCH": ApiPublishStatus.PUBLIC,
}
permission_classes = (OrganizationSCIMTeamPermission,)
+ _allow_idp_changes = True
def convert_args(
self, request: Request, organization_id_or_slug: int | str, team_id, *args, **kwargs
diff --git a/src/sentry/search/eap/columns.py b/src/sentry/search/eap/columns.py
new file mode 100644
index 0000000000000..1a690a4b1a101
--- /dev/null
+++ b/src/sentry/search/eap/columns.py
@@ -0,0 +1,279 @@
+from collections.abc import Callable
+from dataclasses import dataclass
+from typing import Any
+
+from sentry_protos.snuba.v1.trace_item_attribute_pb2 import (
+ AttributeAggregation,
+ AttributeKey,
+ Function,
+ VirtualColumnContext,
+)
+
+from sentry.exceptions import InvalidSearchQuery
+from sentry.search.eap import constants
+from sentry.search.events.types import SnubaParams
+from sentry.utils.validators import is_event_id, is_span_id
+
+
+@dataclass(frozen=True)
+class ResolvedColumn:
+ # The alias for this column
+ public_alias: str # `p95() as foo` has the public alias `foo` and `p95()` has the public alias `p95()`
+ # The internal rpc alias for this column
+ internal_name: str | Function.ValueType
+ # The public type for this column
+ search_type: str
+ # The internal rpc type for this column, optional as it can mostly be inferred from search_type
+ internal_type: AttributeKey.Type.ValueType | None = None
+ # Only for aggregates, we only support functions with 1 argument right now
+ argument: AttributeKey | None = None
+ # Processor is the function run in the post process step to transform a row into the final result
+ processor: Callable[[Any], Any] | None = None
+ # Validator to check if the value in a query is correct
+ validator: Callable[[Any], bool] | None = None
+
+ def process_column(row: Any) -> None:
+ """Pull the column from row, then process it and mutate it"""
+ raise NotImplementedError()
+
+ def validate(self, value: Any) -> None:
+ if self.validator is not None:
+ if not self.validator(value):
+ raise InvalidSearchQuery(f"{value} is an invalid value for {self.public_alias}")
+
+ @property
+ def proto_definition(self) -> AttributeAggregation | AttributeKey:
+ """The definition of this function as needed by the RPC"""
+ if isinstance(self.internal_name, Function.ValueType):
+ return AttributeAggregation(
+ aggregate=self.internal_name,
+ key=self.argument,
+ label=self.public_alias,
+ )
+ else:
+ return AttributeKey(
+ name=self.internal_name,
+ type=self.internal_type
+ if self.internal_type is not None
+ else constants.TYPE_MAP[self.search_type],
+ )
+
+
+@dataclass
+class ArgumentDefinition:
+ argument_type: str | None = None
+ # The public alias for the default arg, the SearchResolver will resolve this value
+ default_arg: str | None = None
+ # Whether this argument is completely ignored, used for `count()`
+ ignored: bool = False
+
+
+@dataclass
+class FunctionDefinition:
+ internal_function: Function.ValueType
+ # the search_type the argument should be
+ arguments: list[ArgumentDefinition]
+ # The public type for this column
+ search_type: str
+
+ @property
+ def required_arguments(self) -> list[ArgumentDefinition]:
+ return [arg for arg in self.arguments if arg.default_arg is None and not arg.ignored]
+
+
+SPAN_COLUMN_DEFINITIONS = {
+ column.public_alias: column
+ for column in [
+ ResolvedColumn(
+ public_alias="id",
+ internal_name="span_id",
+ search_type="string",
+ validator=is_span_id,
+ ),
+ ResolvedColumn(
+ public_alias="organization.id", internal_name="organization_id", search_type="string"
+ ),
+ ResolvedColumn(
+ public_alias="span.action",
+ internal_name="action",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="span.description",
+ internal_name="name",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="description",
+ internal_name="name",
+ search_type="string",
+ ),
+ # Message maps to description, this is to allow wildcard searching
+ ResolvedColumn(
+ public_alias="message",
+ internal_name="name",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="span.domain", internal_name="attr_str[domain]", search_type="string"
+ ),
+ ResolvedColumn(
+ public_alias="span.group", internal_name="attr_str[group]", search_type="string"
+ ),
+ ResolvedColumn(public_alias="span.op", internal_name="attr_str[op]", search_type="string"),
+ ResolvedColumn(
+ public_alias="span.category", internal_name="attr_str[category]", search_type="string"
+ ),
+ ResolvedColumn(
+ public_alias="span.self_time", internal_name="exclusive_time_ms", search_type="duration"
+ ),
+ ResolvedColumn(
+ public_alias="span.duration", internal_name="duration_ms", search_type="duration"
+ ),
+ ResolvedColumn(
+ public_alias="span.status", internal_name="attr_str[status]", search_type="string"
+ ),
+ ResolvedColumn(
+ public_alias="trace",
+ internal_name="trace_id",
+ search_type="string",
+ validator=is_event_id,
+ ),
+ ResolvedColumn(
+ public_alias="messaging.destination.name",
+ internal_name="attr_str[messaging.destination.name]",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="messaging.message.id",
+ internal_name="attr_str[messaging.message.id]",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="span.status_code",
+ internal_name="attr_str[status_code]",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="replay.id", internal_name="attr_str[replay_id]", search_type="string"
+ ),
+ ResolvedColumn(
+ public_alias="span.ai.pipeline.group",
+ internal_name="attr_str[ai_pipeline_group]",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="trace.status",
+ internal_name="attr_str[trace.status]",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="browser.name",
+ internal_name="attr_str[browser.name]",
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="ai.total_cost",
+ internal_name="attr_num[ai.total_cost]",
+ search_type="number",
+ ),
+ ResolvedColumn(
+ public_alias="ai.total_tokens.used",
+ internal_name="attr_num[ai_total_tokens_used]",
+ search_type="number",
+ ),
+ ResolvedColumn(
+ public_alias="project",
+ internal_name="project_id",
+ internal_type=constants.INT,
+ search_type="string",
+ ),
+ ResolvedColumn(
+ public_alias="project.slug",
+ internal_name="project_id",
+ search_type="string",
+ internal_type=constants.INT,
+ ),
+ ]
+}
+
+
+def project_context_constructor(column_name: str) -> Callable[[SnubaParams], VirtualColumnContext]:
+ def context_constructor(params: SnubaParams) -> VirtualColumnContext:
+ return VirtualColumnContext(
+ from_column_name="project_id",
+ to_column_name=column_name,
+ value_map={
+ str(project_id): project_name
+ for project_id, project_name in params.project_id_map.items()
+ },
+ )
+
+ return context_constructor
+
+
+VIRTUAL_CONTEXTS = {
+ "project": project_context_constructor("project"),
+ "project.slug": project_context_constructor("project.slug"),
+}
+
+
+SPAN_FUNCTION_DEFINITIONS = {
+ "sum": FunctionDefinition(
+ internal_function=Function.FUNCTION_SUM,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "avg": FunctionDefinition(
+ internal_function=Function.FUNCTION_AVERAGE,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "count": FunctionDefinition(
+ internal_function=Function.FUNCTION_COUNT,
+ search_type="number",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "p50": FunctionDefinition(
+ internal_function=Function.FUNCTION_P50,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "p90": FunctionDefinition(
+ internal_function=Function.FUNCTION_P90,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "p95": FunctionDefinition(
+ internal_function=Function.FUNCTION_P95,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "p99": FunctionDefinition(
+ internal_function=Function.FUNCTION_P99,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "max": FunctionDefinition(
+ internal_function=Function.FUNCTION_MAX,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "min": FunctionDefinition(
+ internal_function=Function.FUNCTION_MIN,
+ search_type="duration",
+ arguments=[ArgumentDefinition(argument_type="duration", default_arg="span.duration")],
+ ),
+ "count_unique": FunctionDefinition(
+ internal_function=Function.FUNCTION_UNIQ,
+ search_type="duration",
+ arguments=[
+ ArgumentDefinition(
+ argument_type="string",
+ )
+ ],
+ ),
+}
+
+
+Processors: dict[str, Callable[[Any], Any]] = {}
diff --git a/src/sentry/search/eap/constants.py b/src/sentry/search/eap/constants.py
new file mode 100644
index 0000000000000..7d61687b84845
--- /dev/null
+++ b/src/sentry/search/eap/constants.py
@@ -0,0 +1,28 @@
+from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey
+from sentry_protos.snuba.v1.trace_item_filter_pb2 import ComparisonFilter
+
+OPERATOR_MAP = {
+ "=": ComparisonFilter.OP_EQUALS,
+ "!=": ComparisonFilter.OP_NOT_EQUALS,
+ "IN": ComparisonFilter.OP_IN,
+ "NOT IN": ComparisonFilter.OP_NOT_IN,
+ ">": ComparisonFilter.OP_GREATER_THAN,
+ "<": ComparisonFilter.OP_LESS_THAN,
+ ">=": ComparisonFilter.OP_GREATER_THAN_OR_EQUALS,
+ "<=": ComparisonFilter.OP_LESS_THAN_OR_EQUALS,
+}
+IN_OPERATORS = ["IN", "NOT IN"]
+
+STRING = AttributeKey.TYPE_STRING
+BOOLEAN = AttributeKey.TYPE_BOOLEAN
+FLOAT = AttributeKey.TYPE_FLOAT
+INT = AttributeKey.TYPE_INT
+
+# TODO: we need a datetime type
+# Maps search types back to types for the proto
+TYPE_MAP = {
+ # TODO: need to update these to float once the proto supports float arrays
+ "number": INT,
+ "duration": INT,
+ "string": STRING,
+}
diff --git a/src/sentry/search/eap/spans.py b/src/sentry/search/eap/spans.py
new file mode 100644
index 0000000000000..efd4c6dc8c35f
--- /dev/null
+++ b/src/sentry/search/eap/spans.py
@@ -0,0 +1,375 @@
+from collections.abc import Sequence
+from dataclasses import dataclass, field
+from datetime import datetime
+from re import Match
+from typing import cast
+
+from parsimonious.exceptions import ParseError
+from sentry_protos.snuba.v1.trace_item_attribute_pb2 import (
+ AttributeKey,
+ AttributeValue,
+ IntArray,
+ StrArray,
+ VirtualColumnContext,
+)
+from sentry_protos.snuba.v1.trace_item_filter_pb2 import (
+ AndFilter,
+ ComparisonFilter,
+ OrFilter,
+ TraceItemFilter,
+)
+
+from sentry.api import event_search
+from sentry.exceptions import InvalidSearchQuery
+from sentry.search.eap import constants
+from sentry.search.eap.columns import (
+ SPAN_COLUMN_DEFINITIONS,
+ SPAN_FUNCTION_DEFINITIONS,
+ VIRTUAL_CONTEXTS,
+ ResolvedColumn,
+)
+from sentry.search.eap.types import SearchResolverConfig
+from sentry.search.events import constants as qb_constants
+from sentry.search.events import fields
+from sentry.search.events import filter as event_filter
+from sentry.search.events.types import SnubaParams
+
+
+@dataclass(frozen=True)
+class SearchResolver:
+ """The only attributes are things we want to cache and params
+
+ Please do not store any state on the SearchResolver
+ """
+
+ params: SnubaParams
+ config: SearchResolverConfig
+ resolved_columns: dict[str, ResolvedColumn] = field(default_factory=dict)
+
+ def resolve_query(self, querystring: str) -> TraceItemFilter | None:
+ """Given a query string in the public search syntax eg. `span.description:foo` construct the TraceItemFilter"""
+ try:
+ parsed_terms = event_search.parse_search_query(
+ querystring,
+ params=self.params.filter_params,
+ get_field_type=self.get_field_type,
+ )
+ except ParseError as e:
+ if e.expr is not None:
+ raise InvalidSearchQuery(f"Parse error: {e.expr.name} (column {e.column():d})")
+ else:
+ raise InvalidSearchQuery(f"Parse error for: {querystring}")
+
+ if any(
+ isinstance(term, event_search.ParenExpression)
+ or event_search.SearchBoolean.is_operator(term)
+ for term in parsed_terms
+ ):
+ return self._resolve_boolean_conditions(parsed_terms)
+ else:
+ return self._resolve_terms(parsed_terms)
+
+ def _resolve_boolean_conditions(
+ self, terms: event_filter.ParsedTerms
+ ) -> TraceItemFilter | None:
+ if len(terms) == 1:
+ if isinstance(terms[0], event_search.ParenExpression):
+ return self._resolve_boolean_conditions(terms[0].children)
+ elif isinstance(terms[0], event_search.SearchFilter):
+ return self._resolve_terms([cast(event_search.SearchFilter, terms[0])])
+ else:
+ raise NotImplementedError("Haven't handled all the search expressions yet")
+
+ # Filter out any ANDs since we can assume anything without an OR is an AND. Also do some
+ # basic sanitization of the query: can't have two operators next to each other, and can't
+ # start or end a query with an operator.
+ previous_term: event_filter.ParsedTerm | None = None
+ new_terms = []
+ term: event_filter.ParsedTerm | None = None
+ for term in terms:
+ if previous_term:
+ if event_search.SearchBoolean.is_operator(
+ previous_term
+ ) and event_search.SearchBoolean.is_operator(term):
+ raise InvalidSearchQuery(
+ f"Missing condition in between two condition operators: '{previous_term} {term}'"
+ )
+ else:
+ if event_search.SearchBoolean.is_operator(term):
+ raise InvalidSearchQuery(
+ f"Condition is missing on the left side of '{term}' operator"
+ )
+
+ if term != event_search.SearchBoolean.BOOLEAN_AND:
+ new_terms.append(term)
+
+ previous_term = term
+
+ if term is not None and event_search.SearchBoolean.is_operator(term):
+ raise InvalidSearchQuery(f"Condition is missing on the right side of '{term}' operator")
+ terms = new_terms
+
+ # We put precedence on AND, which sort of counter-intuitively means we have to split the query
+ # on ORs first, so the ANDs are grouped together. Search through the query for ORs and split the
+ # query on each OR.
+ # We want to maintain a binary tree, so split the terms on the first OR we can find and recurse on
+ # the two sides. If there is no OR, split the first element out to AND
+ index = None
+ lhs, rhs = None, None
+ operator: type[OrFilter] | type[AndFilter] | None = None
+ try:
+ index = terms.index(event_search.SearchBoolean.BOOLEAN_OR)
+ lhs, rhs = terms[:index], terms[index + 1 :]
+ operator = OrFilter
+ except Exception:
+ lhs, rhs = terms[:1], terms[1:]
+ operator = AndFilter
+
+ resolved_lhs = self._resolve_boolean_conditions(lhs) if lhs else None
+ resolved_rhs = self._resolve_boolean_conditions(rhs) if rhs else None
+
+ if resolved_lhs is not None and resolved_rhs is not None:
+ if operator == AndFilter:
+ return TraceItemFilter(and_filter=AndFilter(filters=[resolved_lhs, resolved_rhs]))
+ else:
+ return TraceItemFilter(or_filter=OrFilter(filters=[resolved_lhs, resolved_rhs]))
+ elif resolved_lhs is None and resolved_rhs is not None:
+ return resolved_rhs
+ elif resolved_lhs is not None and resolved_rhs is None:
+ return resolved_lhs
+ else:
+ return None
+
+ def _resolve_terms(self, terms: event_filter.ParsedTerms) -> TraceItemFilter | None:
+ parsed_terms = []
+ for item in terms:
+ if isinstance(item, event_search.SearchFilter):
+ resolved_column, context = self.resolve_column(item.key.name)
+ if item.operator in constants.OPERATOR_MAP:
+ operator = constants.OPERATOR_MAP[item.operator]
+ else:
+ raise InvalidSearchQuery(f"Unknown operator: {item.operator}")
+ if isinstance(resolved_column.proto_definition, AttributeKey):
+ parsed_terms.append(
+ TraceItemFilter(
+ comparison_filter=ComparisonFilter(
+ key=resolved_column.proto_definition,
+ op=operator,
+ value=self._resolve_search_value(
+ resolved_column, item.operator, item.value.raw_value
+ ),
+ )
+ )
+ )
+ else:
+ raise NotImplementedError("Can't filter on aggregates yet")
+ else:
+ raise NotImplementedError()
+
+ if len(parsed_terms) > 1:
+ return TraceItemFilter(and_filter=AndFilter(filters=parsed_terms))
+ elif len(parsed_terms) == 1:
+ return parsed_terms[0]
+ else:
+ return None
+
+ def _resolve_search_value(
+ self,
+ column: ResolvedColumn,
+ operator: str,
+ value: str | int | datetime | Sequence[int] | Sequence[str],
+ ) -> AttributeValue:
+ column.validate(value)
+ if isinstance(column.proto_definition, AttributeKey):
+ column_type = column.proto_definition.type
+ if column_type == constants.STRING:
+ if operator in constants.IN_OPERATORS:
+ if isinstance(value, list) and all(isinstance(item, str) for item in value):
+ return AttributeValue(val_str_array=StrArray(values=value))
+ else:
+ raise InvalidSearchQuery(
+ f"{value} is not a valid value for doing an IN filter"
+ )
+ else:
+ return AttributeValue(val_str=str(value))
+ elif column_type == constants.INT:
+ # These int casts are only necessary because floats aren't supported in the proto yet
+ if operator in constants.IN_OPERATORS:
+ if isinstance(value, list):
+ return AttributeValue(
+ val_int_array=IntArray(values=[int(val) for val in value])
+ )
+ else:
+ raise InvalidSearchQuery(
+ f"{value} is not a valid value for doing an IN filter"
+ )
+ elif isinstance(value, (int, float)):
+ return AttributeValue(val_int=int(value))
+ raise InvalidSearchQuery(
+ f"{value} is not a valid filter value for {column.public_alias}"
+ )
+ else:
+ raise NotImplementedError("Aggregate Queries not implemented yet")
+
+ def resolve_columns(
+ self, selected_columns: list[str]
+ ) -> tuple[list[ResolvedColumn], list[VirtualColumnContext]]:
+ """Given a list of columns resolve them and get their context if applicable
+
+ This function will also dedupe the virtual column contexts if necessary
+ """
+ raise NotImplementedError()
+ # go from public alias -> rpc
+        # p = Processors(parsed_column_name)
+ # return [ResolvedColumn()]
+
+ def resolve_column(self, column: str) -> tuple[ResolvedColumn, VirtualColumnContext | None]:
+ """Column is either an attribute or an aggregate, this function will determine which it is and call the relevant
+ resolve function"""
+ match = fields.is_function(column)
+ if match:
+ return self.resolve_aggregate(column, match)
+ else:
+ return self.resolve_attribute(column)
+
+ # TODO: Cache the column
+        # self.resolved_column[alias] = ResolvedColumn()
+ # return ResolvedColumn()
+
+ def get_field_type(self, column: str) -> str:
+ resolved_column, _ = self.resolve_column(column)
+ return resolved_column.search_type
+
+ def resolve_attributes(
+ self, columns: list[str]
+ ) -> tuple[list[ResolvedColumn], list[VirtualColumnContext | None]]:
+ """Helper function to resolve a list of attributes instead of 1 attribute at a time"""
+ resolved_columns = []
+ resolved_contexts = []
+ for column in columns:
+ col, context = self.resolve_attribute(column)
+ resolved_columns.append(col)
+ resolved_contexts.append(context)
+ return resolved_columns, resolved_contexts
+
+ def resolve_attribute(self, column: str) -> tuple[ResolvedColumn, VirtualColumnContext | None]:
+ """Attributes are columns that aren't 'functions' or 'aggregates', usually this means string or numeric
+ attributes (aka. tags), but can also refer to fields like span.description"""
+ if column in SPAN_COLUMN_DEFINITIONS:
+ column_definition = SPAN_COLUMN_DEFINITIONS[column]
+ else:
+ # If the column isn't predefined handle it as a tag
+ tag_match = qb_constants.TYPED_TAG_KEY_RE.search(column)
+ if tag_match is None:
+ tag_match = qb_constants.TAG_KEY_RE.search(column)
+ field_type = "string"
+ else:
+ field_type = None
+ field = tag_match.group("tag") if tag_match else None
+ if field is None:
+ raise InvalidSearchQuery(f"Could not parse {column}")
+ # Assume string if a type isn't passed. eg. tags[foo]
+ if field_type is None:
+ field_type = tag_match.group("type") if tag_match else None
+
+ if field_type not in constants.TYPE_MAP:
+ raise InvalidSearchQuery(f"Unsupported type {field_type} in {column}")
+ internal_name = f"attr_str[{field}]" if field_type == "string" else f"attr_num[{field}]"
+ return (
+ ResolvedColumn(
+ public_alias=column, internal_name=internal_name, search_type=field_type
+ ),
+ None,
+ )
+
+ if column in VIRTUAL_CONTEXTS:
+ column_context = VIRTUAL_CONTEXTS[column](self.params)
+ else:
+ column_context = None
+
+ if column_definition:
+ return column_definition, column_context
+ else:
+ raise InvalidSearchQuery(f"Could not parse {column}")
+
+ def resolve_aggregates(
+ self, columns: list[str]
+ ) -> tuple[list[ResolvedColumn], list[VirtualColumnContext | None]]:
+ """Helper function to resolve a list of aggregates instead of 1 attribute at a time"""
+ resolved_aggregates, resolved_contexts = [], []
+ for column in columns:
+ aggregate, context = self.resolve_aggregate(column)
+ resolved_aggregates.append(aggregate)
+ resolved_contexts.append(context)
+ return resolved_aggregates, resolved_contexts
+
+ def resolve_aggregate(
+ self, column: str, match: Match | None = None
+ ) -> tuple[ResolvedColumn, VirtualColumnContext | None]:
+ # Check if this is a valid function, parse the function name and args out
+ if match is None:
+ match = fields.is_function(column)
+ if match is None:
+ raise InvalidSearchQuery(f"{column} is not an aggregate")
+
+ function = match.group("function")
+ columns = match.group("columns")
+ # Alias defaults to the name of the function
+ alias = match.group("alias") or column
+
+ # Get the function definition
+ if function not in SPAN_FUNCTION_DEFINITIONS:
+ raise InvalidSearchQuery(f"Unknown function {function}")
+ function_definition = SPAN_FUNCTION_DEFINITIONS[function]
+
+ parsed_columns = []
+
+ # Parse the arguments
+ attribute_args = fields.parse_arguments(function, columns)
+ if len(attribute_args) < len(function_definition.required_arguments):
+ raise InvalidSearchQuery(
+ f"Invalid number of arguments for {function}, was expecting {len(function_definition.required_arguments)} arguments"
+ )
+
+ for index, argument in enumerate(function_definition.arguments):
+ if argument.ignored:
+ continue
+ if index < len(attribute_args):
+ parsed_argument, _ = self.resolve_attribute(attribute_args[index])
+ elif argument.default_arg:
+ parsed_argument, _ = self.resolve_attribute(argument.default_arg)
+ else:
+ raise InvalidSearchQuery(
+ f"Invalid number of arguments for {function}, was expecting {len(function_definition.required_arguments)} arguments"
+ )
+
+ if (
+ argument.argument_type is not None
+ and parsed_argument.search_type != argument.argument_type
+ ):
+ raise InvalidSearchQuery(
+ f"{argument} is invalid for {function}, its a {parsed_argument.search_type} type field but {function} expects a {argument.argument_type} type field"
+ )
+ parsed_columns.append(parsed_argument)
+
+ # Proto doesn't support anything more than 1 argument yet
+ if len(parsed_columns) > 1:
+ raise InvalidSearchQuery("Cannot use more than one argument")
+ elif len(parsed_columns) == 1:
+ resolved_argument = (
+ parsed_columns[0].proto_definition
+ if isinstance(parsed_columns[0].proto_definition, AttributeKey)
+ else None
+ )
+ else:
+ resolved_argument = None
+
+ return (
+ ResolvedColumn(
+ public_alias=alias,
+ internal_name=function_definition.internal_function,
+ search_type=function_definition.search_type,
+ argument=resolved_argument,
+ ),
+ None,
+ )
diff --git a/src/sentry/search/eap/types.py b/src/sentry/search/eap/types.py
new file mode 100644
index 0000000000000..aba1427fbb33d
--- /dev/null
+++ b/src/sentry/search/eap/types.py
@@ -0,0 +1,8 @@
+class SearchResolverConfig:
+ # Automatically add id, etc. if there are no aggregates
+ auto_fields: bool = False
+ # Ignore aggregate conditions, if false the query will run but not use any aggregate conditions
+ use_aggregate_conditions: bool = True
+ # TODO: do we need parser_config_overrides? it looks like its just for alerts
+ # Whether to process the results from snuba
+ process_results: bool = True
diff --git a/src/sentry/search/events/builder/base.py b/src/sentry/search/events/builder/base.py
index 0e69f09db0572..b02a6cf7b181f 100644
--- a/src/sentry/search/events/builder/base.py
+++ b/src/sentry/search/events/builder/base.py
@@ -56,7 +56,7 @@
SnubaParams,
WhereType,
)
-from sentry.snuba.dataset import Dataset
+from sentry.snuba.dataset import Dataset, EntityKey
from sentry.snuba.metrics.utils import MetricMeta
from sentry.snuba.query_sources import QuerySource
from sentry.users.services.user.service import user_service
@@ -75,6 +75,12 @@
)
from sentry.utils.validators import INVALID_ID_DETAILS, INVALID_SPAN_ID, WILDCARD_NOT_ALLOWED
+DATASET_TO_ENTITY_MAP: Mapping[Dataset, EntityKey] = {
+ Dataset.Events: EntityKey.Events,
+ Dataset.Transactions: EntityKey.Transactions,
+ Dataset.EventsAnalyticsPlatform: EntityKey.EAPSpans,
+}
+
class BaseQueryBuilder:
requires_organization_condition: bool = False
@@ -303,7 +309,7 @@ def resolve_time_conditions(self) -> None:
self.end = self.params.end
def resolve_column_name(self, col: str) -> str:
- # TODO when utils/snuba.py becomes typed don't need this extra annotation
+ # TODO: when utils/snuba.py becomes typed don't need this extra annotation
column_resolver: Callable[[str], str] = resolve_column(self.dataset)
column_name = column_resolver(col)
# If the original column was passed in as tag[X], then there won't be a conflict
@@ -321,20 +327,20 @@ def resolve_query(
equations: list[str] | None = None,
orderby: list[str] | str | None = None,
) -> None:
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_query"):
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_time_conditions"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_query"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_time_conditions"):
# Has to be done early, since other conditions depend on start and end
self.resolve_time_conditions()
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_conditions"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_conditions"):
self.where, self.having = self.resolve_conditions(query)
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_params"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_params"):
# params depends on parse_query, and conditions being resolved first since there may be projects in conditions
self.where += self.resolve_params()
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_columns"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_columns"):
self.columns = self.resolve_select(selected_columns, equations)
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_orderby"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_orderby"):
self.orderby = self.resolve_orderby(orderby)
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_groupby"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_groupby"):
self.groupby = self.resolve_groupby(groupby_columns)
def parse_config(self) -> None:
@@ -1309,7 +1315,12 @@ def default_filter_converter(
if search_filter.operator in ("=", "!=") and search_filter.value.value == "":
if is_tag or is_attr or is_context or name in self.config.non_nullable_keys:
return Condition(lhs, Op(search_filter.operator), value)
- else:
+ elif is_measurement(name):
+                # Measurements can be a `Column` (e.g., `"lcp"`) or a `Function` (e.g., `"frames_frozen_rate"`). In either case, since they are nullable, return a simple null check
+ return Condition(
+ Function("isNull", [lhs]), Op.EQ, 1 if search_filter.operator == "=" else 0
+ )
+ elif isinstance(lhs, Column):
# If not a tag, we can just check that the column is null.
return Condition(Function("isNull", [lhs]), Op(search_filter.operator), 1)
@@ -1492,17 +1503,19 @@ def get_public_alias(self, function: CurriedFunction) -> str:
"""
return self.function_alias_map[function.alias].field
- def _get_dataset_name(self) -> str:
+ def _get_entity_name(self) -> str:
+ if self.dataset in DATASET_TO_ENTITY_MAP:
+ return DATASET_TO_ENTITY_MAP[self.dataset].value
return self.dataset.value
def get_snql_query(self) -> Request:
self.validate_having_clause()
return Request(
- dataset=self._get_dataset_name(),
+ dataset=self.dataset.value,
app_id="default",
query=Query(
- match=Entity(self.dataset.value, sample=self.sample_rate),
+ match=Entity(self._get_entity_name(), sample=self.sample_rate),
select=self.columns,
array_join=self.array_join,
where=self.where,
@@ -1535,7 +1548,7 @@ def run_query(
return raw_snql_query(self.get_snql_query(), referrer, use_cache, query_source)
def process_results(self, results: Any) -> EventsResponse:
- with sentry_sdk.start_span(op="QueryBuilder", description="process_results") as span:
+ with sentry_sdk.start_span(op="QueryBuilder", name="process_results") as span:
span.set_data("result_count", len(results.get("data", [])))
translated_columns = self.alias_to_typed_tag_map
if self.builder_config.transform_alias_to_input_format:
diff --git a/src/sentry/search/events/builder/discover.py b/src/sentry/search/events/builder/discover.py
index 09625ba7c4794..b483ac99bd1b1 100644
--- a/src/sentry/search/events/builder/discover.py
+++ b/src/sentry/search/events/builder/discover.py
@@ -221,10 +221,10 @@ def select(self) -> list[SelectType]:
def get_snql_query(self) -> Request:
return Request(
- dataset=self._get_dataset_name(),
+ dataset=self.dataset.value,
app_id="default",
query=Query(
- match=Entity(self.dataset.value),
+ match=Entity(self._get_entity_name()),
select=self.select,
where=self.where,
having=self.having,
diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py
index bf936489c71fd..2334229576210 100644
--- a/src/sentry/search/events/builder/metrics.py
+++ b/src/sentry/search/events/builder/metrics.py
@@ -469,10 +469,10 @@ def resolve_query(
orderby: list[str] | None = None,
) -> None:
# Resolutions that we always must perform, irrespectively of on demand.
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_time_conditions"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_time_conditions"):
# Has to be done early, since other conditions depend on start and end
self.resolve_time_conditions()
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_granularity"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_granularity"):
# Needs to happen before params and after time conditions since granularity can change start&end
self.granularity = self.resolve_granularity()
if self.start is not None:
@@ -484,17 +484,17 @@ def resolve_query(
# for building an on demand query we only require a time interval and granularity. All the other fields are
# automatically computed given the OnDemandMetricSpec.
if not self.use_on_demand:
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_conditions"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_conditions"):
self.where, self.having = self.resolve_conditions(query)
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_params"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_params"):
# params depends on parse_query, and conditions being resolved first since there may be projects
# in conditions
self.where += self.resolve_params()
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_columns"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_columns"):
self.columns = self.resolve_select(selected_columns, equations)
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_orderby"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_orderby"):
self.orderby = self.resolve_orderby(orderby)
- with sentry_sdk.start_span(op="QueryBuilder", description="resolve_groupby"):
+ with sentry_sdk.start_span(op="QueryBuilder", name="resolve_groupby"):
self.groupby = self.resolve_groupby(groupby_columns)
else:
# On demand still needs to call resolve since resolving columns has a side_effect
@@ -1091,7 +1091,7 @@ def convert_metric_layer_result(self, metrics_data_list: Any) -> Any:
one"""
seen_metrics_metas = {}
seen_total_keys = set()
- with sentry_sdk.start_span(op="metric_layer", description="transform_results"):
+ with sentry_sdk.start_span(op="metric_layer", name="transform_results"):
metric_layer_result: Any = {
"data": [],
"meta": [],
@@ -1271,7 +1271,7 @@ def run_query(
extra_conditions = None
try:
metrics_queries = []
- with sentry_sdk.start_span(op="metric_layer", description="transform_query"):
+ with sentry_sdk.start_span(op="metric_layer", name="transform_query"):
if self.use_on_demand:
aggregates = self._get_aggregates()
group_bys = self._get_group_bys()
@@ -1296,7 +1296,7 @@ def run_query(
)
metrics_data = []
for metrics_query in metrics_queries:
- with sentry_sdk.start_span(op="metric_layer", description="run_query"):
+ with sentry_sdk.start_span(op="metric_layer", name="run_query"):
metrics_data.append(
get_series(
projects=self.params.projects,
@@ -1308,7 +1308,7 @@ def run_query(
)
except Exception as err:
raise IncompatibleMetricsQuery(err)
- with sentry_sdk.start_span(op="metric_layer", description="transform_results"):
+ with sentry_sdk.start_span(op="metric_layer", name="transform_results"):
metric_layer_result = self.convert_metric_layer_result(metrics_data)
for row in metric_layer_result["data"]:
# Arrays in clickhouse cannot contain multiple types, and since groupby values
@@ -1795,7 +1795,7 @@ def run_query(
try:
metrics_queries = []
- with sentry_sdk.start_span(op="metric_layer", description="transform_query"):
+ with sentry_sdk.start_span(op="metric_layer", name="transform_query"):
if self.use_on_demand:
# Using timeseries columns here since epm(%d) etc is resolved.
for agg in self.selected_columns:
@@ -1813,7 +1813,7 @@ def run_query(
)
)
metrics_data = []
- with sentry_sdk.start_span(op="metric_layer", description="run_query"):
+ with sentry_sdk.start_span(op="metric_layer", name="run_query"):
for metrics_query in metrics_queries:
metrics_data.append(
get_series(
@@ -1827,7 +1827,7 @@ def run_query(
except Exception as err:
raise IncompatibleMetricsQuery(err)
- with sentry_sdk.start_span(op="metric_layer", description="transform_results"):
+ with sentry_sdk.start_span(op="metric_layer", name="transform_results"):
result = self._metric_layer_result(metrics_data, use_first_group_only=False)
return result
@@ -2059,7 +2059,7 @@ def run_query(
try:
metrics_queries = []
- with sentry_sdk.start_span(op="metric_layer", description="transform_query"):
+ with sentry_sdk.start_span(op="metric_layer", name="transform_query"):
if self.use_on_demand:
group_bys = self._get_group_bys()
@@ -2091,7 +2091,7 @@ def run_query(
)
metrics_data = []
for metrics_query in metrics_queries:
- with sentry_sdk.start_span(op="metric_layer", description="run_query"):
+ with sentry_sdk.start_span(op="metric_layer", name="run_query"):
metrics_data.append(
get_series(
projects=self.params.projects,
@@ -2107,7 +2107,7 @@ def run_query(
)
except Exception as err:
raise IncompatibleMetricsQuery(err)
- with sentry_sdk.start_span(op="metric_layer", description="transform_results"):
+ with sentry_sdk.start_span(op="metric_layer", name="transform_results"):
result = self._metric_layer_result(metrics_data, use_first_group_only=False)
return result
diff --git a/src/sentry/search/events/builder/spans_indexed.py b/src/sentry/search/events/builder/spans_indexed.py
index 7a6f31ec3561b..78ad9bc9d05e1 100644
--- a/src/sentry/search/events/builder/spans_indexed.py
+++ b/src/sentry/search/events/builder/spans_indexed.py
@@ -11,7 +11,6 @@
)
from sentry.search.events.fields import custom_time_processor
from sentry.search.events.types import SelectType
-from sentry.snuba.dataset import Dataset
SPAN_UUID_FIELDS = {
"trace",
@@ -69,11 +68,6 @@ class SpansEAPQueryBuilder(SpansIndexedQueryBuilderMixin, BaseQueryBuilder):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- def _get_dataset_name(self) -> str:
- if self.dataset == Dataset.SpansEAP:
- return "events_analytics_platform"
- return self.dataset.value
-
def resolve_field(self, raw_field: str, alias: bool = False) -> Column:
# try the typed regex first
if len(raw_field) <= 200:
@@ -89,7 +83,33 @@ def resolve_field(self, raw_field: str, alias: bool = False) -> Column:
# attr field is less permissive than tags, we can't have - in them
or "-" in field
):
- return super().resolve_field(raw_field, alias)
+ # Temporary until at least after 22 Dec 2024 when old data rotates out, otherwise we should just call super
+ # here and return default_field without any extra work
+ default_field = super().resolve_field(raw_field, alias)
+ if (
+ isinstance(default_field, Column)
+ and default_field.subscriptable == "attr_str"
+ or isinstance(default_field, AliasedExpression)
+ and default_field.exp.subscriptable == "attr_str"
+ ):
+ key = (
+ default_field.key
+ if isinstance(default_field, Column)
+ else default_field.exp.key
+ )
+ unprefixed_field = Column(f"attr_str[{key}]")
+ prefixed_field = Column(f"attr_str[sentry.{key}]")
+ return Function(
+ "if",
+ [
+ Function("mapContains", [Column("attr_str"), key]),
+ unprefixed_field,
+ prefixed_field,
+ ],
+ raw_field if alias else None,
+ )
+ else:
+ return default_field
if field_type not in ["number", "string"]:
raise InvalidSearchQuery(
@@ -97,17 +117,33 @@ def resolve_field(self, raw_field: str, alias: bool = False) -> Column:
)
if field_type == "string":
- col = Column(f"attr_str[{field}]")
+ attr_type = "attr_str"
+ field_col = Column(f"attr_str[{field}]")
else:
- col = Column(f"attr_num[{field}]")
+ attr_type = "attr_num"
+ field_col = Column(f"attr_num[{field}]")
if alias:
field_alias = f"tags_{field}@{field_type}"
self.typed_tag_to_alias_map[raw_field] = field_alias
self.alias_to_typed_tag_map[field_alias] = raw_field
- return AliasedExpression(col, field_alias)
else:
- return col
+ field_alias = None
+
+ # Temporary until at least after 22 Dec 2024 when old data rotates out
+ unprefixed_field = field_col
+ prefixed_field = Column(f"{attr_type}[sentry.{field}]")
+ col = Function(
+ "if",
+ [
+ Function("mapContains", [Column(attr_type), field]),
+ unprefixed_field,
+ prefixed_field,
+ ],
+ field_alias,
+ )
+
+ return col
class TimeseriesSpanIndexedQueryBuilder(SpansIndexedQueryBuilderMixin, TimeseriesQueryBuilder):
diff --git a/src/sentry/search/events/datasets/discover.py b/src/sentry/search/events/datasets/discover.py
index 6aab1a9c4a24a..8ded960337df6 100644
--- a/src/sentry/search/events/datasets/discover.py
+++ b/src/sentry/search/events/datasets/discover.py
@@ -380,7 +380,10 @@ def function_converter(self) -> Mapping[str, SnQLFunction]:
SnQLFunction(
"to_other",
required_args=[
- ColumnArg("column", allowed_columns=["release", "trace.parent_span", "id"]),
+ ColumnArg(
+ "column",
+ allowed_columns=["release", "trace.parent_span", "id", "trace.span"],
+ ),
SnQLStringArg("value", unquote=True, unescape_quotes=True),
],
optional_args=[
diff --git a/src/sentry/search/events/datasets/field_aliases.py b/src/sentry/search/events/datasets/field_aliases.py
index 14dd9d4974f0b..ee916b954b803 100644
--- a/src/sentry/search/events/datasets/field_aliases.py
+++ b/src/sentry/search/events/datasets/field_aliases.py
@@ -93,11 +93,11 @@ def resolve_span_module(builder: BaseQueryBuilder, alias: str) -> SelectType:
return Function(
"if",
[
- Function("in", [builder.column("span.op"), list(OP_MAPPING.keys())]),
+ Function("in", [builder.resolve_field("span.op"), list(OP_MAPPING.keys())]),
Function(
"transform",
[
- builder.column("span.op"),
+ builder.resolve_field("span.op"),
list(OP_MAPPING.keys()),
list(OP_MAPPING.values()),
"other",
@@ -106,7 +106,7 @@ def resolve_span_module(builder: BaseQueryBuilder, alias: str) -> SelectType:
Function(
"transform",
[
- builder.column("span.category"),
+ builder.resolve_field("span.category"),
constants.SPAN_MODULE_CATEGORY_VALUES,
constants.SPAN_MODULE_CATEGORY_VALUES,
"other",
diff --git a/src/sentry/search/events/datasets/spans_indexed.py b/src/sentry/search/events/datasets/spans_indexed.py
index a5545ec844667..b45619937fc59 100644
--- a/src/sentry/search/events/datasets/spans_indexed.py
+++ b/src/sentry/search/events/datasets/spans_indexed.py
@@ -2,8 +2,10 @@
from collections.abc import Callable, Mapping
+from django.utils.functional import cached_property
from snuba_sdk import Column, Direction, Function, OrderBy
+from sentry import options
from sentry.api.event_search import SearchFilter
from sentry.exceptions import InvalidSearchQuery
from sentry.search.events import constants
@@ -21,6 +23,7 @@
NumericColumn,
SnQLFieldColumn,
SnQLFunction,
+ SnQLStringArg,
with_default,
)
from sentry.search.events.types import SelectType, WhereType
@@ -562,12 +565,208 @@ def _resolve_span_duration(self, alias: str) -> SelectType:
alias,
)
+ def _resolve_aggregate_if(
+ self, aggregate: str
+ ) -> Callable[[Mapping[str, str | Column | SelectType | int | float], str | None], SelectType]:
+ def extract_attr(
+ column: str | Column | SelectType | int | float,
+ ) -> tuple[Column, str] | None:
+ # This check exists to handle the temporary prefixing.
+ # Once that's removed, this condition should become much simpler
+
+ if not isinstance(column, Function):
+ return None
+
+ if column.function != "if":
+ return None
+
+ if len(column.parameters) != 3:
+ return None
+
+ if (
+ not isinstance(column.parameters[0], Function)
+ or column.parameters[0].function != "mapContains"
+ or len(column.parameters[0].parameters) != 2
+ ):
+ return None
+
+ attr_col = column.parameters[0].parameters[0]
+ attr_name = column.parameters[0].parameters[1]
+
+ if not isinstance(attr_col, Column) or not isinstance(attr_name, str):
+ return None
+
+ return attr_col, attr_name
+
+ def resolve_aggregate_if(
+ args: Mapping[str, str | Column | SelectType | int | float],
+ alias: str | None = None,
+ ) -> SelectType:
+ attr = extract_attr(args["column"])
+
+ # If we're not aggregating on an attr column,
+ # we can directly aggregate on the column
+ if attr is None:
+ return Function(
+ f"{aggregate}",
+ [args["column"]],
+ alias,
+ )
+
+ # When aggregating on an attr column, we have to make sure that we skip rows
+ # where the attr does not exist.
+ attr_col, attr_name = attr
+
+ function = (
+ aggregate.replace("quantile", "quantileIf")
+ if aggregate.startswith("quantile(")
+ else f"{aggregate}If"
+ )
+
+ unprefixed = Function("mapContains", [attr_col, attr_name])
+ prefixed = Function("mapContains", [attr_col, f"sentry.{attr_name}"])
+
+ return Function(
+ function,
+ [
+ args["column"],
+ Function("or", [unprefixed, prefixed]),
+ ],
+ alias,
+ )
+
+ return resolve_aggregate_if
+
@property
def function_converter(self) -> dict[str, SnQLFunction]:
- existing_functions = super().function_converter
function_converter = {
function.name: function
for function in [
+ SnQLFunction(
+ "eps",
+ snql_aggregate=lambda args, alias: Function(
+ "divide", [Function("count", []), args["interval"]], alias
+ ),
+ optional_args=[IntervalDefault("interval", 1, None)],
+ default_result_type="rate",
+ ),
+ SnQLFunction(
+ "epm",
+ snql_aggregate=lambda args, alias: Function(
+ "divide",
+ [Function("count", []), Function("divide", [args["interval"], 60])],
+ alias,
+ ),
+ optional_args=[IntervalDefault("interval", 1, None)],
+ default_result_type="rate",
+ ),
+ SnQLFunction(
+ "count",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("count"),
+ default_result_type="integer",
+ ),
+ SnQLFunction(
+ "count_unique",
+ required_args=[ColumnTagArg("column")],
+ snql_aggregate=lambda args, alias: Function("uniq", [args["column"]], alias),
+ default_result_type="integer",
+ ),
+ SnQLFunction(
+ "sum",
+ required_args=[NumericColumn("column", spans=True)],
+ snql_aggregate=self._resolve_aggregate_if("sum"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ ),
+ SnQLFunction(
+ "avg",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("avg"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "p50",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("quantile(0.5)"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "p75",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("quantile(0.75)"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "p90",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("quantile(0.90)"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "p95",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("quantile(0.95)"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "p99",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("quantile(0.99)"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "p100",
+ optional_args=[
+ with_default("span.duration", NumericColumn("column", spans=True)),
+ ],
+ snql_aggregate=self._resolve_aggregate_if("max"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "min",
+ required_args=[NumericColumn("column", spans=True)],
+ snql_aggregate=self._resolve_aggregate_if("min"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
+ SnQLFunction(
+ "max",
+ required_args=[NumericColumn("column", spans=True)],
+ snql_aggregate=self._resolve_aggregate_if("max"),
+ result_type_fn=self.reflective_result_type(),
+ default_result_type="duration",
+ redundant_grouping=True,
+ ),
SnQLFunction(
"count_weighted",
optional_args=[NullColumn("column")],
@@ -700,24 +899,77 @@ def function_converter(self) -> dict[str, SnQLFunction]:
),
SnQLFunction(
"margin_of_error",
+ optional_args=[with_default("fpc", SnQLStringArg("fpc"))],
snql_aggregate=self._resolve_margin_of_error,
default_result_type="number",
),
SnQLFunction(
"lower_count_limit",
+ optional_args=[with_default("fpc", SnQLStringArg("fpc"))],
snql_aggregate=self._resolve_lower_limit,
default_result_type="number",
),
SnQLFunction(
"upper_count_limit",
+ optional_args=[with_default("fpc", SnQLStringArg("fpc"))],
snql_aggregate=self._resolve_upper_limit,
default_result_type="number",
),
+ SnQLFunction(
+ "first_seen",
+ snql_aggregate=lambda args, alias: Function(
+ "toUnixTimestamp64Milli",
+ [Function("min", [Column("start_timestamp")])],
+ alias,
+ ),
+ default_result_type="duration",
+ private=True,
+ ),
+ SnQLFunction(
+ "last_seen",
+ snql_aggregate=lambda args, alias: Function(
+ "toUnixTimestamp64Milli",
+ [Function("max", [Column("end_timestamp")])],
+ alias,
+ ),
+ default_result_type="duration",
+ private=True,
+ ),
]
}
- existing_functions.update(function_converter)
- return existing_functions
+ for alias, name in constants.SPAN_FUNCTION_ALIASES.items():
+ if name in function_converter:
+ function_converter[alias] = function_converter[name].alias_as(alias)
+
+ return function_converter
+
+ @property
+ def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]:
+ existing_field_aliases: dict[str, Callable[[str], SelectType]] = {
+ **super().field_alias_converter
+ }
+
+ field_alias_converter: Mapping[str, Callable[[str], SelectType]] = {
+ constants.PRECISE_START_TS: lambda alias: Function(
+ "divide",
+ [
+ Function("toUnixTimestamp64Milli", [Column("start_timestamp")]),
+ 1000,
+ ],
+ alias,
+ ),
+ constants.PRECISE_FINISH_TS: lambda alias: Function(
+ "divide",
+ [
+ Function("toUnixTimestamp64Milli", [Column("end_timestamp")]),
+ 1000,
+ ],
+ alias,
+ ),
+ }
+ existing_field_aliases.update(field_alias_converter)
+ return existing_field_aliases
def _resolve_sum_weighted(
self,
@@ -744,8 +996,13 @@ def _resolve_count_weighted(
alias: str | None = None,
) -> SelectType:
return Function(
- "sum",
- [Function("multiply", [Column("sign"), self.sampling_weight])],
+ "round",
+ [
+ Function(
+ "sum",
+ [Function("multiply", [Column("sign"), self.sampling_weight])],
+ )
+ ],
alias,
)
@@ -778,28 +1035,36 @@ def _query_total_counts(self) -> tuple[float | int, float | int]:
self._cached_count_weighted = results["data"][0]["count_weighted"]
return self._cached_count, self._cached_count_weighted
+ @cached_property
+ def _zscore(self):
+ """Defaults to 1.96, based on a z score for a confidence level of 95%"""
+ return options.get("performance.extrapolation.confidence.z-score")
+
def _resolve_margin_of_error(
self,
args: Mapping[str, str | Column | SelectType | int | float],
alias: str | None = None,
) -> SelectType:
"""Calculates the Margin of error for a given value, but unfortunately basis the total count based on
- extrapolated data"""
+ extrapolated data
+ Z * Margin Of Error * Finite Population Correction
+ """
# both of these need to be aggregated without a query
total_samples, population_size = self._query_total_counts()
sampled_group = Function("count", [])
return Function(
"multiply",
[
- # Based on a z score for a confidence level of 95%
- 1.96,
+ self._zscore,
Function(
"multiply",
[
# Unadjusted Margin of Error
self._resolve_unadjusted_margin(sampled_group, total_samples),
# Finite Population Correction
- self._resolve_finite_population_correction(total_samples, population_size),
+ self._resolve_finite_population_correction(
+ args, total_samples, population_size
+ ),
],
),
],
@@ -809,6 +1074,7 @@ def _resolve_margin_of_error(
def _resolve_unadjusted_margin(
self, sampled_group: SelectType, total_samples: SelectType
) -> SelectType:
+ """sqrt((p(1 - p)) / (total_samples))"""
# Naming this p to match the formula
p = Function("divide", [sampled_group, total_samples])
return Function(
@@ -822,20 +1088,27 @@ def _resolve_unadjusted_margin(
def _resolve_finite_population_correction(
self,
+ args: Mapping[str, str | Column | SelectType | int | float],
total_samples: SelectType,
population_size: int | float,
) -> SelectType:
- return Function(
- "sqrt",
- [
- Function(
- "divide",
- [
- Function("minus", [population_size, total_samples]),
- Function("minus", [population_size, 1]),
- ],
- )
- ],
+ """sqrt((population_size - total_samples) / (population_size - 1))"""
+ return (
+ Function(
+ "sqrt",
+ [
+ Function(
+ "divide",
+ [
+ Function("minus", [population_size, total_samples]),
+ Function("minus", [population_size, 1]),
+ ],
+ )
+ ],
+ )
+ # if the arg is anything but `fpc` just return 1 so we're not correcting for a finite population
+ if args["fpc"] == "fpc"
+ else 1
)
def _resolve_lower_limit(
@@ -843,11 +1116,21 @@ def _resolve_lower_limit(
args: Mapping[str, str | Column | SelectType | int | float],
alias: str,
) -> SelectType:
- total_samples, _ = self._query_total_counts()
+ """round(max(0, proportion_by_sample - margin_of_error) * total_population)"""
+ _, total_population = self._query_total_counts()
sampled_group = Function("count", [])
- proportion_by_sample = Function("divide", [sampled_group, total_samples])
- return Function(
+ proportion_by_sample = Function(
"divide",
+ [
+ sampled_group,
+ Function(
+ "multiply", [total_population, Function("avg", [Column("sampling_factor")])]
+ ),
+ ],
+ "proportion_by_sample",
+ )
+ return Function(
+ "round",
[
Function(
"multiply",
@@ -867,11 +1150,9 @@ def _resolve_lower_limit(
]
],
),
- total_samples,
+ total_population,
],
- ),
- # Math assumes a single sampling_weight
- Function("avg", [Column("sampling_factor")]),
+ )
],
alias,
)
@@ -881,34 +1162,35 @@ def _resolve_upper_limit(
args: Mapping[str, str | Column | SelectType | int | float],
alias: str,
) -> SelectType:
- total_samples, _ = self._query_total_counts()
+ """round(max(0, proportion_by_sample + margin_of_error) * total_population)"""
+ _, total_population = self._query_total_counts()
sampled_group = Function("count", [])
- proportion_by_sample = Function("divide", [sampled_group, total_samples])
- return Function(
+ proportion_by_sample = Function(
"divide",
+ [
+ sampled_group,
+ Function(
+ "multiply", [total_population, Function("avg", [Column("sampling_factor")])]
+ ),
+ ],
+ "proportion_by_sample",
+ )
+ return Function(
+ "round",
[
Function(
"multiply",
[
Function(
- "arrayMin",
+ "plus",
[
- [
- 1,
- Function(
- "plus",
- [
- proportion_by_sample,
- self._resolve_margin_of_error(args, "margin_of_error"),
- ],
- ),
- ]
+ proportion_by_sample,
+ self._resolve_margin_of_error(args, "margin_of_error"),
],
),
- total_samples,
+ total_population,
],
- ),
- Function("avg", [Column("sampling_factor")]),
+ )
],
alias,
)
diff --git a/src/sentry/search/events/fields.py b/src/sentry/search/events/fields.py
index f2e2128b4911c..208283e1c781e 100644
--- a/src/sentry/search/events/fields.py
+++ b/src/sentry/search/events/fields.py
@@ -38,6 +38,7 @@
SEARCH_MAP,
TAG_KEY_RE,
TEAM_KEY_TRANSACTION_ALIAS,
+ TYPED_TAG_KEY_RE,
USER_DISPLAY_ALIAS,
VALID_FIELD_PATTERN,
)
@@ -459,30 +460,37 @@ def format_column_arguments(column_args, arguments):
column_args[i] = arguments[column_args[i].arg]
-def parse_arguments(function: str, columns: str) -> list[str]:
+def _lookback(columns, j, string):
+ """For parse_arguments, check that the current character is preceded by string"""
+ if j < len(string):
+ return False
+ return columns[j - len(string) : j] == string
+
+
+def parse_arguments(_function: str, columns: str) -> list[str]:
"""
Some functions take a quoted string for their arguments that may contain commas,
which requires special handling.
This function attempts to be identical with the similarly named parse_arguments
found in static/app/utils/discover/fields.tsx
"""
- if (function != "to_other" and function != "count_if" and function != "spans_histogram") or len(
- columns
- ) == 0:
- return [c.strip() for c in columns.split(",") if len(c.strip()) > 0]
-
args = []
quoted = False
+ in_tag = False
escaped = False
i, j = 0, 0
while j < len(columns):
- if i == j and columns[j] == '"':
+ if not in_tag and i == j and columns[j] == '"':
# when we see a quote at the beginning of
# an argument, then this is a quoted string
quoted = True
+ elif not quoted and columns[j] == "[" and _lookback(columns, j, "tags"):
+ # when the argument begins with tags[,
+ # then this is the beginning of the tag that may contain commas
+ in_tag = True
elif i == j and columns[j] == " ":
# argument has leading spaces, skip over them
i += 1
@@ -494,12 +502,16 @@ def parse_arguments(function: str, columns: str) -> list[str]:
# when we see a non-escaped quote while inside
# of a quoted string, we should end it
quoted = False
+ elif in_tag and not escaped and columns[j] == "]":
+ # when we see a non-escaped closing bracket
+ # while inside of a tag, we should end it
+ in_tag = False
elif quoted and escaped:
# when we are inside a quoted string and have
# begun an escape character, we should end it
escaped = False
- elif quoted and columns[j] == ",":
- # when we are inside a quoted string and see
+ elif (quoted or in_tag) and columns[j] == ",":
+ # when we are inside a quoted string or tag and see
# a comma, it should not be considered an
# argument separator
pass
@@ -1127,6 +1139,9 @@ def _normalize(self, value: str) -> str:
return value
if not snuba_column and is_mri(value):
return value
+ match = TYPED_TAG_KEY_RE.search(value)
+ if match and match.group("type") == "number":
+ return value
if not snuba_column:
raise InvalidFunctionArgument(f"{value} is not a valid column")
elif snuba_column not in ["time", "timestamp", "duration"]:
diff --git a/src/sentry/search/snuba/backend.py b/src/sentry/search/snuba/backend.py
index 92aa4681f0623..de47f44053ad3 100644
--- a/src/sentry/search/snuba/backend.py
+++ b/src/sentry/search/snuba/backend.py
@@ -27,7 +27,6 @@
from sentry.models.grouplink import GroupLink
from sentry.models.groupowner import GroupOwner
from sentry.models.groupsubscription import GroupSubscription
-from sentry.models.platformexternalissue import PlatformExternalIssue
from sentry.models.project import Project
from sentry.models.release import Release
from sentry.models.team import Team
@@ -40,6 +39,7 @@
PostgresSnubaQueryExecutor,
TrendsSortWeights,
)
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
from sentry.users.models.user import User
from sentry.utils import metrics
from sentry.utils.cursors import Cursor, CursorResult
diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py
index 91fa4f1b28047..8f7fe5086eab0 100644
--- a/src/sentry/search/snuba/executors.py
+++ b/src/sentry/search/snuba/executors.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import dataclasses
import functools
import logging
import time
@@ -503,6 +504,7 @@ def snuba_search(
)
except UnsupportedSearchQuery:
pass
+
query_params_for_categories = {
gc: query_params
for gc, query_params in query_params_for_categories.items()
@@ -1025,7 +1027,7 @@ def query(
# * we started with Postgres candidates and so only do one Snuba query max
# * the paginator is returning enough results to satisfy the query (>= the limit)
# * there are no more groups in Snuba to post-filter
- # TODO do we actually have to rebuild this SequencePaginator every time
+ # TODO: do we actually have to rebuild this SequencePaginator every time
# or can we just make it after we've broken out of the loop?
paginator_results = SequencePaginator(
[(score, id) for (id, score) in result_groups], reverse=True, **paginator_options
@@ -1162,6 +1164,8 @@ class InvalidQueryForExecutor(Exception):
class GroupAttributesPostgresSnubaQueryExecutor(PostgresSnubaQueryExecutor):
+ logger = logging.getLogger("sentry.search.groupattributessnuba")
+
def get_times_seen_filter(
self, search_filter: SearchFilter, joined_entity: Entity
) -> Condition:
@@ -1462,6 +1466,13 @@ def get_assigned_or_suggested(
conditions=top_level_conditions,
)
+ sort_strategies = {
+ "new": "first_seen_score",
+ "date": "last_seen_score",
+ "freq": "times_seen",
+ "user": "user_count",
+ }
+
def get_last_seen_aggregation(self, joined_entity: Entity) -> Function:
return Function(
"ifNull",
@@ -1478,7 +1489,7 @@ def get_last_seen_aggregation(self, joined_entity: Entity) -> Function:
),
0,
],
- alias="last_seen_score",
+ alias=self.sort_strategies["date"],
)
def get_first_seen_aggregation(self) -> Function:
@@ -1497,7 +1508,7 @@ def get_first_seen_aggregation(self) -> Function:
),
0,
],
- alias="first_seen_score",
+ alias=self.sort_strategies["new"],
)
def get_handled_condition(
@@ -1610,23 +1621,17 @@ def get_first_release_condition(
"first_release": (get_first_release_condition, Clauses.WHERE),
"firstRelease": (get_first_release_condition, Clauses.WHERE),
}
- times_seen_aggregation = Function("count", [], alias="times_seen")
def get_sort_defs(self, entity):
return {
"date": self.get_last_seen_aggregation(entity),
"new": self.get_first_seen_aggregation(),
- "freq": self.times_seen_aggregation,
- "user": Function("uniq", [Column("tags[sentry:user]", entity)], "user_count"),
+ "freq": Function("count", [], alias=self.sort_strategies["freq"]),
+ "user": Function(
+ "uniq", [Column("tags[sentry:user]", entity)], self.sort_strategies["user"]
+ ),
}
- sort_strategies = {
- "new": "first_seen_score",
- "date": "last_seen_score",
- "freq": "times_seen",
- "user": "user_count",
- }
-
def should_check_search_issues(
self, group_categories: Sequence[str], search_filters: Sequence[SearchFilter]
) -> bool:
@@ -1837,8 +1842,6 @@ def query(
Condition(Column("occurrence_type_id", joined_entity), Op.IN, group_types)
)
- sort_func = self.get_sort_defs(joined_entity)[sort_by]
-
if environments:
where_conditions.append(
Condition(
@@ -1846,6 +1849,8 @@ def query(
)
)
+ sort_func = self.get_sort_defs(joined_entity)[sort_by]
+
if cursor is not None:
op = Op.GTE if cursor.is_prev else Op.LTE
having.append(Condition(sort_func, op, cursor.value))
@@ -1865,7 +1870,9 @@ def query(
where=where_conditions,
groupby=groupby,
having=having,
- orderby=[OrderBy(sort_func, direction=Direction.DESC)],
+ orderby=[
+ OrderBy(dataclasses.replace(sort_func, alias=None), direction=Direction.DESC)
+ ],
limit=Limit(limit + 1),
)
dataset = Dataset.Events.value if is_errors else Dataset.IssuePlatform.value
diff --git a/src/sentry/seer/anomaly_detection/get_anomaly_data.py b/src/sentry/seer/anomaly_detection/get_anomaly_data.py
new file mode 100644
index 0000000000000..ab28bedf39372
--- /dev/null
+++ b/src/sentry/seer/anomaly_detection/get_anomaly_data.py
@@ -0,0 +1,141 @@
+import logging
+
+from django.conf import settings
+from urllib3.exceptions import MaxRetryError, TimeoutError
+
+from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL
+from sentry.incidents.models.alert_rule import AlertRule
+from sentry.net.http import connection_from_url
+from sentry.seer.anomaly_detection.types import (
+ AlertInSeer,
+ AnomalyDetectionConfig,
+ DetectAnomaliesRequest,
+ DetectAnomaliesResponse,
+ TimeSeriesPoint,
+)
+from sentry.seer.anomaly_detection.utils import translate_direction
+from sentry.seer.signed_seer_api import make_signed_seer_api_request
+from sentry.snuba.models import QuerySubscription
+from sentry.utils import json
+from sentry.utils.json import JSONDecodeError
+
+logger = logging.getLogger(__name__)
+
+SEER_ANOMALY_DETECTION_CONNECTION_POOL = connection_from_url(
+ settings.SEER_ANOMALY_DETECTION_URL,
+ timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT,
+)
+
+
+def get_anomaly_data_from_seer(
+ alert_rule: AlertRule,
+ subscription: QuerySubscription,
+ last_update: float,
+ aggregation_value: float | None,
+) -> list[TimeSeriesPoint] | None:
+ snuba_query = alert_rule.snuba_query
+ if not snuba_query or not aggregation_value:
+ return None
+
+ # XXX: we know we have these things because the serializer makes sure we do, but mypy insists
+ if (
+ alert_rule.threshold_type is None
+ or not alert_rule.sensitivity
+ or not alert_rule.seasonality
+ or not snuba_query.time_window
+ ):
+ return None
+
+ anomaly_detection_config = AnomalyDetectionConfig(
+ time_period=int(snuba_query.time_window / 60),
+ sensitivity=alert_rule.sensitivity,
+ direction=translate_direction(alert_rule.threshold_type),
+ expected_seasonality=alert_rule.seasonality,
+ )
+ context = AlertInSeer(
+ id=alert_rule.id,
+ cur_window=TimeSeriesPoint(timestamp=last_update, value=aggregation_value),
+ )
+ detect_anomalies_request = DetectAnomaliesRequest(
+ organization_id=subscription.project.organization.id,
+ project_id=subscription.project_id,
+ config=anomaly_detection_config,
+ context=context,
+ )
+ extra_data = {
+ "subscription_id": subscription.id,
+ "dataset": snuba_query.dataset,
+ "organization_id": subscription.project.organization.id,
+ "project_id": subscription.project_id,
+ "alert_rule_id": alert_rule.id,
+ }
+ try:
+ logger.info("Sending subscription update data to Seer", extra=extra_data)
+ response = make_signed_seer_api_request(
+ SEER_ANOMALY_DETECTION_CONNECTION_POOL,
+ SEER_ANOMALY_DETECTION_ENDPOINT_URL,
+ json.dumps(detect_anomalies_request).encode("utf-8"),
+ )
+ except (TimeoutError, MaxRetryError):
+ logger.warning("Timeout error when hitting anomaly detection endpoint", extra=extra_data)
+ return None
+
+ if response.status > 400:
+ logger.error(
+ "Error when hitting Seer detect anomalies endpoint",
+ extra={
+ "response_data": response.data,
+ **extra_data,
+ },
+ )
+ return None
+ try:
+ decoded_data = response.data.decode("utf-8")
+ except AttributeError:
+ logger.exception(
+ "Failed to parse Seer anomaly detection response",
+ extra={
+ "ad_config": anomaly_detection_config,
+ "context": context,
+ "response_data": response.data,
+ "response_code": response.status,
+ },
+ )
+ return None
+
+ try:
+ results: DetectAnomaliesResponse = json.loads(decoded_data)
+ except JSONDecodeError:
+ logger.exception(
+ "Failed to parse Seer anomaly detection response",
+ extra={
+ "ad_config": anomaly_detection_config,
+ "context": context,
+ "response_data": decoded_data,
+ "response_code": response.status,
+ },
+ )
+ return None
+
+ if not results.get("success"):
+ logger.error(
+ "Error when hitting Seer detect anomalies endpoint",
+ extra={
+ "error_message": results.get("message", ""),
+ **extra_data,
+ },
+ )
+ return None
+
+ ts = results.get("timeseries")
+ if not ts:
+ logger.warning(
+ "Seer anomaly detection response returned no potential anomalies",
+ extra={
+ "ad_config": anomaly_detection_config,
+ "context": context,
+ "response_data": results.get("message"),
+ },
+ )
+ return None
+ return ts
diff --git a/src/sentry/seer/anomaly_detection/get_historical_anomalies.py b/src/sentry/seer/anomaly_detection/get_historical_anomalies.py
index 16a921eb8d362..7fd4dd4788c38 100644
--- a/src/sentry/seer/anomaly_detection/get_historical_anomalies.py
+++ b/src/sentry/seer/anomaly_detection/get_historical_anomalies.py
@@ -151,7 +151,7 @@ def get_historical_anomaly_data_from_seer(
"""
if alert_rule.status == AlertRuleStatus.NOT_ENOUGH_DATA.value:
return []
- # don't think this can happen but mypy is yelling
+ # don't think these can happen but mypy is yelling
if not alert_rule.snuba_query:
logger.error(
"No snuba query associated with alert rule",
@@ -160,6 +160,14 @@ def get_historical_anomaly_data_from_seer(
},
)
return None
+ if not alert_rule.organization:
+ logger.error(
+ "No organization associated with alert rule",
+ extra={
+ "alert_rule_id": alert_rule.id,
+ },
+ )
+ return None
subscription = alert_rule.snuba_query.subscriptions.first()
# same deal as above
if not subscription:
@@ -175,7 +183,7 @@ def get_historical_anomaly_data_from_seer(
end = datetime.fromisoformat(end_string)
query_columns = get_query_columns([snuba_query.aggregate], snuba_query.time_window)
historical_data = fetch_historical_data(
- alert_rule=alert_rule,
+ organization=alert_rule.organization,
snuba_query=snuba_query,
query_columns=query_columns,
project=project,
diff --git a/src/sentry/seer/anomaly_detection/store_data.py b/src/sentry/seer/anomaly_detection/store_data.py
index 30171d23b15aa..9c6eecd33a144 100644
--- a/src/sentry/seer/anomaly_detection/store_data.py
+++ b/src/sentry/seer/anomaly_detection/store_data.py
@@ -1,5 +1,7 @@
import logging
from datetime import datetime, timedelta
+from enum import StrEnum
+from typing import Any
from django.conf import settings
from django.core.exceptions import ValidationError
@@ -8,7 +10,7 @@
from sentry.api.bases.organization_events import get_query_columns
from sentry.conf.server import SEER_ANOMALY_DETECTION_STORE_DATA_URL
-from sentry.incidents.models.alert_rule import AlertRule, AlertRuleStatus
+from sentry.incidents.models.alert_rule import AlertRule, AlertRuleDetectionType, AlertRuleStatus
from sentry.models.project import Project
from sentry.net.http import connection_from_url
from sentry.seer.anomaly_detection.types import (
@@ -21,12 +23,13 @@
from sentry.seer.anomaly_detection.utils import (
fetch_historical_data,
format_historical_data,
+ get_dataset_from_label,
+ get_event_types,
translate_direction,
)
from sentry.seer.signed_seer_api import make_signed_seer_api_request
-from sentry.snuba.models import SnubaQuery
-from sentry.snuba.utils import get_dataset
-from sentry.utils import json
+from sentry.snuba.models import SnubaQuery, SnubaQueryEventType
+from sentry.utils import json, metrics
from sentry.utils.json import JSONDecodeError
logger = logging.getLogger(__name__)
@@ -35,7 +38,12 @@
settings.SEER_ANOMALY_DETECTION_URL,
timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT,
)
-NUM_DAYS = 28
+MIN_DAYS = 7
+
+
+class SeerMethod(StrEnum):
+ CREATE = "create"
+ UPDATE = "update"
def _get_start_and_end_indices(data: list[TimeSeriesPoint]) -> tuple[int, int]:
@@ -57,16 +65,113 @@ def _get_start_and_end_indices(data: list[TimeSeriesPoint]) -> tuple[int, int]:
return start, end
-def send_historical_data_to_seer(alert_rule: AlertRule, project: Project) -> AlertRuleStatus:
+def handle_send_historical_data_to_seer(
+ alert_rule: AlertRule,
+ snuba_query: SnubaQuery,
+ project: Project,
+ method: str,
+ event_types: list[SnubaQueryEventType.EventType] | None = None,
+):
+ event_types_param = event_types or snuba_query.event_types
+ try:
+ rule_status = send_historical_data_to_seer(
+ alert_rule=alert_rule,
+ project=project,
+ snuba_query=snuba_query,
+ event_types=event_types_param,
+ )
+ if rule_status == AlertRuleStatus.NOT_ENOUGH_DATA:
+ # if we don't have at least seven days' worth of data, then the dynamic alert won't fire
+ alert_rule.update(status=AlertRuleStatus.NOT_ENOUGH_DATA.value)
+ elif (
+ rule_status == AlertRuleStatus.PENDING and alert_rule.status != AlertRuleStatus.PENDING
+ ):
+ alert_rule.update(status=AlertRuleStatus.PENDING.value)
+ except (TimeoutError, MaxRetryError):
+ raise TimeoutError(f"Failed to send data to Seer - cannot {method} alert rule.")
+ except ParseError:
+ raise ParseError("Failed to parse Seer store data response")
+ except (ValidationError, Exception):
+ raise ValidationError(f"Failed to send data to Seer - cannot {method} alert rule.")
+
+
+def send_new_rule_data(alert_rule: AlertRule, project: Project, snuba_query: SnubaQuery) -> None:
+ try:
+ handle_send_historical_data_to_seer(alert_rule, snuba_query, project, SeerMethod.CREATE)
+ except (TimeoutError, MaxRetryError, ParseError, ValidationError):
+ alert_rule.delete()
+ raise
+ else:
+ metrics.incr("anomaly_detection_alert.created")
+
+
+def update_rule_data(
+ alert_rule: AlertRule,
+ project: Project,
+ snuba_query: SnubaQuery,
+ updated_fields: dict[str, Any],
+ updated_query_fields: dict[str, Any],
+) -> None:
+ # if the rule previously wasn't a dynamic type but it is now, we need to send Seer data for the first time
+ # OR it's dynamic but the query or aggregate is changing so we need to update the data Seer has
+ if updated_fields.get("detection_type") == AlertRuleDetectionType.DYNAMIC and (
+ alert_rule.detection_type != AlertRuleDetectionType.DYNAMIC
+ or updated_query_fields.get("query")
+ or updated_query_fields.get("aggregate")
+ ):
+ # use setattr to avoid saving the rule until the Seer call has successfully finished,
+ # otherwise the rule would be in a bad state
+ for k, v in updated_fields.items():
+ setattr(alert_rule, k, v)
+
+ for k, v in updated_query_fields.items():
+ if k == "dataset":
+ v = v.value
+ elif k == "time_window":
+ time_window = updated_query_fields.get("time_window")
+ v = (
+ int(time_window.total_seconds())
+ if time_window is not None
+ else snuba_query.time_window
+ )
+ elif k == "event_types":
+ continue
+ setattr(alert_rule.snuba_query, k, v)
+
+ assert alert_rule.snuba_query
+ handle_send_historical_data_to_seer(
+ alert_rule,
+ alert_rule.snuba_query,
+ project,
+ SeerMethod.UPDATE,
+ updated_query_fields.get("event_types"),
+ )
+
+
+def send_historical_data_to_seer(
+ alert_rule: AlertRule,
+ project: Project,
+ snuba_query: SnubaQuery | None = None,
+ event_types: list[SnubaQueryEventType.EventType] | None = None,
+) -> AlertRuleStatus:
"""
Get 28 days of historical data and pass it to Seer to be used for prediction anomalies on the alert.
"""
- snuba_query = SnubaQuery.objects.get(id=alert_rule.snuba_query_id)
+ if not snuba_query:
+ snuba_query = SnubaQuery.objects.get(id=alert_rule.snuba_query_id)
window_min = int(snuba_query.time_window / 60)
- dataset = get_dataset(snuba_query.dataset)
- query_columns = get_query_columns([snuba_query.aggregate], snuba_query.time_window)
+ dataset = get_dataset_from_label(snuba_query.dataset)
+ query_columns = get_query_columns([snuba_query.aggregate], window_min)
+ event_types = get_event_types(snuba_query, event_types)
+ if not alert_rule.organization:
+ raise ValidationError("Alert rule doesn't belong to an organization")
+
historical_data = fetch_historical_data(
- alert_rule=alert_rule, snuba_query=snuba_query, query_columns=query_columns, project=project
+ organization=alert_rule.organization,
+ snuba_query=snuba_query,
+ query_columns=query_columns,
+ project=project,
+ event_types=event_types,
)
if not historical_data:
@@ -182,7 +287,6 @@ def send_historical_data_to_seer(alert_rule: AlertRule, project: Project) -> Ale
)
raise Exception(message)
- MIN_DAYS = 7
data_start_index, data_end_index = _get_start_and_end_indices(formatted_data)
if data_start_index == -1:
return AlertRuleStatus.NOT_ENOUGH_DATA
diff --git a/src/sentry/seer/anomaly_detection/utils.py b/src/sentry/seer/anomaly_detection/utils.py
index f30d5ad4e79d3..1070c06a05bde 100644
--- a/src/sentry/seer/anomaly_detection/utils.py
+++ b/src/sentry/seer/anomaly_detection/utils.py
@@ -3,25 +3,58 @@
from django.utils import timezone
from django.utils.datastructures import MultiValueDict
+from rest_framework.exceptions import ParseError
from sentry import release_health
from sentry.api.bases.organization_events import resolve_axis_column
from sentry.api.serializers.snuba import SnubaTSResultSerializer
-from sentry.incidents.models.alert_rule import AlertRule, AlertRuleThresholdType
+from sentry.incidents.models.alert_rule import AlertRuleThresholdType
from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.search.events.types import SnubaParams
-from sentry.seer.anomaly_detection.types import TimeSeriesPoint
+from sentry.seer.anomaly_detection.types import AnomalyType, TimeSeriesPoint
from sentry.snuba import metrics_performance
from sentry.snuba.metrics.extraction import MetricSpecType
from sentry.snuba.models import SnubaQuery, SnubaQueryEventType
from sentry.snuba.referrer import Referrer
from sentry.snuba.sessions_v2 import QueryDefinition
-from sentry.snuba.utils import get_dataset
+from sentry.snuba.utils import DATASET_OPTIONS, get_dataset
from sentry.utils.snuba import SnubaTSResult
# Number of days of historical data fetched for Seer anomaly detection.
NUM_DAYS = 28

# Maps SnubaQuery event types to the literal string used in
# discover-style query filters (event.type:<value>).
SNUBA_QUERY_EVENT_TYPE_TO_STRING = {
    SnubaQueryEventType.EventType.ERROR: "error",
    SnubaQueryEventType.EventType.DEFAULT: "default",
    SnubaQueryEventType.EventType.TRANSACTION: "transaction",
}
+
+
def has_anomaly(anomaly: TimeSeriesPoint, label: str) -> bool:
    """
    Decide whether an anomalous point should fire for the given trigger label.

    High-confidence anomalies always count; low-confidence anomalies count
    only for warning triggers.
    """
    from sentry.incidents.logic import WARNING_TRIGGER_LABEL

    detected_type = anomaly.get("anomaly", {}).get("anomaly_type")

    if detected_type == AnomalyType.HIGH_CONFIDENCE.value:
        return True
    return label == WARNING_TRIGGER_LABEL and detected_type == AnomalyType.LOW_CONFIDENCE.value
+
+
def anomaly_has_confidence(anomaly: TimeSeriesPoint) -> bool:
    """
    Whether Seer had the 7+ days of data necessary to classify this point;
    points marked NO_DATA carry no anomaly confidence at all.
    """
    detected_type = anomaly.get("anomaly", {}).get("anomaly_type")
    return detected_type != AnomalyType.NO_DATA.value
+
def translate_direction(direction: int) -> str:
"""
@@ -35,27 +68,32 @@ def translate_direction(direction: int) -> str:
return direction_map[AlertRuleThresholdType(direction)]
-def get_snuba_query_string(snuba_query: SnubaQuery) -> str:
def get_event_types(
    snuba_query: SnubaQuery, event_types: list[SnubaQueryEventType.EventType] | None = None
) -> list[SnubaQueryEventType.EventType]:
    """
    Return the explicitly supplied event types, falling back to the ones
    stored on the snuba query (or an empty list if it has none).
    """
    if event_types:
        return event_types
    return snuba_query.event_types or []
+
+
+def get_snuba_query_string(
+ snuba_query: SnubaQuery, event_types: list[SnubaQueryEventType.EventType] | None = None
+) -> str:
"""
Generate a query string that matches what the OrganizationEventsStatsEndpoint does
"""
- SNUBA_QUERY_EVENT_TYPE_TO_STRING = {
- SnubaQueryEventType.EventType.ERROR: "error",
- SnubaQueryEventType.EventType.DEFAULT: "default",
- SnubaQueryEventType.EventType.TRANSACTION: "transaction",
- }
-
+ event_types = get_event_types(snuba_query, event_types)
if len(snuba_query.event_types) > 1:
- # e.g. (is:unresolved) AND (event.type:[error, default])
+ # e.g. '(is:unresolved) AND (event.type:[error, default])'
event_types_list = [
- SNUBA_QUERY_EVENT_TYPE_TO_STRING[event_type] for event_type in snuba_query.event_types
+ SNUBA_QUERY_EVENT_TYPE_TO_STRING[event_type] for event_type in event_types
]
event_types_string = "(event.type:["
event_types_string += ", ".join(event_types_list)
event_types_string += "])"
else:
- # e.g. (is:unresolved) AND (event.type:error)
- snuba_query_event_type_string = SNUBA_QUERY_EVENT_TYPE_TO_STRING[snuba_query.event_types[0]]
+ # e.g. '(is:unresolved) AND (event.type:error)'
+ snuba_query_event_type_string = SNUBA_QUERY_EVENT_TYPE_TO_STRING[event_types[0]]
event_types_string = f"(event.type:{snuba_query_event_type_string})"
if snuba_query.query:
snuba_query_string = f"({snuba_query.query}) AND {event_types_string}"
@@ -142,7 +180,8 @@ def format_snuba_ts_data(
count_data = data[1]
count = 0
if len(count_data):
- count = count_data[0].get("count", 0)
+ # there are sometimes None values from snuba
+ count = count_data[0].get("count", 0) or 0
ts_point = TimeSeriesPoint(timestamp=data[0], value=count)
formatted_data.append(ts_point)
return formatted_data
@@ -160,13 +199,27 @@ def format_historical_data(
return format_snuba_ts_data(data, query_columns, organization)
def get_dataset_from_label(dataset_label: str):
    """
    Resolve a SnubaQuery dataset label to a dataset module, translating
    labels stored on alerts into names DATASET_OPTIONS understands.

    Raises ParseError when the (translated) label is unknown.
    """
    if dataset_label == "events":
        # DATASET_OPTIONS expects the name 'errors'
        resolved_label = "errors"
    elif dataset_label in ("generic_metrics", "transactions"):
        # XXX: performance alerts dataset differs locally vs in prod
        resolved_label = "metricsEnhanced"
    else:
        resolved_label = dataset_label
    dataset = get_dataset(resolved_label)
    if dataset is None:
        raise ParseError(detail=f"dataset must be one of: {', '.join(DATASET_OPTIONS.keys())}")
    return dataset
+
+
def fetch_historical_data(
- alert_rule: AlertRule,
+ organization: Organization,
snuba_query: SnubaQuery,
query_columns: list[str],
project: Project,
start: datetime | None = None,
end: datetime | None = None,
+ event_types: list[SnubaQueryEventType.EventType] | None = None,
) -> SnubaTSResult | None:
"""
Fetch 28 days of historical data from Snuba to pass to Seer to build the anomaly detection model
@@ -189,10 +242,10 @@ def fetch_historical_data(
dataset_label = "errors"
elif dataset_label in ["generic_metrics", "transactions"]:
# XXX: performance alerts dataset differs locally vs in prod
- dataset_label = "discover"
- dataset = get_dataset(dataset_label)
+ dataset_label = "metricsEnhanced"
+ dataset = get_dataset_from_label(dataset_label)
- if not project or not dataset or not alert_rule.organization:
+ if not project or not dataset or not organization:
return None
environments = []
@@ -200,7 +253,7 @@ def fetch_historical_data(
environments = [snuba_query.environment]
snuba_params = SnubaParams(
- organization=alert_rule.organization,
+ organization=organization,
projects=[project],
start=start,
end=end,
@@ -209,11 +262,10 @@ def fetch_historical_data(
)
if dataset == metrics_performance:
- return get_crash_free_historical_data(
- start, end, project, alert_rule.organization, granularity
- )
+ return get_crash_free_historical_data(start, end, project, organization, granularity)
else:
- snuba_query_string = get_snuba_query_string(snuba_query)
+ event_types = get_event_types(snuba_query, event_types)
+ snuba_query_string = get_snuba_query_string(snuba_query, event_types)
historical_data = dataset.timeseries_query(
selected_columns=query_columns,
query=snuba_query_string,
diff --git a/tests/sentry/api/validators/sentry_apps/__init__.py b/src/sentry/seer/services/__init__.py
similarity index 100%
rename from tests/sentry/api/validators/sentry_apps/__init__.py
rename to src/sentry/seer/services/__init__.py
diff --git a/tests/sentry/mediators/sentry_app_installations/__init__.py b/src/sentry/seer/services/test_generation/__init__.py
similarity index 100%
rename from tests/sentry/mediators/sentry_app_installations/__init__.py
rename to src/sentry/seer/services/test_generation/__init__.py
diff --git a/src/sentry/seer/services/test_generation/impl.py b/src/sentry/seer/services/test_generation/impl.py
new file mode 100644
index 0000000000000..36f0990527e4b
--- /dev/null
+++ b/src/sentry/seer/services/test_generation/impl.py
@@ -0,0 +1,38 @@
+import orjson
+import requests
+from django.conf import settings
+
+from sentry.seer.services.test_generation.model import CreateUnitTestResponse
+from sentry.seer.services.test_generation.service import TestGenerationService
+
+
class RegionBackedTestGenerationService(TestGenerationService):
    """Region-silo implementation that proxies unit-test generation to Seer."""

    def start_unit_test_generation(
        self, *, region_name: str, github_org: str, repo: str, pr_id: int, external_id: str
    ) -> CreateUnitTestResponse:
        endpoint = f"{settings.SEER_AUTOFIX_URL}/v1/automation/codegen/unit-tests"
        payload = {
            "repo": {
                "provider": "github",
                "owner": github_org,
                "name": repo,
                "external_id": external_id,
            },
            "pr_id": pr_id,
        }

        response = requests.post(
            endpoint,
            data=orjson.dumps(payload, option=orjson.OPT_NON_STR_KEYS),
            headers={
                "content-type": "application/json;charset=utf-8",
            },
        )

        # Any non-200 body is surfaced verbatim as the error detail.
        if response.status_code != 200:
            return CreateUnitTestResponse(error_detail=response.text)
        return CreateUnitTestResponse()
diff --git a/src/sentry/seer/services/test_generation/model.py b/src/sentry/seer/services/test_generation/model.py
new file mode 100644
index 0000000000000..8d27341e8a41a
--- /dev/null
+++ b/src/sentry/seer/services/test_generation/model.py
@@ -0,0 +1,14 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+# from typing import Any, Dict, Optional
+from sentry.hybridcloud.rpc import RpcModel
+
+
class CreateUnitTestResponse(RpcModel):
    """RPC response for a unit-test generation request."""

    # Set to the remote response body when the request failed; None on success.
    error_detail: str | None = None

    @property
    def success(self) -> bool:
        # Success is simply the absence of an error message.
        return self.error_detail is None
diff --git a/src/sentry/seer/services/test_generation/service.py b/src/sentry/seer/services/test_generation/service.py
new file mode 100644
index 0000000000000..56027efae2bda
--- /dev/null
+++ b/src/sentry/seer/services/test_generation/service.py
@@ -0,0 +1,36 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import abc
+
+from sentry.hybridcloud.rpc.resolvers import ByRegionName
+from sentry.hybridcloud.rpc.service import RpcService, regional_rpc_method
+from sentry.seer.services.test_generation.model import CreateUnitTestResponse
+from sentry.silo.base import SiloMode
+
+
class TestGenerationService(RpcService):
    """
    Used in github webhooks to call regional seer for copilot requests.
    """

    key = "test_generation"
    local_mode = SiloMode.REGION

    @classmethod
    def get_local_implementation(cls) -> RpcService:
        # Imported here rather than at module top — presumably to avoid a
        # circular import with the implementation module; confirm if moved.
        from sentry.seer.services.test_generation.impl import RegionBackedTestGenerationService

        return RegionBackedTestGenerationService()

    @regional_rpc_method(resolve=ByRegionName())
    @abc.abstractmethod
    def start_unit_test_generation(
        self, *, region_name: str, github_org: str, repo: str, pr_id: int, external_id: str
    ) -> CreateUnitTestResponse:
        # Abstract: dispatched to the region named by region_name via ByRegionName.
        pass


# Module-level delegation handle used by callers of this service.
test_generation_service = TestGenerationService.create_delegation()
diff --git a/src/sentry/seer/similarity/grouping_records.py b/src/sentry/seer/similarity/grouping_records.py
index 19eb0ef0af94e..daea9b680801d 100644
--- a/src/sentry/seer/similarity/grouping_records.py
+++ b/src/sentry/seer/similarity/grouping_records.py
@@ -24,7 +24,6 @@ class CreateGroupingRecordData(TypedDict):
group_id: int
hash: str
project_id: int
- message: str
exception_type: str | None
diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py
index 9ef635e1492d8..0eb63ede964a4 100644
--- a/src/sentry/seer/similarity/utils.py
+++ b/src/sentry/seer/similarity/utils.py
@@ -193,7 +193,7 @@ def get_stacktrace_string(data: dict[str, Any]) -> str:
if frame_dict["filename"].startswith(base64_prefix):
metrics.incr(
"seer.grouping.base64_encoded_filename",
- sample_rate=1.0,
+ sample_rate=options.get("seer.similarity.metrics_sample_rate"),
)
base64_encoded = True
break
diff --git a/src/sentry/mediators/alert_rule_actions/creator.py b/src/sentry/sentry_apps/alert_rule_action_creator.py
similarity index 55%
rename from src/sentry/mediators/alert_rule_actions/creator.py
rename to src/sentry/sentry_apps/alert_rule_action_creator.py
index 732bd883dce7e..cd7e1632d297e 100644
--- a/src/sentry/mediators/alert_rule_actions/creator.py
+++ b/src/sentry/sentry_apps/alert_rule_action_creator.py
@@ -1,26 +1,29 @@
-from django.db import router
+from collections.abc import Mapping
+from dataclasses import dataclass, field
+from typing import Any
+
+from django.db import router, transaction
from django.utils.functional import cached_property
from sentry.coreapi import APIError
-from sentry.mediators.external_requests.alert_rule_action_requester import (
+from sentry.sentry_apps.external_requests.alert_rule_action_requester import (
AlertRuleActionRequester,
AlertRuleActionResult,
)
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
from sentry.sentry_apps.models.sentry_app_component import SentryAppComponent
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-class AlertRuleActionCreator(Mediator):
- using = router.db_for_write(SentryAppComponent)
- install = Param(SentryAppInstallation)
- fields = Param(object, default=[]) # array of dicts
+@dataclass
+class AlertRuleActionCreator:
+ install: SentryAppInstallation
+ fields: list[Mapping[str, Any]] = field(default_factory=list)
- def call(self) -> AlertRuleActionResult:
- uri = self._fetch_sentry_app_uri()
- self._make_external_request(uri)
- return self.response
+ def run(self) -> AlertRuleActionResult:
+ with transaction.atomic(router.db_for_write(SentryAppComponent)):
+ uri = self._fetch_sentry_app_uri()
+ response = self._make_external_request(uri)
+ return response
def _fetch_sentry_app_uri(self):
component = SentryAppComponent.objects.get(
@@ -32,12 +35,13 @@ def _fetch_sentry_app_uri(self):
def _make_external_request(self, uri=None):
if uri is None:
raise APIError("Sentry App request url not found")
-
- self.response = AlertRuleActionRequester.run(
+ response = AlertRuleActionRequester(
install=self.install,
uri=uri,
fields=self.fields,
- )
+ ).run()
+
+ return response
@cached_property
def sentry_app(self):
diff --git a/src/sentry/sentry_apps/api/bases/sentryapps.py b/src/sentry/sentry_apps/api/bases/sentryapps.py
new file mode 100644
index 0000000000000..1ce641f943707
--- /dev/null
+++ b/src/sentry/sentry_apps/api/bases/sentryapps.py
@@ -0,0 +1,512 @@
+from __future__ import annotations
+
+import logging
+from collections.abc import Sequence
+from functools import wraps
+from typing import Any
+
+from django.http import Http404
+from rest_framework.exceptions import PermissionDenied
+from rest_framework.permissions import BasePermission
+from rest_framework.request import Request
+from rest_framework.response import Response
+from rest_framework.serializers import ValidationError
+
+from sentry.api.authentication import ClientIdSecretAuthentication
+from sentry.api.base import Endpoint
+from sentry.api.permissions import SentryPermission, StaffPermissionMixin
+from sentry.auth.staff import is_active_staff
+from sentry.auth.superuser import is_active_superuser, superuser_has_permission
+from sentry.coreapi import APIError
+from sentry.integrations.api.bases.integration import PARANOID_GET
+from sentry.middleware.stats import add_request_metric_tags
+from sentry.models.organization import OrganizationStatus
+from sentry.organizations.services.organization import (
+ RpcUserOrganizationContext,
+ organization_service,
+)
+from sentry.sentry_apps.models.sentry_app import SentryApp
+from sentry.sentry_apps.services.app import RpcSentryApp, app_service
+from sentry.users.models.user import User
+from sentry.users.services.user import RpcUser
+from sentry.users.services.user.service import user_service
+from sentry.utils.sdk import Scope
+from sentry.utils.strings import to_single_line_str
+
# Sentry App UI component types that endpoints in this module deal with.
COMPONENT_TYPES = ["stacktrace-link", "issue-link"]

logger = logging.getLogger(__name__)
+
+
def catch_raised_errors(func):
    """Decorator: convert an APIError raised by the wrapped view into a 400 response."""

    @wraps(func)
    def inner(self, *args, **kwargs):
        try:
            result = func(self, *args, **kwargs)
        except APIError as exc:
            return Response({"detail": exc.msg}, status=400)
        return result

    return inner
+
+
def ensure_scoped_permission(request: Request, allowed_scopes: Sequence[str] | None) -> bool:
    """
    Check that the requesting user holds at least one scope required by the
    endpoint being requested.

    A missing scope list (``None``) means the endpoint was not registered in a
    ``scope_map`` and must not be accessible at all. An empty list explicitly
    marks a public endpoint that requires no particular scope.
    """
    if allowed_scopes is None:
        # No scopes were found at all: the endpoint is not accessible.
        return False

    required = set(allowed_scopes)
    if not required:
        # Empty scope list implies a public endpoint.
        return True

    return any(request.access.has_scope(scope) for scope in required)
+
+
def add_integration_platform_metric_tag(func):
    """Decorator: tag the request's metrics as integration-platform traffic, then delegate."""

    @wraps(func)
    def inner(self, *args, **kwargs):
        add_request_metric_tags(self.request, integration_platform=True)
        return func(self, *args, **kwargs)

    return inner
+
+
class SentryAppsPermission(SentryPermission):
    # Required scopes per HTTP method for the sentry-apps collection endpoints.
    scope_map = {
        "GET": PARANOID_GET,
        "POST": ("org:write", "org:admin"),
    }

    def has_object_permission(self, request: Request, view, context: RpcUserOrganizationContext):
        # No user attached to the request at all -> deny outright.
        if not hasattr(request, "user") or not request.user:
            return False

        # NOTE(review): determine_access comes from the SentryPermission base —
        # presumably records org/access context for this request; confirm there.
        self.determine_access(request, context)

        if superuser_has_permission(request):
            return True

        # User must be a part of the Org they're trying to create the app in.
        if context.organization.status != OrganizationStatus.ACTIVE or not context.member:
            raise Http404

        assert request.method, "method must be present in request to get permissions"
        return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
class SentryAppsAndStaffPermission(StaffPermissionMixin, SentryAppsPermission):
    """Allows staff to access the GET method of sentry apps endpoints."""

    # Staff bypass is limited to read access.
    staff_allowed_methods = {"GET"}
+
+
class IntegrationPlatformEndpoint(Endpoint):
    # Base endpoint that tags every request's metrics as integration-platform
    # traffic before normal dispatch.
    def dispatch(self, request, *args, **kwargs):
        add_request_metric_tags(request, integration_platform=True)
        return super().dispatch(request, *args, **kwargs)
+
+
class SentryAppsBaseEndpoint(IntegrationPlatformEndpoint):
    permission_classes: tuple[type[BasePermission], ...] = (SentryAppsAndStaffPermission,)

    def _get_organization_slug(self, request: Request):
        # The organization is carried in the POST body, not the URL.
        organization_slug = request.json_body.get("organization")
        if not organization_slug or not isinstance(organization_slug, str):
            error_message = "Please provide a valid value for the 'organization' field."
            raise ValidationError({"organization": to_single_line_str(error_message)})
        return organization_slug

    def _get_organization_for_superuser_or_staff(
        self, user: RpcUser | User, organization_slug: str
    ) -> RpcUserOrganizationContext:
        # Superuser/staff may resolve orgs that are not visible to regular users.
        context = organization_service.get_organization_by_slug(
            slug=organization_slug, only_visible=False, user_id=user.id
        )

        if context is None:
            error_message = f"Organization '{organization_slug}' does not exist."
            raise ValidationError({"organization": to_single_line_str(error_message)})

        return context

    def _get_organization_for_user(
        self, user: RpcUser | User, organization_slug: str
    ) -> RpcUserOrganizationContext:
        # Regular users must be members of a visible organization.
        context = organization_service.get_organization_by_slug(
            slug=organization_slug, only_visible=True, user_id=user.id
        )
        if context is None or context.member is None:
            error_message = f"User does not belong to the '{organization_slug}' organization."
            raise PermissionDenied(to_single_line_str(error_message))
        return context

    def _get_org_context(self, request: Request) -> RpcUserOrganizationContext:
        organization_slug = self._get_organization_slug(request)
        assert request.user.is_authenticated, "User must be authenticated to get organization"

        if is_active_superuser(request) or is_active_staff(request):
            return self._get_organization_for_superuser_or_staff(request.user, organization_slug)
        else:
            return self._get_organization_for_user(request.user, organization_slug)

    def convert_args(self, request: Request, *args, **kwargs):
        """
        This baseclass is the SentryApp collection endpoints:

        [GET, POST] /sentry-apps

        The GET endpoint is public and doesn't require (or handle) any query
        params or request body.

        The POST endpoint is for creating a Sentry App. Part of that creation
        is associating it with the Organization that it's created within.

        So in the case of POST requests, we want to pull the Organization out
        of the request body so that we can ensure the User making the request
        has access to it.

        Since ``convert_args`` is conventionally where you materialize model
        objects from URI params, we're applying the same logic for a param in
        the request body.
        """
        # GET requests have no body, so there is no organization to resolve.
        if not request.json_body:
            return (args, kwargs)

        context = self._get_org_context(request)
        self.check_object_permissions(request, context)
        kwargs["organization"] = context.organization

        return (args, kwargs)
+
+
class SentryAppPermission(SentryPermission):
    # Scopes for apps that are not yet published: readable by org members with
    # integration access; publishing/deleting requires org admin.
    unpublished_scope_map = {
        "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
        "PUT": ("org:write", "org:admin"),
        "POST": ("org:admin",),  # used for publishing an app
        "DELETE": ("org:admin",),
    }

    published_scope_map = {
        "GET": PARANOID_GET,
        "PUT": ("org:write", "org:admin"),
        "POST": ("org:admin",),
        "DELETE": ("org:admin",),
    }

    @property
    def scope_map(self):
        # Default map; unpublished apps are special-cased via
        # _scopes_for_sentry_app below.
        return self.published_scope_map

    def has_object_permission(self, request: Request, view, sentry_app: RpcSentryApp | SentryApp):
        if not hasattr(request, "user") or not request.user:
            return False

        owner_app = organization_service.get_organization_by_id(
            id=sentry_app.owner_id, user_id=request.user.id
        )
        assert owner_app, f"owner organization for {sentry_app.name} was not found"
        self.determine_access(request, owner_app)

        if superuser_has_permission(request):
            return True

        organizations = (
            user_service.get_organizations(user_id=request.user.id)
            if request.user.id is not None
            else ()
        )
        # if app is unpublished, user must be in the Org who owns the app.
        if not sentry_app.is_published:
            if not any(sentry_app.owner_id == org.id for org in organizations):
                raise Http404

        # TODO(meredith): make a better way to allow for public
        # endpoints. we can't use ensure_scoped_permission now
        # that the public endpoint isn't denoted by '()'
        if sentry_app.is_published and request.method == "GET":
            return True

        return ensure_scoped_permission(
            request, self._scopes_for_sentry_app(sentry_app).get(request.method)
        )

    def _scopes_for_sentry_app(self, sentry_app):
        # Pick the scope map matching the app's publication status.
        if sentry_app.is_published:
            return self.published_scope_map
        else:
            return self.unpublished_scope_map
+
+
class SentryAppAndStaffPermission(StaffPermissionMixin, SentryAppPermission):
    """Allows staff to access sentry app endpoints. Note that this is used for
    endpoints acting on a single sentry app only."""

    # NOTE(review): unlike SentryAppsAndStaffPermission there is no
    # staff_allowed_methods override here — presumably the mixin's default
    # applies; confirm in StaffPermissionMixin.
    pass
+
+
class SentryAppBaseEndpoint(IntegrationPlatformEndpoint):
    permission_classes: tuple[type[BasePermission], ...] = (SentryAppPermission,)

    def convert_args(
        self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
    ):
        # Resolve the app by numeric id or slug; unknown values 404.
        try:
            sentry_app = SentryApp.objects.get(slug__id_or_slug=sentry_app_id_or_slug)
        except SentryApp.DoesNotExist:
            raise Http404

        self.check_object_permissions(request, sentry_app)

        # Tag Sentry's own error reporting with the app being accessed.
        Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)

        kwargs["sentry_app"] = sentry_app
        return (args, kwargs)
+
+
class RegionSentryAppBaseEndpoint(IntegrationPlatformEndpoint):
    # Region-silo variant: resolves the app through the app_service RPC
    # rather than the ORM used by SentryAppBaseEndpoint.
    def convert_args(
        self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
    ):
        if str(sentry_app_id_or_slug).isdecimal():
            sentry_app = app_service.get_sentry_app_by_id(id=int(sentry_app_id_or_slug))
        else:
            sentry_app = app_service.get_sentry_app_by_slug(slug=sentry_app_id_or_slug)
        if sentry_app is None:
            raise Http404

        self.check_object_permissions(request, sentry_app)

        # Tag Sentry's own error reporting with the app being accessed.
        Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)

        kwargs["sentry_app"] = sentry_app
        return (args, kwargs)
+
+
class SentryAppInstallationsPermission(SentryPermission):
    # Required scopes per HTTP method for installation collection endpoints.
    scope_map = {
        "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
        "POST": ("org:integrations", "org:write", "org:admin"),
    }

    def has_object_permission(self, request: Request, view, organization):
        if not hasattr(request, "user") or not request.user:
            return False

        self.determine_access(request, organization)

        if superuser_has_permission(request):
            return True

        # Non-superusers must be a member of the organization in question.
        organizations = (
            user_service.get_organizations(user_id=request.user.id)
            if request.user.id is not None
            else ()
        )
        if not any(organization.id == org.id for org in organizations):
            raise Http404

        assert request.method, "method must be present in request to get permissions"
        return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
class SentryAppInstallationsBaseEndpoint(IntegrationPlatformEndpoint):
    permission_classes = (SentryAppInstallationsPermission,)

    def convert_args(self, request: Request, organization_id_or_slug, *args, **kwargs):
        extra_args = {}
        # We need to pass user_id if the user is not a superuser
        if not is_active_superuser(request):
            extra_args["user_id"] = request.user.id

        # The URL component may be either a numeric id or a slug.
        if str(organization_id_or_slug).isdecimal():
            organization = organization_service.get_org_by_id(
                id=int(organization_id_or_slug), **extra_args
            )
        else:
            organization = organization_service.get_org_by_slug(
                slug=str(organization_id_or_slug), **extra_args
            )

        if organization is None:
            raise Http404
        self.check_object_permissions(request, organization)

        kwargs["organization"] = organization
        return (args, kwargs)
+
+
class SentryAppInstallationPermission(SentryPermission):
    scope_map = {
        "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
        "DELETE": ("org:integrations", "org:write", "org:admin"),
        # NOTE(mn): The only POST endpoint right now is to create External
        # Issues, which uses this baseclass since it's nested under an
        # installation.
        #
        # The scopes below really only make sense for that endpoint. Any other
        # nested endpoints will probably need different scopes - figure out how
        # to deal with that when it happens.
        "POST": ("org:integrations", "event:write", "event:admin"),
    }

    def has_permission(self, request: Request, *args, **kwargs):
        # To let the app mark the installation as installed, we don't care about permissions
        if (
            hasattr(request, "user")
            and hasattr(request.user, "is_sentry_app")
            and request.user.is_sentry_app
            and request.method == "PUT"
        ):
            return True
        return super().has_permission(request, *args, **kwargs)

    def has_object_permission(self, request: Request, view, installation):
        if not hasattr(request, "user") or not request.user or not request.user.is_authenticated:
            return False

        self.determine_access(request, installation.organization_id)

        if superuser_has_permission(request):
            return True

        # if user is an app, make sure it's for that same app
        if request.user.is_sentry_app:
            return request.user.id == installation.sentry_app.proxy_user_id

        # Otherwise the user must be an active member of the installation's org.
        org_context = organization_service.get_organization_by_id(
            id=installation.organization_id,
            user_id=request.user.id,
            include_teams=False,
            include_projects=False,
        )
        if (
            not org_context
            or not org_context.member
            or org_context.organization.status != OrganizationStatus.ACTIVE
        ):
            raise Http404

        assert request.method, "method must be present in request to get permissions"
        return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
class SentryAppInstallationBaseEndpoint(IntegrationPlatformEndpoint):
    permission_classes: tuple[type[BasePermission], ...] = (SentryAppInstallationPermission,)

    def convert_args(self, request: Request, uuid, *args, **kwargs):
        # Installations are addressed by uuid; look them up via the app RPC service.
        installations = app_service.get_many(filter=dict(uuids=[uuid]))
        installation = installations[0] if installations else None
        if installation is None:
            raise Http404

        self.check_object_permissions(request, installation)

        # Tag Sentry's own error reporting with the installation being accessed.
        Scope.get_isolation_scope().set_tag("sentry_app_installation", installation.uuid)

        kwargs["installation"] = installation
        return (args, kwargs)
+
+
class SentryAppInstallationExternalIssuePermission(SentryAppInstallationPermission):
    # External issues: creation needs event scopes; deletion is admin-only.
    scope_map = {
        "POST": ("event:read", "event:write", "event:admin"),
        "DELETE": ("event:admin",),
    }
+
+
class SentryAppInstallationExternalIssueBaseEndpoint(SentryAppInstallationBaseEndpoint):
    # Same uuid-based installation resolution; narrower, issue-specific scopes.
    permission_classes = (SentryAppInstallationExternalIssuePermission,)
+
+
class SentryAppAuthorizationsPermission(SentryPermission):
    # Only the app's own proxy user (authenticating with client id/secret) may
    # access authorization endpoints.
    def has_object_permission(self, request: Request, view, installation):
        if not hasattr(request, "user") or not request.user:
            return False

        installation_org_context = organization_service.get_organization_by_id(
            id=installation.organization_id, user_id=request.user.id
        )
        assert installation_org_context, "organization for installation was not found"
        self.determine_access(request, installation_org_context)

        if not request.user.is_authenticated or not request.user.is_sentry_app:
            return False

        # Request must be made as the app's Proxy User, using their Client ID
        # and Secret.
        return request.user.id == installation.sentry_app.proxy_user_id
+
+
class SentryAppAuthorizationsBaseEndpoint(SentryAppInstallationBaseEndpoint):
    # Authenticated via the app's client id/secret rather than a user session.
    authentication_classes = (ClientIdSecretAuthentication,)
    permission_classes = (SentryAppAuthorizationsPermission,)
+
+
class SentryInternalAppTokenPermission(SentryPermission):
    # Internal app token management requires org write access for every method.
    scope_map = {
        "GET": ("org:write", "org:admin"),
        "POST": ("org:write", "org:admin"),
        "DELETE": ("org:write", "org:admin"),
    }

    def has_object_permission(self, request: Request, view, sentry_app):
        if not hasattr(request, "user") or not request.user:
            return False

        owner_app = organization_service.get_organization_by_id(
            id=sentry_app.owner_id, user_id=request.user.id
        )

        assert owner_app, "Failed to get organization/owner_app to check in has_object_permission"
        self.determine_access(request, owner_app)

        if superuser_has_permission(request):
            return True

        assert request.method, "method must be present in request to get permissions"
        return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
class SentryAppStatsPermission(SentryPermission):
    scope_map = {
        "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
        # Anyone logged in can increment the stats, so leave the scopes empty
        # Note: this only works for session-based auth so you cannot increment stats through API
        "POST": (),
    }

    def has_object_permission(self, request: Request, view, sentry_app: SentryApp | RpcSentryApp):
        if not hasattr(request, "user") or not request.user:
            return False

        owner_app = organization_service.get_organization_by_id(
            id=sentry_app.owner_id, user_id=request.user.id
        )
        # Unlike the sibling permission classes, a missing owner org is logged
        # and denied here rather than asserted.
        if owner_app is None:
            logger.error(
                "sentry_app_stats.permission_org_not_found",
                extra={
                    "sentry_app_id": sentry_app.id,
                    "owner_org_id": sentry_app.owner_id,
                    "user_id": request.user.id,
                },
            )
            return False
        self.determine_access(request, owner_app)

        # NOTE(review): uses is_active_superuser rather than the
        # superuser_has_permission helper used elsewhere in this module —
        # confirm this difference is intentional.
        if is_active_superuser(request):
            return True

        assert request.method, "method must be present in request to get permissions"
        return ensure_scoped_permission(request, self.scope_map.get(request.method))
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_details.py b/src/sentry/sentry_apps/api/endpoints/installation_details.py
index 8d124ffb2814a..7e7fb50bce6f6 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_details.py
@@ -8,11 +8,16 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppInstallationBaseEndpoint
from sentry.api.serializers import serialize
-from sentry.api.serializers.rest_framework import SentryAppInstallationSerializer
-from sentry.mediators.sentry_app_installations.installation_notifier import InstallationNotifier
-from sentry.mediators.sentry_app_installations.updater import Updater
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationBaseEndpoint
+from sentry.sentry_apps.api.parsers.sentry_app_installation import SentryAppInstallationParser
+from sentry.sentry_apps.api.serializers.sentry_app_installation import (
+ SentryAppInstallationSerializer,
+)
+from sentry.sentry_apps.installations import (
+ SentryAppInstallationNotifier,
+ SentryAppInstallationUpdater,
+)
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
from sentry.utils.audit import create_audit_entry
@@ -28,44 +33,59 @@ class SentryAppInstallationDetailsEndpoint(SentryAppInstallationBaseEndpoint):
def get(self, request: Request, installation) -> Response:
return Response(
- serialize(SentryAppInstallation.objects.get(id=installation.id), access=request.access)
+ serialize(
+ objects=SentryAppInstallation.objects.get(id=installation.id),
+ access=request.access,
+ serializer=SentryAppInstallationSerializer(),
+ )
)
def delete(self, request: Request, installation) -> Response:
- installation = SentryAppInstallation.objects.get(id=installation.id)
+ sentry_app_installation = SentryAppInstallation.objects.get(id=installation.id)
with transaction.atomic(using=router.db_for_write(SentryAppInstallation)):
try:
- InstallationNotifier.run(install=installation, user=request.user, action="deleted")
+ assert (
+ request.user.is_authenticated
+ ), "User must be authenticated to delete installation"
+ SentryAppInstallationNotifier(
+ sentry_app_installation=sentry_app_installation,
+ user=request.user,
+ action="deleted",
+ ).run()
# if the error is from a request exception, log the error and continue
except RequestException as exc:
sentry_sdk.capture_exception(exc)
- deletions.exec_sync(installation)
+ deletions.exec_sync(sentry_app_installation)
create_audit_entry(
request=request,
- organization_id=installation.organization_id,
- target_object=installation.organization_id,
+ organization_id=sentry_app_installation.organization_id,
+ target_object=sentry_app_installation.organization_id,
event=audit_log.get_event_id("SENTRY_APP_UNINSTALL"),
- data={"sentry_app": installation.sentry_app.name},
+ data={"sentry_app": sentry_app_installation.sentry_app.name},
)
analytics.record(
"sentry_app.uninstalled",
user_id=request.user.id,
- organization_id=installation.organization_id,
- sentry_app=installation.sentry_app.slug,
+ organization_id=sentry_app_installation.organization_id,
+ sentry_app=sentry_app_installation.sentry_app.slug,
)
return Response(status=204)
def put(self, request: Request, installation) -> Response:
- serializer = SentryAppInstallationSerializer(installation, data=request.data, partial=True)
+ serializer = SentryAppInstallationParser(installation, data=request.data, partial=True)
if serializer.is_valid():
result = serializer.validated_data
- Updater.run(
- user=request.user, sentry_app_installation=installation, status=result.get("status")
- )
+ SentryAppInstallationUpdater(
+ sentry_app_installation=installation, status=result.get("status")
+ ).run()
return Response(
- serialize(SentryAppInstallation.objects.get(id=installation.id), request.user)
+ serialize(
+ objects=SentryAppInstallation.objects.get(id=installation.id),
+ user=request.user,
+ serializer=SentryAppInstallationSerializer(),
+ )
)
return Response(serializer.errors, status=400)
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py
index 1bd6397d2f0b0..068fea248ed36 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py
@@ -5,11 +5,14 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import SentryAppInstallationBaseEndpoint
from sentry.api.serializers import serialize
-from sentry.mediators.external_issues.issue_link_creator import IssueLinkCreator
from sentry.models.group import Group
from sentry.models.project import Project
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationBaseEndpoint
+from sentry.sentry_apps.api.serializers.platform_external_issue import (
+ PlatformExternalIssueSerializer,
+)
+from sentry.sentry_apps.external_issues.issue_link_creator import IssueLinkCreator
from sentry.users.models.user import User
from sentry.users.services.user.serial import serialize_rpc_user
@@ -60,15 +63,17 @@ def post(self, request: Request, installation) -> Response:
if isinstance(user, User):
user = serialize_rpc_user(user)
- external_issue = IssueLinkCreator.run(
+ external_issue = IssueLinkCreator(
install=installation,
group=group,
action=action,
fields=data,
uri=uri,
user=user,
- )
+ ).run()
except Exception:
return Response({"error": "Error communicating with Sentry App service"}, status=400)
- return Response(serialize(external_issue))
+ return Response(
+ serialize(objects=external_issue, serializer=PlatformExternalIssueSerializer())
+ )
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py
index 3197a29ad231d..3bbd636b84623 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py
@@ -4,10 +4,10 @@
from sentry import deletions
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import (
+from sentry.sentry_apps.api.bases.sentryapps import (
SentryAppInstallationExternalIssueBaseEndpoint as ExternalIssueBaseEndpoint,
)
-from sentry.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
@region_silo_endpoint
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py b/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py
index 56d0c0c39e4fb..bd34f4ac9f95f 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py
@@ -5,14 +5,17 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import (
- SentryAppInstallationExternalIssueBaseEndpoint as ExternalIssueBaseEndpoint,
-)
from sentry.api.serializers import serialize
-from sentry.api.serializers.rest_framework import URLField
-from sentry.mediators.external_issues.creator import Creator
from sentry.models.group import Group
from sentry.models.project import Project
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppInstallationExternalIssueBaseEndpoint as ExternalIssueBaseEndpoint,
+)
+from sentry.sentry_apps.api.parsers.sentry_app import URLField
+from sentry.sentry_apps.api.serializers.platform_external_issue import (
+ PlatformExternalIssueSerializer as ResponsePlatformExternalIssueSerializer,
+)
+from sentry.sentry_apps.external_issues.external_issue_creator import ExternalIssueCreator
class PlatformExternalIssueSerializer(serializers.Serializer):
@@ -41,13 +44,17 @@ def post(self, request: Request, installation) -> Response:
serializer = PlatformExternalIssueSerializer(data=request.data)
if serializer.is_valid():
- external_issue = Creator.run(
+ external_issue = ExternalIssueCreator(
install=installation,
group=group,
web_url=data["webUrl"],
project=data["project"],
identifier=data["identifier"],
+ ).run()
+ return Response(
+ serialize(
+ objects=external_issue, serializer=ResponsePlatformExternalIssueSerializer()
+ )
)
- return Response(serialize(external_issue))
return Response(serializer.errors, status=400)
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py b/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py
index c064ca9f8c178..0565b1b592aec 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py
@@ -4,9 +4,9 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import SentryAppInstallationBaseEndpoint
-from sentry.mediators.external_requests.select_requester import SelectRequester
from sentry.models.project import Project
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationBaseEndpoint
+from sentry.sentry_apps.external_requests.select_requester import SelectRequester
@region_silo_endpoint
@@ -35,7 +35,7 @@ def get(self, request: Request, installation) -> Response:
kwargs.update({"project_slug": project.slug})
try:
- choices = SelectRequester.run(**kwargs)
+ choices = SelectRequester(**kwargs).run()
except Exception:
return Response({"error": "Error communicating with Sentry App service"}, status=400)
diff --git a/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py b/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py
index 1c89fd73ee2b3..13d6e9017fcc7 100644
--- a/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py
+++ b/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py
@@ -4,13 +4,16 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import add_integration_platform_metric_tag
from sentry.api.bases.organization import ControlSiloOrganizationEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
from sentry.constants import SentryAppStatus
from sentry.organizations.services.organization import RpcOrganization
from sentry.organizations.services.organization.model import RpcUserOrganizationContext
+from sentry.sentry_apps.api.bases.sentryapps import add_integration_platform_metric_tag
+from sentry.sentry_apps.api.serializers.sentry_app import (
+ SentryAppSerializer as ResponseSentryAppSerializer,
+)
from sentry.sentry_apps.models.sentry_app import SentryApp
@@ -39,5 +42,7 @@ def get(
queryset=queryset,
order_by="-date_added",
paginator_cls=OffsetPaginator,
- on_results=lambda x: serialize(x, request.user, access=request.access),
+ on_results=lambda x: serialize(
+ x, request.user, access=request.access, serializer=ResponseSentryAppSerializer()
+ ),
)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
index af2bf55457f3c..d705b2bd5a71e 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
@@ -7,13 +7,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppAuthorizationsBaseEndpoint
from sentry.api.serializers.models.apitoken import ApiTokenSerializer
from sentry.auth.services.auth.impl import promote_request_api_user
from sentry.coreapi import APIUnauthorized
-from sentry.mediators.token_exchange.grant_exchanger import GrantExchanger
from sentry.mediators.token_exchange.refresher import Refresher
from sentry.mediators.token_exchange.util import GrantTypes
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppAuthorizationsBaseEndpoint
+from sentry.sentry_apps.token_exchange.grant_exchanger import GrantExchanger
logger = logging.getLogger(__name__)
@@ -34,12 +34,12 @@ def post(self, request: Request, installation) -> Response:
try:
if request.json_body.get("grant_type") == GrantTypes.AUTHORIZATION:
- token = GrantExchanger.run(
+ token = GrantExchanger(
install=installation,
code=request.json_body.get("code"),
client_id=request.json_body.get("client_id"),
user=promote_request_api_user(request),
- )
+ ).run()
elif request.json_body.get("grant_type") == GrantTypes.REFRESH:
token = Refresher.run(
install=installation,
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
index 321d0552f0bd7..71894dc118204 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
@@ -4,10 +4,11 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint
from sentry.api.bases.avatar import AvatarMixin
-from sentry.api.serializers.rest_framework.sentry_app import SentryAppAvatarSerializer
from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint
+from sentry.sentry_apps.api.parsers.sentry_app_avatar import SentryAppAvatarParser
+from sentry.sentry_apps.api.serializers.sentry_app import SentryAppSerializer
@control_silo_endpoint
@@ -19,13 +20,17 @@ class SentryAppAvatarEndpoint(AvatarMixin[SentryAppAvatar], SentryAppBaseEndpoin
}
object_type = "sentry_app"
model = SentryAppAvatar
- serializer_cls = SentryAppAvatarSerializer
+ serializer_cls = SentryAppAvatarParser
def get(self, request: Request, **kwargs) -> Response:
- return super().get(request, access=request.access, **kwargs)
+ return super().get(
+ request, access=request.access, serializer=SentryAppSerializer(), **kwargs
+ )
def put(self, request: Request, **kwargs) -> Response:
- return super().put(request, access=request.access, **kwargs)
+ return super().put(
+ request, access=request.access, serializer=SentryAppSerializer(), **kwargs
+ )
def get_avatar_filename(self, obj):
return f"{obj.slug}.png"
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py
index c32e752689f58..e32767df4a994 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py
@@ -5,7 +5,6 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, add_integration_platform_metric_tag
from sentry.api.bases.organization import ControlSiloOrganizationEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
@@ -14,6 +13,11 @@
RpcOrganization,
RpcUserOrganizationContext,
)
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppBaseEndpoint,
+ add_integration_platform_metric_tag,
+)
+from sentry.sentry_apps.api.serializers.sentry_app_component import SentryAppComponentSerializer
from sentry.sentry_apps.components import SentryAppComponentPreparer
from sentry.sentry_apps.models.sentry_app_component import SentryAppComponent
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
@@ -34,7 +38,9 @@ def get(self, request: Request, sentry_app) -> Response:
request=request,
queryset=sentry_app.components.all(),
paginator_cls=OffsetPaginator,
- on_results=lambda x: serialize(x, request.user, errors=[]),
+ on_results=lambda x: serialize(
+ x, request.user, errors=[], serializer=SentryAppComponentSerializer()
+ ),
)
@@ -83,5 +89,7 @@ def get(
request=request,
queryset=components,
paginator_cls=OffsetPaginator,
- on_results=lambda x: serialize(x, request.user, errors=errors),
+ on_results=lambda x: serialize(
+ x, request.user, serializer=SentryAppComponentSerializer(), errors=errors
+ ),
)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
index 2d335e1e98629..09aae78ab8fbb 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
@@ -11,17 +11,20 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import (
- SentryAppAndStaffPermission,
- SentryAppBaseEndpoint,
- catch_raised_errors,
-)
from sentry.api.serializers import serialize
-from sentry.api.serializers.rest_framework import SentryAppSerializer
from sentry.auth.staff import is_active_staff
from sentry.constants import SentryAppStatus
-from sentry.mediators.sentry_app_installations.installation_notifier import InstallationNotifier
from sentry.organizations.services.organization import organization_service
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppAndStaffPermission,
+ SentryAppBaseEndpoint,
+ catch_raised_errors,
+)
+from sentry.sentry_apps.api.parsers.sentry_app import SentryAppParser
+from sentry.sentry_apps.api.serializers.sentry_app import (
+ SentryAppSerializer as ResponseSentryAppSerializer,
+)
+from sentry.sentry_apps.installations import SentryAppInstallationNotifier
from sentry.sentry_apps.logic import SentryAppUpdater
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
@@ -50,7 +53,14 @@ class SentryAppDetailsEndpoint(SentryAppBaseEndpoint):
permission_classes = (SentryAppDetailsEndpointPermission,)
def get(self, request: Request, sentry_app) -> Response:
- return Response(serialize(sentry_app, request.user, access=request.access))
+ return Response(
+ serialize(
+ sentry_app,
+ request.user,
+ access=request.access,
+ serializer=ResponseSentryAppSerializer(),
+ )
+ )
@catch_raised_errors
def put(self, request: Request, sentry_app) -> Response:
@@ -83,7 +93,7 @@ def put(self, request: Request, sentry_app) -> Response:
# isInternal is not field of our model but it is a field of the serializer
data = request.data.copy()
data["isInternal"] = sentry_app.status == SentryAppStatus.INTERNAL
- serializer = SentryAppSerializer(
+ serializer = SentryAppParser(
sentry_app,
data=data,
partial=True,
@@ -115,7 +125,14 @@ def put(self, request: Request, sentry_app) -> Response:
popularity=result.get("popularity"),
).run(user=request.user)
- return Response(serialize(updated_app, request.user, access=request.access))
+ return Response(
+ serialize(
+ updated_app,
+ request.user,
+ access=request.access,
+ serializer=ResponseSentryAppSerializer(),
+ )
+ )
# log any errors with schema
if "schema" in serializer.errors:
@@ -145,9 +162,12 @@ def delete(self, request: Request, sentry_app) -> Response:
for install in sentry_app.installations.all():
try:
with transaction.atomic(using=router.db_for_write(SentryAppInstallation)):
- InstallationNotifier.run(
- install=install, user=request.user, action="deleted"
- )
+ assert (
+ request.user.is_authenticated
+ ), "User must be authenticated to delete installation"
+ SentryAppInstallationNotifier(
+ sentry_app_installation=install, user=request.user, action="deleted"
+ ).run()
deletions.exec_sync(install)
except RequestException as exc:
sentry_sdk.capture_exception(exc)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py
index 93c345645f8fc..c4a7ff4efad00 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py
@@ -4,10 +4,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import SentryAppBaseEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
+from sentry.integrations.api.serializers.models.integration_feature import (
+ IntegrationFeatureSerializer,
+)
from sentry.integrations.models.integration_feature import IntegrationFeature, IntegrationTypes
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint
@control_silo_endpoint
@@ -26,5 +29,7 @@ def get(self, request: Request, sentry_app) -> Response:
request=request,
queryset=features,
paginator_cls=OffsetPaginator,
- on_results=lambda x: serialize(x, request.user),
+ on_results=lambda x: serialize(
+ x, request.user, serializer=IntegrationFeatureSerializer()
+ ),
)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py
index 6c82deffaf59c..4cdb526e11182 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py
@@ -6,7 +6,6 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppInstallationsBaseEndpoint
from sentry.api.fields.sentry_slug import SentrySerializerSlugField
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
@@ -14,6 +13,10 @@
from sentry.constants import SENTRY_APP_SLUG_MAX_LENGTH, SentryAppStatus
from sentry.features.exceptions import FeatureNotRegistered
from sentry.integrations.models.integration_feature import IntegrationFeature, IntegrationTypes
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationsBaseEndpoint
+from sentry.sentry_apps.api.serializers.sentry_app_installation import (
+ SentryAppInstallationSerializer,
+)
from sentry.sentry_apps.installations import SentryAppInstallationCreator
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
@@ -41,7 +44,9 @@ def get(self, request: Request, organization) -> Response:
queryset=queryset,
order_by="-date_added",
paginator_cls=OffsetPaginator,
- on_results=lambda x: serialize(x, request.user, access=request.access),
+ on_results=lambda x: serialize(
+ x, request.user, access=request.access, serializer=SentryAppInstallationSerializer()
+ ),
)
def post(self, request: Request, organization) -> Response:
@@ -99,4 +104,6 @@ def post(self, request: Request, organization) -> Response:
organization_id=organization.id, slug=slug, notify=True
).run(user=request.user, request=request)
- return Response(serialize(install, access=request.access))
+ return Response(
+ serialize(install, access=request.access, serializer=SentryAppInstallationSerializer())
+ )
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py
index ed3bd99d45015..819a0064bee14 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py
@@ -7,8 +7,11 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import StatsMixin, region_silo_endpoint
-from sentry.api.bases import RegionSentryAppBaseEndpoint, SentryAppStatsPermission
-from sentry.api.bases.sentryapps import COMPONENT_TYPES
+from sentry.sentry_apps.api.bases.sentryapps import (
+ COMPONENT_TYPES,
+ RegionSentryAppBaseEndpoint,
+ SentryAppStatsPermission,
+)
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.services.app import RpcSentryApp, app_service
from sentry.tsdb.base import TSDBModel
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py
index 63b096b75f41f..5bbf3cf7e05f8 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py
@@ -7,10 +7,10 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import COMPONENT_TYPES, SentryAppBaseEndpoint
from sentry.constants import SentryAppStatus
from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar, SentryAppAvatarTypes
from sentry.models.organizationmapping import OrganizationMapping
+from sentry.sentry_apps.api.bases.sentryapps import COMPONENT_TYPES, SentryAppBaseEndpoint
from sentry.sentry_apps.logic import SentryAppUpdater
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
@@ -58,8 +58,8 @@ def post(self, request: Request, sentry_app) -> Response:
status=400,
)
- assert isinstance(request.user, User) or isinstance(
- request.user, RpcUser
+ assert isinstance(
+ request.user, (User, RpcUser)
), "User must be authenticated to update a Sentry App"
SentryAppUpdater(
sentry_app=sentry_app,
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py
index 0d0b4c6db9073..b35d079c8a3dd 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py
@@ -9,10 +9,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import RegionSentryAppBaseEndpoint, SentryAppStatsPermission
from sentry.api.serializers import serialize
-from sentry.api.serializers.rest_framework import RequestSerializer
from sentry.models.organization import Organization
+from sentry.sentry_apps.api.bases.sentryapps import (
+ RegionSentryAppBaseEndpoint,
+ SentryAppStatsPermission,
+)
+from sentry.sentry_apps.api.serializers.request import RequestSerializer
from sentry.utils.sentry_apps import EXTENDED_VALID_EVENTS, SentryAppWebhookRequestsBuffer
INVALID_DATE_FORMAT_MESSAGE = "Invalid date format. Format must be YYYY-MM-DD HH:MM:SS."
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
index fa171c6f96ca9..c53b5504a9bb1 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
@@ -8,13 +8,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import SentryAppBaseEndpoint
from sentry.api.permissions import SentryPermission
from sentry.api.serializers import serialize
from sentry.auth.superuser import superuser_has_permission
from sentry.constants import SentryAppStatus
from sentry.models.apiapplication import generate_token
from sentry.organizations.services.organization import organization_service
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.users.services.user.service import user_service
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/stats/details.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_stats_details.py
similarity index 83%
rename from src/sentry/api/endpoints/integrations/sentry_apps/stats/details.py
rename to src/sentry/sentry_apps/api/endpoints/sentry_app_stats_details.py
index 34dc8f7fb9ce7..f3d1119407e0c 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/stats/details.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_stats_details.py
@@ -4,7 +4,7 @@
from sentry import tsdb
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import StatsMixin, control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, SentryAppStatsPermission
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint, SentryAppStatsPermission
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
@@ -34,18 +34,20 @@ def get(self, request: Request, sentry_app) -> Response:
sentry_app_id=sentry_app.id, date_deleted__isnull=False
).count()
- rollup, series = tsdb.get_optimal_rollup_series(query_args["start"], query_args["end"])
+ rollup, series = tsdb.backend.get_optimal_rollup_series(
+ query_args["start"], query_args["end"]
+ )
install_stats = dict.fromkeys(series, 0)
uninstall_stats = dict.fromkeys(series, 0)
for date_added, date_deleted, organization_id in installations:
- install_norm_epoch = tsdb.normalize_to_epoch(date_added, rollup)
+ install_norm_epoch = tsdb.backend.normalize_to_epoch(date_added, rollup)
if install_norm_epoch in install_stats:
install_stats[install_norm_epoch] += 1
if date_deleted is not None:
- uninstall_norm_epoch = tsdb.normalize_to_epoch(date_deleted, rollup)
+ uninstall_norm_epoch = tsdb.backend.normalize_to_epoch(date_deleted, rollup)
if uninstall_norm_epoch in uninstall_stats:
uninstall_stats[uninstall_norm_epoch] += 1
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
index 73582331055df..efa42e1c3752e 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
@@ -9,13 +9,16 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppsBaseEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
-from sentry.api.serializers.rest_framework import SentryAppSerializer
from sentry.auth.staff import is_active_staff
from sentry.auth.superuser import is_active_superuser
from sentry.constants import SentryAppStatus
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppsBaseEndpoint
+from sentry.sentry_apps.api.parsers.sentry_app import SentryAppParser
+from sentry.sentry_apps.api.serializers.sentry_app import (
+ SentryAppSerializer as ResponseSentryAppSerializer,
+)
from sentry.sentry_apps.logic import SentryAppCreator
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.users.models.user import User
@@ -73,7 +76,9 @@ def get(self, request: Request) -> Response:
queryset=queryset,
order_by="-date_added",
paginator_cls=OffsetPaginator,
- on_results=lambda x: serialize(x, request.user, access=request.access),
+ on_results=lambda x: serialize(
+ x, request.user, access=request.access, serializer=ResponseSentryAppSerializer()
+ ),
)
def post(self, request: Request, organization) -> Response:
@@ -109,7 +114,7 @@ def post(self, request: Request, organization) -> Response:
status=403,
)
- serializer = SentryAppSerializer(data=data, access=request.access)
+ serializer = SentryAppParser(data=data, access=request.access)
if serializer.is_valid():
if data.get("isInternal"):
@@ -141,7 +146,12 @@ def post(self, request: Request, organization) -> Response:
# we generate and validate the slug here instead of the serializer since the slug never changes
return Response(e.detail, status=400)
- return Response(serialize(sentry_app, access=request.access), status=201)
+ return Response(
+ serialize(
+ sentry_app, access=request.access, serializer=ResponseSentryAppSerializer()
+ ),
+ status=201,
+ )
# log any errors with schema
if "schema" in serializer.errors:
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/stats/index.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py
similarity index 91%
rename from src/sentry/api/endpoints/integrations/sentry_apps/stats/index.py
rename to src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py
index c3b9b63732d6f..fac06f21cbccb 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/stats/index.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py
@@ -5,10 +5,10 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppsBaseEndpoint
from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission
from sentry.api.serializers import serialize
from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppsBaseEndpoint
from sentry.sentry_apps.models.sentry_app import SentryApp
@@ -23,7 +23,7 @@ class SentryAppsStatsEndpoint(SentryAppsBaseEndpoint):
def get(self, request: Request) -> Response:
sentry_apps = (
SentryApp.objects.filter(installations__date_deleted=None)
- .annotate(Count("installations"))
+ .annotate(installations__count=Count("installations"))
.order_by("-installations__count")
)
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/details.py b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_token_details.py
similarity index 95%
rename from src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/details.py
rename to src/sentry/sentry_apps/api/endpoints/sentry_internal_app_token_details.py
index 13b86f04d78b3..9af2f110bcfb2 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/details.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_token_details.py
@@ -8,8 +8,11 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, SentryInternalAppTokenPermission
from sentry.models.apitoken import ApiToken
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppBaseEndpoint,
+ SentryInternalAppTokenPermission,
+)
from sentry.sentry_apps.api.endpoints.sentry_app_details import PARTNERSHIP_RESTRICTED_ERROR_MESSAGE
from sentry.sentry_apps.models.sentry_app_installation_token import SentryAppInstallationToken
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/index.py b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_tokens.py
similarity index 89%
rename from src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/index.py
rename to src/sentry/sentry_apps/api/endpoints/sentry_internal_app_tokens.py
index 932ace3fd120d..826f2f7c686a6 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/index.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_tokens.py
@@ -6,14 +6,19 @@
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.authentication import SessionNoAuthTokenAuthentication
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, SentryInternalAppTokenPermission
from sentry.api.serializers.models.apitoken import ApiTokenSerializer
from sentry.exceptions import ApiTokenLimitError
from sentry.models.apitoken import ApiToken
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppBaseEndpoint,
+ SentryInternalAppTokenPermission,
+)
from sentry.sentry_apps.api.endpoints.sentry_app_details import PARTNERSHIP_RESTRICTED_ERROR_MESSAGE
from sentry.sentry_apps.installations import SentryAppInstallationTokenCreator
from sentry.sentry_apps.models.sentry_app import MASKED_VALUE
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
+from sentry.users.models.user import User
+from sentry.users.services.user.model import RpcUser
@control_silo_endpoint
@@ -59,6 +64,9 @@ def post(self, request: Request, sentry_app) -> Response:
)
sentry_app_installation = SentryAppInstallation.objects.get(sentry_app_id=sentry_app.id)
try:
+ assert isinstance(
+ request.user, (User, RpcUser)
+ ), "User must be authenticated to install a sentry app"
api_token = SentryAppInstallationTokenCreator(
sentry_app_installation=sentry_app_installation
).run(request=request, user=request.user)
diff --git a/src/sentry/api/validators/sentry_apps/schema.py b/src/sentry/sentry_apps/api/parsers/schema.py
similarity index 100%
rename from src/sentry/api/validators/sentry_apps/schema.py
rename to src/sentry/sentry_apps/api/parsers/schema.py
diff --git a/src/sentry/api/serializers/rest_framework/sentry_app.py b/src/sentry/sentry_apps/api/parsers/sentry_app.py
similarity index 86%
rename from src/sentry/api/serializers/rest_framework/sentry_app.py
rename to src/sentry/sentry_apps/api/parsers/sentry_app.py
index 4e6c4adaef73f..c2b987e4270bb 100644
--- a/src/sentry/api/serializers/rest_framework/sentry_app.py
+++ b/src/sentry/sentry_apps/api/parsers/sentry_app.py
@@ -2,17 +2,15 @@
from rest_framework import serializers
from rest_framework.serializers import Serializer, ValidationError
-from sentry.api.fields.avatar import AvatarField
from sentry.api.serializers.rest_framework.base import camel_to_snake_case
-from sentry.api.validators.sentry_apps.schema import validate_ui_element_schema
from sentry.integrations.models.integration_feature import Feature
from sentry.models.apiscopes import ApiScopes
+from sentry.sentry_apps.api.parsers.schema import validate_ui_element_schema
from sentry.sentry_apps.models.sentry_app import (
REQUIRED_EVENT_PERMISSIONS,
UUID_CHARS_IN_SLUG,
VALID_EVENT_RESOURCES,
)
-from sentry.utils.avatar import is_black_alpha_only
class ApiScopesField(serializers.Field):
@@ -68,7 +66,7 @@ def to_internal_value(self, url):
return url
-class SentryAppSerializer(Serializer):
+class SentryAppParser(Serializer):
name = serializers.CharField()
author = serializers.CharField(required=False, allow_null=True)
scopes = ApiScopesField(allow_null=True)
@@ -179,28 +177,3 @@ def validate(self, attrs):
raise ValidationError({"author": "author required for public integrations"})
return attrs
-
-
-class SentryAppAvatarSerializer(Serializer):
- avatar_photo = AvatarField(required=False, is_sentry_app=True)
- avatar_type = serializers.ChoiceField(choices=(("default", "default"), ("upload", "upload")))
- color = serializers.BooleanField(required=True)
-
- def validate(self, attrs):
- attrs = super().validate(attrs)
-
- if attrs.get("avatar_type") == "upload" and not attrs.get("avatar_photo"):
- raise serializers.ValidationError({"avatar_photo": "A logo is required."})
-
- if (
- not attrs.get("color")
- and attrs.get("avatar_type") == "upload"
- and not is_black_alpha_only(attrs.get("avatar_photo"))
- ):
- raise serializers.ValidationError(
- {
- "avatar_photo": "The icon must only use black and should contain an alpha channel."
- }
- )
-
- return attrs
diff --git a/src/sentry/sentry_apps/api/parsers/sentry_app_avatar.py b/src/sentry/sentry_apps/api/parsers/sentry_app_avatar.py
new file mode 100644
index 0000000000000..efc7ee8cd65a9
--- /dev/null
+++ b/src/sentry/sentry_apps/api/parsers/sentry_app_avatar.py
@@ -0,0 +1,30 @@
+from rest_framework import serializers
+from rest_framework.serializers import Serializer
+
+from sentry.api.fields.avatar import AvatarField
+from sentry.utils.avatar import is_black_alpha_only
+
+
+class SentryAppAvatarParser(Serializer):
+ avatar_photo = AvatarField(required=False, is_sentry_app=True)
+ avatar_type = serializers.ChoiceField(choices=(("default", "default"), ("upload", "upload")))
+ color = serializers.BooleanField(required=True)
+
+ def validate(self, attrs):
+ attrs = super().validate(attrs)
+
+ if attrs.get("avatar_type") == "upload" and not attrs.get("avatar_photo"):
+ raise serializers.ValidationError({"avatar_photo": "A logo is required."})
+
+ if (
+ not attrs.get("color")
+ and attrs.get("avatar_type") == "upload"
+ and not is_black_alpha_only(attrs.get("avatar_photo"))
+ ):
+ raise serializers.ValidationError(
+ {
+ "avatar_photo": "The icon must only use black and should contain an alpha channel."
+ }
+ )
+
+ return attrs
diff --git a/src/sentry/api/serializers/rest_framework/sentry_app_installation.py b/src/sentry/sentry_apps/api/parsers/sentry_app_installation.py
similarity index 91%
rename from src/sentry/api/serializers/rest_framework/sentry_app_installation.py
rename to src/sentry/sentry_apps/api/parsers/sentry_app_installation.py
index 2172ccc61706a..0a69016461345 100644
--- a/src/sentry/api/serializers/rest_framework/sentry_app_installation.py
+++ b/src/sentry/sentry_apps/api/parsers/sentry_app_installation.py
@@ -4,7 +4,7 @@
from sentry.constants import SentryAppInstallationStatus
-class SentryAppInstallationSerializer(Serializer):
+class SentryAppInstallationParser(Serializer):
status = serializers.CharField()
def validate_status(self, new_status):
diff --git a/src/sentry/api/validators/servicehook.py b/src/sentry/sentry_apps/api/parsers/servicehook.py
similarity index 100%
rename from src/sentry/api/validators/servicehook.py
rename to src/sentry/sentry_apps/api/parsers/servicehook.py
diff --git a/src/sentry/api/serializers/models/app_platform_event.py b/src/sentry/sentry_apps/api/serializers/app_platform_event.py
similarity index 100%
rename from src/sentry/api/serializers/models/app_platform_event.py
rename to src/sentry/sentry_apps/api/serializers/app_platform_event.py
diff --git a/src/sentry/api/serializers/models/platformexternalissue.py b/src/sentry/sentry_apps/api/serializers/platform_external_issue.py
similarity index 83%
rename from src/sentry/api/serializers/models/platformexternalissue.py
rename to src/sentry/sentry_apps/api/serializers/platform_external_issue.py
index 109a4cc900252..e0c2398b78e08 100644
--- a/src/sentry/api/serializers/models/platformexternalissue.py
+++ b/src/sentry/sentry_apps/api/serializers/platform_external_issue.py
@@ -1,5 +1,5 @@
from sentry.api.serializers import Serializer, register
-from sentry.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
@register(PlatformExternalIssue)
diff --git a/src/sentry/api/serializers/rest_framework/sentry_app_request.py b/src/sentry/sentry_apps/api/serializers/request.py
similarity index 94%
rename from src/sentry/api/serializers/rest_framework/sentry_app_request.py
rename to src/sentry/sentry_apps/api/serializers/request.py
index fb75908ea6f33..8d8b22c470af4 100644
--- a/src/sentry/api/serializers/rest_framework/sentry_app_request.py
+++ b/src/sentry/sentry_apps/api/serializers/request.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from collections.abc import Mapping, MutableMapping
+from collections.abc import Mapping, MutableMapping, Sequence
from typing import Any
from django.urls import reverse
@@ -17,7 +17,9 @@ class RequestSerializer(Serializer):
def __init__(self, sentry_app: SentryApp) -> None:
self.sentry_app = sentry_app
- def get_attrs(self, item_list: list[Any], user: Any, **kwargs: Any) -> MutableMapping[Any, Any]:
+ def get_attrs(
+ self, item_list: Sequence[Any], user: Any, **kwargs: Any
+ ) -> MutableMapping[Any, Any]:
project_ids = {item.data.get("project_id") for item in item_list}
projects = Project.objects.filter(id__in=project_ids)
projects_by_id = {project.id: project for project in projects}
diff --git a/src/sentry/api/serializers/models/sentry_app.py b/src/sentry/sentry_apps/api/serializers/sentry_app.py
similarity index 86%
rename from src/sentry/api/serializers/models/sentry_app.py
rename to src/sentry/sentry_apps/api/serializers/sentry_app.py
index 64851baa6652d..fcc24d7deaddd 100644
--- a/src/sentry/api/serializers/models/sentry_app.py
+++ b/src/sentry/sentry_apps/api/serializers/sentry_app.py
@@ -1,4 +1,4 @@
-from collections.abc import Mapping
+from collections.abc import Mapping, Sequence
from datetime import timedelta
from typing import Any
@@ -14,6 +14,9 @@
from sentry.models.apiapplication import ApiApplication
from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar
from sentry.organizations.services.organization import organization_service
+from sentry.sentry_apps.api.serializers.sentry_app_avatar import (
+ SentryAppAvatarSerializer as ResponseSentryAppAvatarSerializer,
+)
from sentry.sentry_apps.models.sentry_app import MASKED_VALUE, SentryApp
from sentry.users.models.user import User
from sentry.users.services.user.service import user_service
@@ -21,7 +24,7 @@
@register(SentryApp)
class SentryAppSerializer(Serializer):
- def get_attrs(self, item_list: list[SentryApp], user: User, **kwargs: Any):
+ def get_attrs(self, item_list: Sequence[SentryApp], user: User, **kwargs: Any):
# Get associated IntegrationFeatures
app_feature_attrs = IntegrationFeature.objects.get_by_targets_as_dict(
targets=item_list, target_type=IntegrationTypes.SENTRY_APP
@@ -54,7 +57,7 @@ def get_attrs(self, item_list: list[SentryApp], user: User, **kwargs: Any):
for item in item_list
}
- def serialize(self, obj, attrs, user, access):
+ def serialize(self, obj: SentryApp, attrs: Mapping[str, Any], user: User, **kwargs: Any):
from sentry.sentry_apps.logic import consolidate_events
application = attrs["application"]
@@ -62,7 +65,11 @@ def serialize(self, obj, attrs, user, access):
data = {
"allowedOrigins": application.get_allowed_origins(),
"author": obj.author,
- "avatars": serialize(attrs.get("avatars"), user),
+ "avatars": serialize(
+ objects=attrs.get("avatars"),
+ user=user,
+ serializer=ResponseSentryAppAvatarSerializer(),
+ ),
"events": consolidate_events(obj.events),
"featureData": [],
"isAlertable": obj.is_alertable,
@@ -81,7 +88,7 @@ def serialize(self, obj, attrs, user, access):
}
if obj.status != SentryAppStatus.INTERNAL:
- data["featureData"] = [serialize(x, user) for x in attrs.get("features")]
+ data["featureData"] = [serialize(x, user) for x in attrs.get("features", [])]
if obj.status == SentryAppStatus.PUBLISHED and obj.date_published:
data.update({"datePublished": obj.date_published})
@@ -101,9 +108,12 @@ def serialize(self, obj, attrs, user, access):
id=owner.id, user_id=user.id
)
+ assert obj.application, "Sentry App must have an associated ApiApplication"
+
client_secret = MASKED_VALUE
if elevated_user or (
owner_context
+ and owner_context.member
and "org:write" in owner_context.member.scopes
and obj.show_auth_info(owner_context.member)
):
diff --git a/src/sentry/api/serializers/models/sentry_app_avatar.py b/src/sentry/sentry_apps/api/serializers/sentry_app_avatar.py
similarity index 100%
rename from src/sentry/api/serializers/models/sentry_app_avatar.py
rename to src/sentry/sentry_apps/api/serializers/sentry_app_avatar.py
diff --git a/src/sentry/api/serializers/models/sentry_app_component.py b/src/sentry/sentry_apps/api/serializers/sentry_app_component.py
similarity index 100%
rename from src/sentry/api/serializers/models/sentry_app_component.py
rename to src/sentry/sentry_apps/api/serializers/sentry_app_component.py
diff --git a/src/sentry/api/serializers/models/sentry_app_installation.py b/src/sentry/sentry_apps/api/serializers/sentry_app_installation.py
similarity index 100%
rename from src/sentry/api/serializers/models/sentry_app_installation.py
rename to src/sentry/sentry_apps/api/serializers/sentry_app_installation.py
diff --git a/src/sentry/api/serializers/models/servicehook.py b/src/sentry/sentry_apps/api/serializers/servicehook.py
similarity index 100%
rename from src/sentry/api/serializers/models/servicehook.py
rename to src/sentry/sentry_apps/api/serializers/servicehook.py
diff --git a/src/sentry/sentry_apps/components.py b/src/sentry/sentry_apps/components.py
index 024fda2964ca2..ffedd70c2ee35 100644
--- a/src/sentry/sentry_apps/components.py
+++ b/src/sentry/sentry_apps/components.py
@@ -8,7 +8,7 @@
from django.utils.encoding import force_str
from django.utils.http import urlencode
-from sentry.mediators.external_requests.select_requester import SelectRequester
+from sentry.sentry_apps.external_requests.select_requester import SelectRequester
from sentry.sentry_apps.models.sentry_app_component import SentryAppComponent
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
from sentry.sentry_apps.services.app.model import RpcSentryAppComponent, RpcSentryAppInstallation
@@ -105,9 +105,9 @@ def _request(self, uri: str, dependent_data: str | None = None) -> Any:
install = self.install
if isinstance(install, SentryAppInstallation):
install = serialize_sentry_app_installation(install, install.sentry_app)
- return SelectRequester.run(
+ return SelectRequester(
install=install,
project_slug=self.project_slug,
uri=uri,
dependent_data=dependent_data,
- )
+ ).run()
diff --git a/src/sentry/sentry_apps/external_issues/external_issue_creator.py b/src/sentry/sentry_apps/external_issues/external_issue_creator.py
new file mode 100644
index 0000000000000..105e27666f00f
--- /dev/null
+++ b/src/sentry/sentry_apps/external_issues/external_issue_creator.py
@@ -0,0 +1,29 @@
+from dataclasses import dataclass
+from html import escape
+
+from django.db import router, transaction
+
+from sentry.models.group import Group
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.services.app import RpcSentryAppInstallation
+
+
+@dataclass
+class ExternalIssueCreator:
+ install: RpcSentryAppInstallation
+ group: Group
+ web_url: str
+ project: str
+ identifier: str
+
+ def run(self) -> PlatformExternalIssue:
+ with transaction.atomic(using=router.db_for_write(PlatformExternalIssue)):
+ display_name = f"{escape(self.project)}#{escape(self.identifier)}"
+ self.external_issue = PlatformExternalIssue.objects.create(
+ group_id=self.group.id,
+ project_id=self.group.project_id,
+ service_type=self.install.sentry_app.slug,
+ display_name=display_name,
+ web_url=self.web_url,
+ )
+ return self.external_issue
diff --git a/src/sentry/sentry_apps/external_issues/issue_link_creator.py b/src/sentry/sentry_apps/external_issues/issue_link_creator.py
new file mode 100644
index 0000000000000..7c12f8608b801
--- /dev/null
+++ b/src/sentry/sentry_apps/external_issues/issue_link_creator.py
@@ -0,0 +1,57 @@
+from dataclasses import dataclass
+from typing import Any
+
+from django.db import router, transaction
+
+from sentry.coreapi import APIUnauthorized
+from sentry.models.group import Group
+from sentry.sentry_apps.external_issues.external_issue_creator import ExternalIssueCreator
+from sentry.sentry_apps.external_requests.issue_link_requester import IssueLinkRequester
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
+from sentry.sentry_apps.services.app import RpcSentryAppInstallation
+from sentry.users.services.user import RpcUser
+
+VALID_ACTIONS = ["link", "create"]
+
+
+@dataclass
+class IssueLinkCreator:
+ install: RpcSentryAppInstallation
+ group: Group
+ action: str
+ fields: dict[str, Any]
+ uri: str
+ user: RpcUser
+
+ def run(self) -> PlatformExternalIssue:
+ with transaction.atomic(using=router.db_for_write(PlatformExternalIssue)):
+ self._verify_action()
+ response = self._make_external_request()
+ external_issue = self._create_external_issue(response=response)
+ return external_issue
+
+ def _verify_action(self) -> None:
+ if self.action not in VALID_ACTIONS:
+ raise APIUnauthorized(f"Invalid action '{self.action}'")
+
+ def _make_external_request(self) -> dict[str, Any]:
+ response = IssueLinkRequester(
+ install=self.install,
+ uri=self.uri,
+ group=self.group,
+ fields=self.fields,
+ user=self.user,
+ action=self.action,
+ ).run()
+ return response
+
+ def _create_external_issue(self, response: dict[str, Any]) -> PlatformExternalIssue:
+ external_issue = ExternalIssueCreator(
+ install=self.install,
+ group=self.group,
+ web_url=response["webUrl"],
+ project=response["project"],
+ identifier=response["identifier"],
+ ).run()
+
+ return external_issue
diff --git a/src/sentry/mediators/external_requests/alert_rule_action_requester.py b/src/sentry/sentry_apps/external_requests/alert_rule_action_requester.py
similarity index 72%
rename from src/sentry/mediators/external_requests/alert_rule_action_requester.py
rename to src/sentry/sentry_apps/external_requests/alert_rule_action_requester.py
index 839e89baa4a30..a9249af4849f6 100644
--- a/src/sentry/mediators/external_requests/alert_rule_action_requester.py
+++ b/src/sentry/sentry_apps/external_requests/alert_rule_action_requester.py
@@ -1,77 +1,49 @@
import logging
+from collections.abc import Mapping, Sequence
+from dataclasses import dataclass, field
from typing import TypedDict
from urllib.parse import urlparse, urlunparse
from uuid import uuid4
-from django.db import router
from django.utils.functional import cached_property
from requests import RequestException
from requests.models import Response
-from sentry.mediators.external_requests.util import send_and_save_sentry_app_request
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
+from sentry.sentry_apps.external_requests.utils import send_and_save_sentry_app_request
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
+from sentry.sentry_apps.services.app.model import RpcSentryAppInstallation
from sentry.utils import json
-logger = logging.getLogger("sentry.mediators.external-requests")
-
DEFAULT_SUCCESS_MESSAGE = "Success!"
DEFAULT_ERROR_MESSAGE = "Something went wrong!"
+logger = logging.getLogger("sentry.sentry_apps.external_requests")
+
class AlertRuleActionResult(TypedDict):
success: bool
message: str
-class AlertRuleActionRequester(Mediator):
- """
- Makes a POST request to another service to fetch/update the values for each field in the
- AlertRuleAction settings schema
- """
-
- install = Param(SentryAppInstallation)
- uri = Param(str)
- fields = Param(list, required=False, default=[])
- http_method = Param(str, required=False, default="POST")
- using = router.db_for_write(SentryAppInstallation)
-
- def call(self):
- return self._make_request()
-
- def _build_url(self):
- urlparts = list(urlparse(self.sentry_app.webhook_url))
- urlparts[2] = self.uri
- return urlunparse(urlparts)
-
- def _get_response_message(self, response: Response | None, default_message: str) -> str:
- """
- Returns the message from the response body, if in the expected location.
- Used to bubble up info from the Sentry App to the UI.
- The location should be coordinated with the docs on Alert Rule Action UI Components.
- """
- if response is None:
- message = default_message
- else:
- try:
- message = response.json().get("message", default_message)
- except Exception:
- message = default_message
-
- return f"{self.sentry_app.name}: {message}"
+@dataclass
+class AlertRuleActionRequester:
+ install: SentryAppInstallation | RpcSentryAppInstallation
+ uri: str
+ fields: Sequence[Mapping[str, str]] = field(default_factory=list)
+ http_method: str | None = "POST"
- def _make_request(self) -> AlertRuleActionResult:
+ def run(self) -> AlertRuleActionResult:
try:
response = send_and_save_sentry_app_request(
- self._build_url(),
- self.sentry_app,
- self.install.organization_id,
- "alert_rule_action.requested",
+ url=self._build_url(),
+ sentry_app=self.sentry_app,
+ org_id=self.install.organization_id,
+ event="alert_rule_action.requested",
headers=self._build_headers(),
method=self.http_method,
data=self.body,
)
+
except RequestException as e:
logger.info(
"alert_rule_action.error",
@@ -82,6 +54,7 @@ def _make_request(self) -> AlertRuleActionResult:
"error_message": str(e),
},
)
+
return AlertRuleActionResult(
success=False, message=self._get_response_message(e.response, DEFAULT_ERROR_MESSAGE)
)
@@ -89,7 +62,12 @@ def _make_request(self) -> AlertRuleActionResult:
success=True, message=self._get_response_message(response, DEFAULT_SUCCESS_MESSAGE)
)
- def _build_headers(self):
+ def _build_url(self) -> str:
+ urlparts = list(urlparse(self.sentry_app.webhook_url))
+ urlparts[2] = self.uri
+ return urlunparse(urlparts)
+
+ def _build_headers(self) -> dict[str, str]:
request_uuid = uuid4().hex
return {
@@ -98,6 +76,22 @@ def _build_headers(self):
"Sentry-App-Signature": self.sentry_app.build_signature(self.body),
}
+ def _get_response_message(self, response: Response | None, default_message: str) -> str:
+ """
+ Returns the message from the response body, if in the expected location.
+ Used to bubble up info from the Sentry App to the UI.
+ The location should be coordinated with the docs on Alert Rule Action UI Components.
+ """
+ if response is None:
+ message = default_message
+ else:
+ try:
+ message = response.json().get("message", default_message)
+ except Exception:
+ message = default_message
+
+ return f"{self.sentry_app.name}: {message}"
+
@cached_property
def body(self):
return json.dumps(
diff --git a/src/sentry/mediators/external_requests/issue_link_requester.py b/src/sentry/sentry_apps/external_requests/issue_link_requester.py
similarity index 65%
rename from src/sentry/mediators/external_requests/issue_link_requester.py
rename to src/sentry/sentry_apps/external_requests/issue_link_requester.py
index f62e35341658c..9b4916dbaff33 100644
--- a/src/sentry/mediators/external_requests/issue_link_requester.py
+++ b/src/sentry/sentry_apps/external_requests/issue_link_requester.py
@@ -1,27 +1,25 @@
-from __future__ import annotations
-
import logging
+from dataclasses import dataclass
from typing import Any
from urllib.parse import urlparse
from uuid import uuid4
-from django.db import router
from django.utils.functional import cached_property
from sentry.coreapi import APIError
from sentry.http import safe_urlread
-from sentry.mediators.external_requests.util import send_and_save_sentry_app_request, validate
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
from sentry.models.group import Group
+from sentry.sentry_apps.external_requests.utils import send_and_save_sentry_app_request, validate
from sentry.sentry_apps.services.app import RpcSentryAppInstallation
from sentry.users.services.user import RpcUser
from sentry.utils import json
-logger = logging.getLogger("sentry.mediators.external-requests")
+logger = logging.getLogger("sentry.sentry_apps.external_requests")
+ACTION_TO_PAST_TENSE = {"create": "created", "link": "linked"}
-class IssueLinkRequester(Mediator):
+@dataclass
+class IssueLinkRequester:
"""
1. Makes a POST request to another service with data used for creating or
linking a Sentry issue to an issue in the other service.
@@ -51,36 +49,29 @@ class IssueLinkRequester(Mediator):
issue in the UI (i.e. #)
"""
- install = Param(RpcSentryAppInstallation)
- uri = Param(str)
- group = Param(Group)
- fields = Param(dict)
- user = Param(RpcUser)
- action = Param(str)
- using = router.db_for_write(Group)
-
- def call(self):
- return self._make_request()
-
- def _build_url(self):
- urlparts = urlparse(self.sentry_app.webhook_url)
- return f"{urlparts.scheme}://{urlparts.netloc}{self.uri}"
+ install: RpcSentryAppInstallation
+ uri: str
+ group: Group
+ fields: dict[str, Any]
+ user: RpcUser
+ action: str
- def _make_request(self):
- action_to_past_tense = {"create": "created", "link": "linked"}
+ def run(self) -> dict[str, Any]:
+ response: dict[str, str] = {}
try:
- req = send_and_save_sentry_app_request(
+ request = send_and_save_sentry_app_request(
self._build_url(),
self.sentry_app,
self.install.organization_id,
- f"external_issue.{action_to_past_tense[self.action]}",
+ f"external_issue.{ACTION_TO_PAST_TENSE[self.action]}",
headers=self._build_headers(),
method="POST",
data=self.body,
)
- body = safe_urlread(req)
+ body = safe_urlread(request)
response = json.loads(body)
+
except Exception as e:
logger.info(
"issue-link-requester.error",
@@ -93,17 +84,22 @@ def _make_request(self):
"error_message": str(e),
},
)
- response = {}
if not self._validate_response(response):
- raise APIError()
+ raise APIError(
+ f"Invalid response format from sentry app {self.sentry_app} when linking issue"
+ )
return response
- def _validate_response(self, resp):
+ def _build_url(self) -> str:
+ urlparts = urlparse(self.sentry_app.webhook_url)
+ return f"{urlparts.scheme}://{urlparts.netloc}{self.uri}"
+
+ def _validate_response(self, resp: dict[str, str]) -> bool:
return validate(instance=resp, schema_type="issue_link")
- def _build_headers(self):
+ def _build_headers(self) -> dict[str, str]:
request_uuid = uuid4().hex
return {
@@ -114,16 +110,16 @@ def _build_headers(self):
@cached_property
def body(self):
- body: dict[str, Any] = {"fields": {}}
- for name, value in self.fields.items():
- body["fields"][name] = value
-
- body["issueId"] = self.group.id
- body["installationId"] = self.install.uuid
- body["webUrl"] = self.group.get_absolute_url()
- project = self.group.project
- body["project"] = {"slug": project.slug, "id": project.id}
- body["actor"] = {"type": "user", "id": self.user.id, "name": self.user.name}
+ body: dict[str, Any] = {
+ "fields": {},
+ "issueId": self.group.id,
+ "installationId": self.install.uuid,
+ "webUrl": self.group.get_absolute_url(),
+ "project": {"slug": self.group.project.slug, "id": self.group.project.id},
+ "actor": {"type": "user", "id": self.user.id, "name": self.user.name},
+ }
+ body["fields"].update(self.fields)
+
return json.dumps(body)
@cached_property
diff --git a/src/sentry/mediators/external_requests/select_requester.py b/src/sentry/sentry_apps/external_requests/select_requester.py
similarity index 66%
rename from src/sentry/mediators/external_requests/select_requester.py
rename to src/sentry/sentry_apps/external_requests/select_requester.py
index 543b269a4db9f..14f9c6ba6b863 100644
--- a/src/sentry/mediators/external_requests/select_requester.py
+++ b/src/sentry/sentry_apps/external_requests/select_requester.py
@@ -1,4 +1,6 @@
import logging
+from dataclasses import dataclass, field
+from typing import Any
from urllib.parse import urlencode, urlparse, urlunparse
from uuid import uuid4
@@ -6,16 +8,16 @@
from sentry.coreapi import APIError
from sentry.http import safe_urlread
-from sentry.mediators.external_requests.util import send_and_save_sentry_app_request, validate
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
+from sentry.sentry_apps.external_requests.utils import send_and_save_sentry_app_request, validate
from sentry.sentry_apps.services.app import RpcSentryAppInstallation
+from sentry.sentry_apps.services.app.model import RpcSentryApp
from sentry.utils import json
-logger = logging.getLogger("sentry.mediators.external-requests")
+logger = logging.getLogger("sentry.sentry_apps.external_requests")
-class SelectRequester(Mediator):
+@dataclass
+class SelectRequester:
"""
1. Makes a GET request to another service to fetch data needed to populate
the SelectField dropdown in the UI.
@@ -25,35 +27,14 @@ class SelectRequester(Mediator):
2. Validates and formats the response.
"""
- install = Param(RpcSentryAppInstallation)
- project_slug = Param(str, required=False)
- uri = Param(str)
- query = Param(str, required=False)
- dependent_data = Param(str, required=False)
- using = None
+ install: RpcSentryAppInstallation
+ uri: str
+ project_slug: str | None = field(default=None)
+ query: str | None = field(default=None)
+ dependent_data: str | None = field(default=None)
- def call(self):
- return self._make_request()
-
- def _build_url(self):
- urlparts = list(urlparse(self.sentry_app.webhook_url))
- urlparts[2] = self.uri
-
- query = {"installationId": self.install.uuid}
-
- if self.project_slug:
- query["projectSlug"] = self.project_slug
-
- if self.query:
- query["query"] = self.query
-
- if self.dependent_data:
- query["dependentData"] = self.dependent_data
-
- urlparts[4] = urlencode(query)
- return urlunparse(urlparts)
-
- def _make_request(self):
+ def run(self) -> dict[str, Any]:
+ response: list[dict[str, str]] = []
try:
body = safe_urlread(
send_and_save_sentry_app_request(
@@ -77,34 +58,54 @@ def _make_request(self):
"error_message": str(e),
},
)
- response = {}
- if not self._validate_response(response):
- raise APIError()
+ if not self._validate_response(response) or not response:
+ raise APIError(
+ f"Invalid response format for SelectField in {self.sentry_app} from uri: {self.uri}"
+ )
return self._format_response(response)
- def _validate_response(self, resp):
+ def _build_url(self) -> str:
+ urlparts: list[str] = [url_part for url_part in urlparse(self.sentry_app.webhook_url)]
+ urlparts[2] = self.uri
+
+ query = {"installationId": self.install.uuid}
+
+ if self.project_slug:
+ query["projectSlug"] = self.project_slug
+
+ if self.query:
+ query["query"] = self.query
+
+ if self.dependent_data:
+ query["dependentData"] = self.dependent_data
+
+ urlparts[4] = urlencode(query)
+ return str(urlunparse(urlparts))
+
+ def _validate_response(self, resp: list[dict[str, Any]]) -> bool:
return validate(instance=resp, schema_type="select")
- def _format_response(self, resp):
+ def _format_response(self, resp: list[dict[str, Any]]) -> dict[str, Any]:
# the UI expects the following form:
# choices: [[label, value]]
# default: [label, value]
- response = {}
- choices = []
+ response: dict[str, Any] = {}
+ choices: list[list[str]] = []
for option in resp:
if not ("value" in option and "label" in option):
- raise APIError("Missing `value` or `label` in option data")
+ raise APIError("Missing `value` or `label` in option data for SelectField")
choices.append([option["value"], option["label"]])
+
if option.get("default"):
response["defaultValue"] = option["value"]
response["choices"] = choices
return response
- def _build_headers(self):
+ def _build_headers(self) -> dict[str, str]:
request_uuid = uuid4().hex
return {
@@ -114,5 +115,5 @@ def _build_headers(self):
}
@cached_property
- def sentry_app(self):
+ def sentry_app(self) -> RpcSentryApp:
return self.install.sentry_app
diff --git a/src/sentry/mediators/external_requests/util.py b/src/sentry/sentry_apps/external_requests/utils.py
similarity index 96%
rename from src/sentry/mediators/external_requests/util.py
rename to src/sentry/sentry_apps/external_requests/utils.py
index 13923f5943002..ad68475f3f156 100644
--- a/src/sentry/mediators/external_requests/util.py
+++ b/src/sentry/sentry_apps/external_requests/utils.py
@@ -7,6 +7,7 @@
from sentry.http import safe_urlopen
from sentry.sentry_apps.models.sentry_app import SentryApp, track_response_code
+from sentry.sentry_apps.services.app.model import RpcSentryApp
from sentry.utils.sentry_apps import SentryAppWebhookRequestsBuffer
from sentry.utils.sentry_apps.webhooks import TIMEOUT_STATUS_CODE
@@ -49,7 +50,7 @@ def validate(instance, schema_type):
def send_and_save_sentry_app_request(
url: str,
- sentry_app: SentryApp,
+ sentry_app: SentryApp | RpcSentryApp,
org_id: int,
event: str,
**kwargs: Any,
diff --git a/src/sentry/sentry_apps/installations.py b/src/sentry/sentry_apps/installations.py
index b28a3aa5c7ddc..383618749509d 100644
--- a/src/sentry/sentry_apps/installations.py
+++ b/src/sentry/sentry_apps/installations.py
@@ -8,19 +8,28 @@
from django.http.request import HttpRequest
from sentry import analytics, audit_log
+from sentry.api.serializers import serialize
from sentry.constants import INTERNAL_INTEGRATION_TOKEN_COUNT_MAX, SentryAppInstallationStatus
+from sentry.coreapi import APIUnauthorized
from sentry.exceptions import ApiTokenLimitError
from sentry.models.apiapplication import ApiApplication
from sentry.models.apigrant import ApiGrant
from sentry.models.apitoken import ApiToken
+from sentry.sentry_apps.api.serializers.app_platform_event import AppPlatformEvent
+from sentry.sentry_apps.api.serializers.sentry_app_installation import (
+ SentryAppInstallationSerializer,
+)
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
from sentry.sentry_apps.models.sentry_app_installation_token import SentryAppInstallationToken
from sentry.sentry_apps.services.hook import hook_service
-from sentry.tasks.sentry_apps import installation_webhook
+from sentry.sentry_apps.tasks.sentry_apps import installation_webhook
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
from sentry.utils import metrics
+from sentry.utils.sentry_apps import send_and_save_webhook_request
+
+VALID_ACTIONS = ["created", "deleted"]
@dataclasses.dataclass
@@ -174,3 +183,69 @@ def api_application(self) -> ApiApplication:
@cached_property
def sentry_app(self) -> SentryApp:
return SentryApp.objects.get(slug=self.slug)
+
+
+@dataclasses.dataclass
+class SentryAppInstallationNotifier:
+ sentry_app_installation: SentryAppInstallation
+ user: User | RpcUser
+ action: str
+
+ def run(self) -> None:
+ if self.action not in VALID_ACTIONS:
+ raise APIUnauthorized(
+                f"Invalid action '{self.action}' for installation notifier for {self.sentry_app}"
+ )
+
+ send_and_save_webhook_request(self.sentry_app, self.request)
+
+ @property
+ def request(self) -> AppPlatformEvent:
+ data = serialize(
+ self.sentry_app_installation,
+ user=self.user,
+ serializer=SentryAppInstallationSerializer(),
+ is_webhook=True,
+ )
+
+ return AppPlatformEvent(
+ resource="installation",
+ action=self.action,
+ install=self.sentry_app_installation,
+ data={"installation": data},
+ actor=self.user,
+ )
+
+ @cached_property
+ def sentry_app(self) -> SentryApp:
+ return self.sentry_app_installation.sentry_app
+
+ @cached_property
+ def api_grant(self) -> ApiGrant | None:
+ return self.sentry_app_installation.api_grant_id and self.sentry_app_installation.api_grant
+
+
+@dataclasses.dataclass
+class SentryAppInstallationUpdater:
+ sentry_app_installation: SentryAppInstallation
+ status: str | None = None
+
+ def run(self) -> SentryAppInstallation:
+ with transaction.atomic(router.db_for_write(SentryAppInstallation)):
+ self._update_status()
+ self.record_analytics()
+ return self.sentry_app_installation
+
+ def _update_status(self):
+ # convert from string to integer
+ if self.status == SentryAppInstallationStatus.INSTALLED_STR:
+ for install in SentryAppInstallation.objects.filter(id=self.sentry_app_installation.id):
+ install.update(status=SentryAppInstallationStatus.INSTALLED)
+
+ def record_analytics(self):
+ analytics.record(
+ "sentry_app_installation.updated",
+ sentry_app_installation_id=self.sentry_app_installation.id,
+ sentry_app_id=self.sentry_app_installation.sentry_app.id,
+ organization_id=self.sentry_app_installation.organization_id,
+ )
diff --git a/src/sentry/sentry_apps/logic.py b/src/sentry/sentry_apps/logic.py
index f99226d4b4b5e..bcdfa20503721 100644
--- a/src/sentry/sentry_apps/logic.py
+++ b/src/sentry/sentry_apps/logic.py
@@ -37,7 +37,7 @@
)
from sentry.sentry_apps.models.sentry_app_component import SentryAppComponent
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-from sentry.tasks.sentry_apps import create_or_update_service_hooks_for_sentry_app
+from sentry.sentry_apps.tasks.sentry_apps import create_or_update_service_hooks_for_sentry_app
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
from sentry.utils.sentry_apps.service_hook_manager import (
diff --git a/src/sentry/sentry_apps/models/__init__.py b/src/sentry/sentry_apps/models/__init__.py
index 0528893406958..4df7950fb9e52 100644
--- a/src/sentry/sentry_apps/models/__init__.py
+++ b/src/sentry/sentry_apps/models/__init__.py
@@ -1,3 +1,4 @@
+from .platformexternalissue import PlatformExternalIssue
from .sentry_app import SentryApp
from .sentry_app_component import SentryAppComponent
from .sentry_app_installation import SentryAppInstallation
@@ -12,4 +13,5 @@
"ServiceHook",
"SentryAppInstallationForProvider",
"SentryAppComponent",
+ "PlatformExternalIssue",
)
diff --git a/src/sentry/models/platformexternalissue.py b/src/sentry/sentry_apps/models/platformexternalissue.py
similarity index 100%
rename from src/sentry/models/platformexternalissue.py
rename to src/sentry/sentry_apps/models/platformexternalissue.py
diff --git a/src/sentry/sentry_apps/models/sentry_app.py b/src/sentry/sentry_apps/models/sentry_app.py
index 9b7f237212044..973c024066d4b 100644
--- a/src/sentry/sentry_apps/models/sentry_app.py
+++ b/src/sentry/sentry_apps/models/sentry_app.py
@@ -95,7 +95,7 @@ def get_alertable_sentry_apps(self, organization_id: int) -> QuerySet:
installations__date_deleted=None,
).distinct()
- def visible_for_user(self, request: Request) -> QuerySet:
+ def visible_for_user(self, request: Request) -> QuerySet["SentryApp"]:
from sentry.auth.superuser import is_active_superuser
if is_active_superuser(request):
diff --git a/src/sentry/sentry_apps/services/app/impl.py b/src/sentry/sentry_apps/services/app/impl.py
index 880be08e9c68c..74f3a5fa03348 100644
--- a/src/sentry/sentry_apps/services/app/impl.py
+++ b/src/sentry/sentry_apps/services/app/impl.py
@@ -6,11 +6,14 @@
from django.db.models import Q, QuerySet
-from sentry.api.serializers import SentryAppAlertRuleActionSerializer, Serializer, serialize
+from sentry.api.serializers import Serializer, serialize
from sentry.auth.services.auth import AuthenticationContext
from sentry.constants import SentryAppInstallationStatus, SentryAppStatus
from sentry.hybridcloud.rpc.filter_query import FilterQueryDatabaseImpl, OpaqueSerializedResponse
-from sentry.mediators import alert_rule_actions
+from sentry.sentry_apps.alert_rule_action_creator import AlertRuleActionCreator
+from sentry.sentry_apps.api.serializers.sentry_app_component import (
+ SentryAppAlertRuleActionSerializer,
+)
from sentry.sentry_apps.logic import SentryAppCreator
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_component import SentryAppComponent
@@ -252,7 +255,7 @@ def trigger_sentry_app_action_creators(
install = SentryAppInstallation.objects.get(uuid=install_uuid)
except SentryAppInstallation.DoesNotExist:
return RpcAlertRuleActionResult(success=False, message="Installation does not exist")
- result = alert_rule_actions.AlertRuleActionCreator.run(install=install, fields=fields)
+ result = AlertRuleActionCreator(install=install, fields=fields).run()
return RpcAlertRuleActionResult(success=result["success"], message=result["message"])
def find_service_hook_sentry_app(self, *, api_application_id: int) -> RpcSentryApp | None:
diff --git a/src/sentry/sentry_apps/tasks/__init__.py b/src/sentry/sentry_apps/tasks/__init__.py
new file mode 100644
index 0000000000000..62d7eee5405b7
--- /dev/null
+++ b/src/sentry/sentry_apps/tasks/__init__.py
@@ -0,0 +1,23 @@
+from .sentry_apps import (
+ build_comment_webhook,
+ clear_region_cache,
+ create_or_update_service_hooks_for_sentry_app,
+ installation_webhook,
+ process_resource_change_bound,
+ send_alert_event,
+ send_resource_change_webhook,
+ workflow_notification,
+)
+from .service_hooks import process_service_hook
+
+__all__ = (
+ "send_alert_event",
+ "build_comment_webhook",
+ "clear_region_cache",
+ "create_or_update_service_hooks_for_sentry_app",
+ "installation_webhook",
+ "process_resource_change_bound",
+ "send_resource_change_webhook",
+ "workflow_notification",
+ "process_service_hook",
+)
diff --git a/src/sentry/sentry_apps/tasks/sentry_apps.py b/src/sentry/sentry_apps/tasks/sentry_apps.py
new file mode 100644
index 0000000000000..d97c5661fa040
--- /dev/null
+++ b/src/sentry/sentry_apps/tasks/sentry_apps.py
@@ -0,0 +1,515 @@
+from __future__ import annotations
+
+import logging
+from collections import defaultdict
+from collections.abc import Mapping
+from typing import Any
+
+from celery import Task, current_task
+from django.urls import reverse
+from requests.exceptions import RequestException
+
+from sentry import analytics
+from sentry.api.serializers import serialize
+from sentry.constants import SentryAppInstallationStatus
+from sentry.db.models.base import Model
+from sentry.eventstore.models import Event, GroupEvent
+from sentry.hybridcloud.rpc.caching import region_caching_service
+from sentry.models.activity import Activity
+from sentry.models.group import Group
+from sentry.models.organization import Organization
+from sentry.models.organizationmapping import OrganizationMapping
+from sentry.models.project import Project
+from sentry.sentry_apps.api.serializers.app_platform_event import AppPlatformEvent
+from sentry.sentry_apps.models.sentry_app import VALID_EVENTS, SentryApp
+from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
+from sentry.sentry_apps.models.servicehook import ServiceHook, ServiceHookProject
+from sentry.sentry_apps.services.app.model import RpcSentryAppInstallation
+from sentry.sentry_apps.services.app.service import (
+ app_service,
+ get_by_application_id,
+ get_installation,
+)
+from sentry.shared_integrations.exceptions import ApiHostError, ApiTimeoutError, ClientError
+from sentry.silo.base import SiloMode
+from sentry.tasks.base import instrumented_task, retry
+from sentry.users.services.user.model import RpcUser
+from sentry.users.services.user.service import user_service
+from sentry.utils import metrics
+from sentry.utils.http import absolute_uri
+from sentry.utils.sentry_apps import send_and_save_webhook_request
+from sentry.utils.sentry_apps.service_hook_manager import (
+ create_or_update_service_hooks_for_installation,
+)
+
+logger = logging.getLogger("sentry.sentry_apps.tasks.sentry_apps")
+
+TASK_OPTIONS = {
+ "queue": "app_platform",
+ "default_retry_delay": (60 * 5), # Five minutes.
+ "max_retries": 3,
+ "record_timing": True,
+ "silo_mode": SiloMode.REGION,
+}
+CONTROL_TASK_OPTIONS = {
+ "queue": "app_platform.control",
+ "default_retry_delay": (60 * 5), # Five minutes.
+ "max_retries": 3,
+ "silo_mode": SiloMode.CONTROL,
+}
+
+retry_decorator = retry(
+ on=(RequestException, ApiHostError, ApiTimeoutError),
+ ignore=(ClientError,),
+)
+
+# We call some models by a different name, publicly, than their class name.
+# For example the model Group is called "Issue" in the UI. We want the Service
+# Hook events to match what we externally call these primitives.
+RESOURCE_RENAMES = {"Group": "issue"}
+
+TYPES = {"Group": Group, "Error": Event, "Comment": Activity}
+
+
+def _webhook_event_data(
+ event: Event | GroupEvent, group_id: int, project_id: int
+) -> dict[str, Any]:
+ project = Project.objects.get_from_cache(id=project_id)
+ organization = Organization.objects.get_from_cache(id=project.organization_id)
+
+ event_context = event.as_dict()
+ event_context["url"] = absolute_uri(
+ reverse(
+ "sentry-api-0-project-event-details",
+ args=[project.organization.slug, project.slug, event.event_id],
+ )
+ )
+
+ event_context["web_url"] = absolute_uri(
+ reverse(
+ "sentry-organization-event-detail", args=[organization.slug, group_id, event.event_id]
+ )
+ )
+
+ # The URL has a regex OR in it ("|") which means `reverse` cannot generate
+ # a valid URL (it can't know which option to pick). We have to manually
+    # create this URL for that reason.
+ event_context["issue_url"] = absolute_uri(f"/api/0/issues/{group_id}/")
+ event_context["issue_id"] = str(group_id)
+ return event_context
+
+
+@instrumented_task(name="sentry.sentry_apps.tasks.sentry_apps.send_alert_event", **TASK_OPTIONS)
+@retry_decorator
+def send_alert_event(
+ event: Event | GroupEvent,
+ rule: str,
+ sentry_app_id: int,
+ additional_payload_key: str | None = None,
+ additional_payload: Mapping[str, Any] | None = None,
+) -> None:
+ """
+ When an incident alert is triggered, send incident data to the SentryApp's webhook.
+ :param event: The `Event` for which to build a payload.
+ :param rule: The AlertRule that was triggered.
+ :param sentry_app_id: The SentryApp to notify.
+ :param additional_payload_key: The key used to attach additional data to the webhook payload
+ :param additional_payload: The extra data attached to the payload body at the key specified by `additional_payload_key`.
+ :return:
+ """
+ group = event.group
+ assert group, "Group must exist to get related attributes"
+ project = Project.objects.get_from_cache(id=group.project_id)
+ organization = Organization.objects.get_from_cache(id=project.organization_id)
+
+ extra = {
+ "sentry_app_id": sentry_app_id,
+ "project_slug": project.slug,
+ "organization_slug": organization.slug,
+ "rule": rule,
+ }
+
+ sentry_app = app_service.get_sentry_app_by_id(id=sentry_app_id)
+ if sentry_app is None:
+ logger.info("event_alert_webhook.missing_sentry_app", extra=extra)
+ return
+
+ installations = app_service.get_many(
+ filter=dict(
+ organization_id=organization.id,
+ app_ids=[sentry_app.id],
+ status=SentryAppInstallationStatus.INSTALLED,
+ )
+ )
+ if not installations:
+ logger.info("event_alert_webhook.missing_installation", extra=extra)
+ return
+ (install,) = installations
+
+ event_context = _webhook_event_data(event, group.id, project.id)
+
+ data = {"event": event_context, "triggered_rule": rule}
+
+ # Attach extra payload to the webhook
+ if additional_payload_key and additional_payload:
+ data[additional_payload_key] = additional_payload
+
+ request_data = AppPlatformEvent(
+ resource="event_alert", action="triggered", install=install, data=data
+ )
+
+ send_and_save_webhook_request(sentry_app, request_data)
+
+ # On success, record analytic event for Alert Rule UI Component
+ if request_data.data.get("issue_alert"):
+ analytics.record(
+ "alert_rule_ui_component_webhook.sent",
+ organization_id=organization.id,
+ sentry_app_id=sentry_app_id,
+ event=f"{request_data.resource}.{request_data.action}",
+ )
+
+
+def _process_resource_change(
+ *,
+ action: str,
+ sender: str,
+ instance_id: int,
+ retryer: Task | None = None,
+ **kwargs: Any,
+) -> None:
+ # The class is serialized as a string when enqueueing the class.
+ model: type[Event] | type[Model] = TYPES[sender]
+ instance: Event | Model | None = None
+    # The Event model has different hooks for the different event types. The sender
+    # determines which type (e.g. Error) and therefore the 'name' (e.g. error).
+ if issubclass(model, Event):
+ if not kwargs.get("instance"):
+ extra = {"sender": sender, "action": action, "event_id": instance_id}
+ logger.info("process_resource_change.event_missing_event", extra=extra)
+ return
+ name = sender.lower()
+ else:
+ # Some resources are named differently than their model. eg. Group vs Issue.
+ # Looks up the human name for the model. Defaults to the model name.
+ name = RESOURCE_RENAMES.get(model.__name__, model.__name__.lower())
+
+ # By default, use Celery's `current_task` but allow a value to be passed for the
+ # bound Task.
+ retryer = retryer or current_task
+
+ # We may run into a race condition where this task executes before the
+ # transaction that creates the Group has committed.
+ if issubclass(model, Event):
+ # XXX:(Meredith): Passing through the entire event was an intentional choice
+ # to avoid having to query NodeStore again for data we had previously in
+ # post_process. While this is not ideal, changing this will most likely involve
+ # an overhaul of how we do things in post_process, not just this task alone.
+ instance = kwargs.get("instance")
+ else:
+ try:
+ instance = model.objects.get(id=instance_id)
+ except model.DoesNotExist as e:
+ # Explicitly requeue the task, so we don't report this to Sentry until
+ # we hit the max number of retries.
+ return retryer.retry(exc=e)
+
+ event = f"{name}.{action}"
+
+ if event not in VALID_EVENTS:
+ return
+
+ org = None
+
+ if isinstance(instance, (Group, Event, GroupEvent)):
+ org = Organization.objects.get_from_cache(
+ id=Project.objects.get_from_cache(id=instance.project_id).organization_id
+ )
+ assert org, "organization must exist to get related sentry app installations"
+ installations: list[RpcSentryAppInstallation] = [
+ installation
+ for installation in app_service.get_installed_for_organization(organization_id=org.id)
+ if event in installation.sentry_app.events
+ ]
+
+ for installation in installations:
+ data = {}
+ if isinstance(instance, (Event, GroupEvent)):
+ assert instance.group_id, "group id is required to create webhook event data"
+ data[name] = _webhook_event_data(instance, instance.group_id, instance.project_id)
+ else:
+ data[name] = serialize(instance)
+
+ # Trigger a new task for each webhook
+ send_resource_change_webhook.delay(
+ installation_id=installation.id, event=event, data=data
+ )
+
+
+@instrumented_task(
+ "sentry.sentry_apps.tasks.sentry_apps.process_resource_change_bound", bind=True, **TASK_OPTIONS
+)
+@retry_decorator
+def process_resource_change_bound(
+ self: Task, action: str, sender: str, instance_id: int, **kwargs: Any
+) -> None:
+ _process_resource_change(
+ action=action, sender=sender, instance_id=instance_id, retryer=self, **kwargs
+ )
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.installation_webhook", **CONTROL_TASK_OPTIONS
+)
+@retry_decorator
+def installation_webhook(installation_id: int, user_id: int, *args: Any, **kwargs: Any) -> None:
+ from sentry.sentry_apps.installations import SentryAppInstallationNotifier
+
+ extra = {"installation_id": installation_id, "user_id": user_id}
+ try:
+ # we should send the webhook for pending installations on the install event in case that's part of the workflow
+ install = SentryAppInstallation.objects.get(id=installation_id)
+ except SentryAppInstallation.DoesNotExist:
+ logger.info("installation_webhook.missing_installation", extra=extra)
+ return
+
+ user = user_service.get_user(user_id=user_id)
+ if not user:
+ logger.info("installation_webhook.missing_user", extra=extra)
+ return
+
+ SentryAppInstallationNotifier(
+ sentry_app_installation=install, user=user, action="created"
+ ).run()
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.clear_region_cache", **CONTROL_TASK_OPTIONS
+)
+def clear_region_cache(sentry_app_id: int, region_name: str) -> None:
+ try:
+ sentry_app = SentryApp.objects.get(id=sentry_app_id)
+ except SentryApp.DoesNotExist:
+ return
+
+ # When a sentry app's definition changes purge cache for all the installations.
+ # This could get slow for large applications, but generally big applications don't change often.
+ install_query = SentryAppInstallation.objects.filter(
+ sentry_app=sentry_app,
+ ).values("id", "organization_id")
+
+ # There isn't a constraint on org : sentryapp so we have to handle lists
+ install_map: dict[int, list[int]] = defaultdict(list)
+ for install_row in install_query:
+ install_map[install_row["organization_id"]].append(install_row["id"])
+
+ # Clear application_id cache
+ region_caching_service.clear_key(
+ key=get_by_application_id.key_from(sentry_app.application_id), region_name=region_name
+ )
+
+ # Limit our operations to the region this outbox is for.
+ # This could be a single query if we use raw_sql.
+ region_query = OrganizationMapping.objects.filter(
+ organization_id__in=list(install_map.keys()), region_name=region_name
+ ).values("organization_id")
+ for region_row in region_query:
+ installs = install_map[region_row["organization_id"]]
+ for install_id in installs:
+ region_caching_service.clear_key(
+ key=get_installation.key_from(install_id), region_name=region_name
+ )
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.workflow_notification", **TASK_OPTIONS
+)
+@retry_decorator
+def workflow_notification(
+ installation_id: int, issue_id: int, type: str, user_id: int, *args: Any, **kwargs: Any
+) -> None:
+ webhook_data = get_webhook_data(installation_id, issue_id, user_id)
+ if not webhook_data:
+ return
+ install, issue, user = webhook_data
+ data = kwargs.get("data", {})
+ data.update({"issue": serialize(issue)})
+ send_webhooks(installation=install, event=f"issue.{type}", data=data, actor=user)
+ analytics.record(
+ f"sentry_app.issue.{type}",
+ user_id=user_id,
+ group_id=issue_id,
+ installation_id=installation_id,
+ )
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.build_comment_webhook", **TASK_OPTIONS
+)
+@retry_decorator
+def build_comment_webhook(
+ installation_id: int, issue_id: int, type: str, user_id: int, *args: Any, **kwargs: Any
+) -> None:
+ webhook_data = get_webhook_data(installation_id, issue_id, user_id)
+ if not webhook_data:
+ return None
+ install, _, user = webhook_data
+ data = kwargs.get("data", {})
+ project_slug = data.get("project_slug")
+ comment_id = data.get("comment_id")
+ payload = {
+ "comment_id": data.get("comment_id"),
+ "issue_id": issue_id,
+ "project_slug": data.get("project_slug"),
+ "timestamp": data.get("timestamp"),
+ "comment": data.get("comment"),
+ }
+ send_webhooks(installation=install, event=type, data=payload, actor=user)
+ # `type` is comment.created, comment.updated, or comment.deleted
+ analytics.record(
+ type,
+ user_id=user_id,
+ group_id=issue_id,
+ project_slug=project_slug,
+ installation_id=installation_id,
+ comment_id=comment_id,
+ )
+
+
+def get_webhook_data(
+ installation_id: int, issue_id: int, user_id: int
+) -> tuple[RpcSentryAppInstallation, Group, RpcUser | None] | None:
+ extra = {"installation_id": installation_id, "issue_id": issue_id}
+ install = app_service.installation_by_id(id=installation_id)
+ if not install:
+ logger.info("workflow_notification.missing_installation", extra=extra)
+ return None
+
+ try:
+ issue = Group.objects.get(id=issue_id)
+ except Group.DoesNotExist:
+ logger.info("workflow_notification.missing_issue", extra=extra)
+ return None
+
+ user = None
+ if user_id:
+ user = user_service.get_user(user_id=user_id)
+ if not user:
+ logger.info("workflow_notification.missing_user", extra=extra)
+
+ return (install, issue, user)
+
+
+@instrumented_task(
+ "sentry.sentry_apps.tasks.sentry_apps.send_resource_change_webhook", **TASK_OPTIONS
+)
+@retry_decorator
+def send_resource_change_webhook(
+ installation_id: int, event: str, data: dict[str, Any], *args: Any, **kwargs: Any
+) -> None:
+ installation = app_service.installation_by_id(id=installation_id)
+ if not installation:
+ logger.info(
+ "send_resource_change_webhook.missing_installation",
+ extra={"installation_id": installation_id, "event": event},
+ )
+ return
+
+ send_webhooks(installation, event, data=data)
+
+ metrics.incr("resource_change.processed", sample_rate=1.0, tags={"change_event": event})
+
+
+def notify_sentry_app(event: Event | GroupEvent, futures):
+ for f in futures:
+ if not f.kwargs.get("sentry_app"):
+ continue
+
+ extra_kwargs: dict[str, Any] = {
+ "additional_payload_key": None,
+ "additional_payload": None,
+ }
+ # If the future comes from a rule with a UI component form in the schema, append the issue alert payload
+ settings = f.kwargs.get("schema_defined_settings")
+ if settings:
+ extra_kwargs["additional_payload_key"] = "issue_alert"
+ extra_kwargs["additional_payload"] = {
+ "id": f.rule.id,
+ "title": f.rule.label,
+ "sentry_app_id": f.kwargs["sentry_app"].id,
+ "settings": settings,
+ }
+
+ send_alert_event.delay(
+ event=event,
+ rule=f.rule.label,
+ sentry_app_id=f.kwargs["sentry_app"].id,
+ **extra_kwargs,
+ )
+
+
+def send_webhooks(installation: RpcSentryAppInstallation, event: str, **kwargs: Any) -> None:
+ servicehook: ServiceHook
+ try:
+ servicehook = ServiceHook.objects.get(
+ organization_id=installation.organization_id, actor_id=installation.id
+ )
+ except ServiceHook.DoesNotExist:
+ logger.info(
+ "send_webhooks.missing_servicehook",
+ extra={"installation_id": installation.id, "event": event},
+ )
+ return None
+
+ if event not in servicehook.events:
+ return None
+
+ # The service hook applies to all projects if there are no
+    # ServiceHookProject records. Otherwise we want to check if
+ # the event is within the allowed projects.
+ project_limited = ServiceHookProject.objects.filter(service_hook_id=servicehook.id).exists()
+
+ # TODO(nola): This is disabled for now, because it could potentially affect internal integrations w/ error.created
+ # # If the event is error.created & the request is going out to the Org that owns the Sentry App,
+ # # Make sure we don't send the request, to prevent potential infinite loops
+ # if (
+ # event == "error.created"
+ # and installation.organization_id == installation.sentry_app.owner_id
+ # ):
+ # # We just want to exclude error.created from the project that the integration lives in
+ # # Need to first implement project mapping for integration partners
+ # metrics.incr(
+ # "webhook_request.dropped",
+ # tags={"sentry_app": installation.sentry_app.id, "event": event},
+ # )
+ # return
+
+ if not project_limited:
+ resource, action = event.split(".")
+
+ kwargs["resource"] = resource
+ kwargs["action"] = action
+ kwargs["install"] = installation
+
+ request_data = AppPlatformEvent(**kwargs)
+ send_and_save_webhook_request(
+ installation.sentry_app,
+ request_data,
+ installation.sentry_app.webhook_url,
+ )
+
+
+@instrumented_task(
+ "sentry.sentry_apps.tasks.sentry_apps.create_or_update_service_hooks_for_sentry_app",
+ **CONTROL_TASK_OPTIONS,
+)
+def create_or_update_service_hooks_for_sentry_app(
+ sentry_app_id: int, webhook_url: str, events: list[str], **kwargs: dict
+) -> None:
+ installations = SentryAppInstallation.objects.filter(sentry_app_id=sentry_app_id)
+ for installation in installations:
+ create_or_update_service_hooks_for_installation(
+ installation=installation,
+ events=events,
+ webhook_url=webhook_url,
+ )
diff --git a/src/sentry/sentry_apps/tasks/service_hooks.py b/src/sentry/sentry_apps/tasks/service_hooks.py
new file mode 100644
index 0000000000000..76685918e9f6b
--- /dev/null
+++ b/src/sentry/sentry_apps/tasks/service_hooks.py
@@ -0,0 +1,60 @@
+from time import time
+
+from sentry.api.serializers import serialize
+from sentry.http import safe_urlopen
+from sentry.sentry_apps.models.servicehook import ServiceHook
+from sentry.silo.base import SiloMode
+from sentry.tasks.base import instrumented_task, retry
+from sentry.tsdb.base import TSDBModel
+from sentry.utils import json
+
+
+def get_payload_v0(event):
+ group = event.group
+ project = group.project
+
+ group_context = serialize(group)
+ group_context["url"] = group.get_absolute_url()
+
+ event_context = serialize(event)
+ event_context["url"] = f"{group.get_absolute_url()}events/{event.event_id}/"
+ data = {
+ "project": {"slug": project.slug, "name": project.name},
+ "group": group_context,
+ "event": event_context,
+ }
+ return data
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.service_hooks.process_service_hook",
+ default_retry_delay=60 * 5,
+ max_retries=5,
+ silo_mode=SiloMode.REGION,
+)
+@retry
+def process_service_hook(servicehook_id, event, **kwargs):
+ try:
+ servicehook = ServiceHook.objects.get(id=servicehook_id)
+ except ServiceHook.DoesNotExist:
+ return
+
+ if servicehook.version == 0:
+ payload = get_payload_v0(event)
+ else:
+ raise NotImplementedError
+
+ from sentry import tsdb
+
+ tsdb.backend.incr(TSDBModel.servicehook_fired, servicehook.id)
+
+ headers = {
+ "Content-Type": "application/json",
+ "X-ServiceHook-Timestamp": str(int(time())),
+ "X-ServiceHook-GUID": servicehook.guid,
+ "X-ServiceHook-Signature": servicehook.build_signature(json.dumps(payload)),
+ }
+
+ safe_urlopen(
+ url=servicehook.url, data=json.dumps(payload), headers=headers, timeout=5, verify_ssl=False
+ )
diff --git a/src/sentry/mediators/token_exchange/grant_exchanger.py b/src/sentry/sentry_apps/token_exchange/grant_exchanger.py
similarity index 66%
rename from src/sentry/mediators/token_exchange/grant_exchanger.py
rename to src/sentry/sentry_apps/token_exchange/grant_exchanger.py
index 53a697ed36802..589a3126ec078 100644
--- a/src/sentry/mediators/token_exchange/grant_exchanger.py
+++ b/src/sentry/sentry_apps/token_exchange/grant_exchanger.py
@@ -1,12 +1,11 @@
+from dataclasses import dataclass
from datetime import datetime, timezone
-from django.db import router
+from django.db import router, transaction
from django.utils.functional import cached_property
from sentry import analytics
from sentry.coreapi import APIUnauthorized
-from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param
from sentry.mediators.token_exchange.util import token_expiration
from sentry.mediators.token_exchange.validator import Validator
from sentry.models.apiapplication import ApiApplication
@@ -19,71 +18,75 @@
from sentry.users.models.user import User
-class GrantExchanger(Mediator):
+@dataclass
+class GrantExchanger:
"""
Exchanges a Grant Code for an Access Token
"""
- install = Param(RpcSentryAppInstallation)
- code = Param(str)
- client_id = Param(str)
- user = Param(User)
- using = router.db_for_write(User)
+ install: RpcSentryAppInstallation
+ code: str
+ client_id: str
+ user: User
- def call(self):
- self._validate()
- self._create_token()
+ def run(self):
+ with transaction.atomic(using=router.db_for_write(ApiToken)):
+ self._validate()
+ token = self._create_token()
- # Once it's exchanged it's no longer valid and should not be
- # exchangeable, so we delete it.
- self._delete_grant()
+ # Once it's exchanged it's no longer valid and should not be
+ # exchangeable, so we delete it.
+ self._delete_grant()
+ self.record_analytics()
- return self.token
+ return token
- def record_analytics(self):
+ def record_analytics(self) -> None:
analytics.record(
"sentry_app.token_exchanged",
sentry_app_installation_id=self.install.id,
exchange_type="authorization",
)
- def _validate(self):
+ def _validate(self) -> None:
Validator.run(install=self.install, client_id=self.client_id, user=self.user)
if not self._grant_belongs_to_install() or not self._sentry_app_user_owns_grant():
- raise APIUnauthorized
+ raise APIUnauthorized("Forbidden grant")
if not self._grant_is_active():
raise APIUnauthorized("Grant has already expired.")
- def _grant_belongs_to_install(self):
+ def _grant_belongs_to_install(self) -> bool:
return self.grant.sentry_app_installation.id == self.install.id
- def _sentry_app_user_owns_grant(self):
+ def _sentry_app_user_owns_grant(self) -> bool:
return self.grant.application.owner == self.user
- def _grant_is_active(self):
+ def _grant_is_active(self) -> bool:
return self.grant.expires_at > datetime.now(timezone.utc)
- def _delete_grant(self):
+ def _delete_grant(self) -> None:
# This will cause a set null to trigger which does not need to cascade an outbox
with unguarded_write(router.db_for_write(ApiGrant)):
self.grant.delete()
- def _create_token(self):
- self.token = ApiToken.objects.create(
+ def _create_token(self) -> ApiToken:
+ token = ApiToken.objects.create(
user=self.user,
application=self.application,
scope_list=self.sentry_app.scope_list,
expires_at=token_expiration(),
)
try:
- SentryAppInstallation.objects.get(id=self.install.id).update(api_token=self.token)
+ SentryAppInstallation.objects.get(id=self.install.id).update(api_token=token)
except SentryAppInstallation.DoesNotExist:
pass
+ return token
+
@cached_property
- def grant(self):
+ def grant(self) -> ApiGrant:
try:
return (
ApiGrant.objects.select_related("sentry_app_installation")
@@ -92,18 +95,18 @@ def grant(self):
.get(code=self.code)
)
except ApiGrant.DoesNotExist:
- raise APIUnauthorized
+ raise APIUnauthorized("Could not find grant")
@property
- def application(self):
+ def application(self) -> ApiApplication:
try:
return self.grant.application
except ApiApplication.DoesNotExist:
- raise APIUnauthorized
+ raise APIUnauthorized("Could not find application")
@property
- def sentry_app(self):
+ def sentry_app(self) -> SentryApp:
try:
return self.application.sentry_app
except SentryApp.DoesNotExist:
- raise APIUnauthorized
+ raise APIUnauthorized("Could not find sentry app")
diff --git a/src/sentry/web/frontend/debug/debug_sentry_app_notify_disable.py b/src/sentry/sentry_apps/web/debug_sentry_app_notify_disable.py
similarity index 97%
rename from src/sentry/web/frontend/debug/debug_sentry_app_notify_disable.py
rename to src/sentry/sentry_apps/web/debug_sentry_app_notify_disable.py
index 53df7c98ce44f..06a0f0c791458 100644
--- a/src/sentry/web/frontend/debug/debug_sentry_app_notify_disable.py
+++ b/src/sentry/sentry_apps/web/debug_sentry_app_notify_disable.py
@@ -6,8 +6,7 @@
from sentry.models.organization import Organization
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-
-from .mail import MailPreview
+from sentry.web.frontend.debug.mail import MailPreview
class DebugSentryAppNotifyDisableView(View):
diff --git a/src/sentry/web/frontend/sentryapp_avatar.py b/src/sentry/sentry_apps/web/sentryapp_avatar.py
similarity index 100%
rename from src/sentry/web/frontend/sentryapp_avatar.py
rename to src/sentry/sentry_apps/web/sentryapp_avatar.py
diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py
index 9af23bb62da10..4a72736ff4886 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/processing.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py
@@ -73,12 +73,13 @@ def __get_schema_validator(self) -> Callable[[str, IngestMetric], None]:
).validate
def process_messages(self, outer_message: Message[MessageBatch]) -> IndexerOutputMessageBatch:
- # TODO-anton: remove sampled here and let traces_sampler decide
+ sample_rate = (
+ settings.SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE
+ * settings.SENTRY_BACKEND_APM_SAMPLING
+ )
with sentry_sdk.start_transaction(
name="sentry.sentry_metrics.consumers.indexer.processing.process_messages",
- custom_sampling_context={
- "sample_rate": settings.SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE
- },
+ custom_sampling_context={"sample_rate": sample_rate},
):
return self._process_messages_impl(outer_message)
diff --git a/src/sentry/sentry_metrics/indexer/cache.py b/src/sentry/sentry_metrics/indexer/cache.py
index 0676acf5e59d7..4ced794ad5402 100644
--- a/src/sentry/sentry_metrics/indexer/cache.py
+++ b/src/sentry/sentry_metrics/indexer/cache.py
@@ -284,7 +284,7 @@ def resolve(self, use_case_id: UseCaseID, org_id: int, string: str) -> int | Non
_INDEXER_CACHE_RESOLVE_METRIC,
tags={"cache_hit": "false", "use_case": use_case_id.value},
)
- # TODO this random rollout is backwards
+ # TODO: this random rollout is backwards
if random.random() >= options.get(
"sentry-metrics.indexer.disable-memcache-replenish-rollout"
):
diff --git a/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py b/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py
index f6383dc9e2774..347322458c893 100644
--- a/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py
+++ b/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py
@@ -24,7 +24,7 @@
from sentry.models.organization import Organization
from sentry.models.project import Project
-from sentry.utils import snuba
+from sentry.utils import snuba_rpc
def parse_mql_filters(group: ConditionGroup) -> Iterable[TraceItemFilter]:
@@ -94,7 +94,7 @@ def make_eap_request(
name=ts.metric.mri.split("/")[1].split("@")[0], type=AttributeKey.TYPE_FLOAT
),
)
- aggregate_resp = snuba.rpc(aggregate_req, AggregateBucketResponse)
+ aggregate_resp = snuba_rpc.rpc(aggregate_req, AggregateBucketResponse)
series_data = list(aggregate_resp.result)
duration = end - start
diff --git a/src/sentry/shared_integrations/exceptions/__init__.py b/src/sentry/shared_integrations/exceptions/__init__.py
index 19c1feb912622..a871bbf4f75c7 100644
--- a/src/sentry/shared_integrations/exceptions/__init__.py
+++ b/src/sentry/shared_integrations/exceptions/__init__.py
@@ -20,7 +20,10 @@
"ApiTimeoutError",
"ApiUnauthorized",
"ApiRateLimitedError",
+ "ApiInvalidRequestError",
"IntegrationError",
+ "IntegrationFormError",
+ "UnsupportedResponseType",
)
@@ -84,6 +87,8 @@ def from_response(cls, response: Response, url: str | None = None) -> ApiError:
return ApiRateLimitedError(response.text, url=url)
elif response.status_code == 409:
return ApiConflictError(response.text, url=url)
+ elif response.status_code == 400:
+ return ApiInvalidRequestError(response.text, url=url)
return cls(response.text, response.status_code, url=url)
@@ -151,6 +156,10 @@ class ApiConnectionResetError(ApiError):
code = errno.ECONNRESET
+class ApiInvalidRequestError(ApiError):
+ code = 400
+
+
class UnsupportedResponseType(ApiError):
@property
def content_type(self) -> str:
diff --git a/src/sentry/signals.py b/src/sentry/signals.py
index 979ebefab65ad..abce0805ba6cd 100644
--- a/src/sentry/signals.py
+++ b/src/sentry/signals.py
@@ -135,7 +135,7 @@ def _log_robust_failure(self, receiver: object, err: Exception) -> None:
inbound_filter_toggled = BetterSignal() # ["project"]
sso_enabled = BetterSignal() # ["organization_id", "user_id", "provider"]
data_scrubber_enabled = BetterSignal() # ["organization"]
-# ["project", "rule", "user", "rule_type", "is_api_token", "duplicate_rule", "wizard_v3"]
+# ["project", "rule", "user", "rule_type", "is_api_token", "duplicate_rule", "wizard_v3", "query_type"]
alert_rule_created = BetterSignal()
alert_rule_edited = BetterSignal() # ["project", "rule", "user", "rule_type", "is_api_token"]
repo_linked = BetterSignal() # ["repo", "user"]
diff --git a/src/sentry/silo/base.py b/src/sentry/silo/base.py
index f847180c186e3..85d175e0fae94 100644
--- a/src/sentry/silo/base.py
+++ b/src/sentry/silo/base.py
@@ -8,7 +8,7 @@
import typing
from collections.abc import Callable, Generator, Iterable
from enum import Enum
-from typing import Any
+from typing import Any, ParamSpec, TypeVar
from sentry.utils.env import in_test_environment
@@ -16,6 +16,10 @@
from sentry.types.region import Region
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
class SiloMode(Enum):
"""Defines which "silo" component the application is acting as.
@@ -114,10 +118,10 @@ class AvailabilityError(Exception):
@abc.abstractmethod
def handle_when_unavailable(
self,
- original_method: Callable[..., Any],
+ original_method: Callable[P, R],
current_mode: SiloMode,
available_modes: Iterable[SiloMode],
- ) -> Callable[..., Any]:
+ ) -> Callable[P, R]:
"""Handle an attempt to access an unavailable element.
Return a callback that accepts the same varargs as the original call to
@@ -132,8 +136,8 @@ def is_available(self) -> bool:
def create_override(
self,
- original_method: Callable[..., Any],
- ) -> Callable[..., Any]:
+ original_method: Callable[P, R],
+ ) -> Callable[P, R]:
"""Create a method that conditionally overrides another method.
The returned method passes through to the original only if this server
@@ -143,7 +147,7 @@ def create_override(
:return: the conditional method object
"""
- def override(*args: Any, **kwargs: Any) -> Any:
+ def override(*args: P.args, **kwargs: P.kwargs) -> R:
# It's important to do this check inside the override, so that tests
# using `override_settings` or a similar context can change the value of
# settings.SILO_MODE effectively. Otherwise, availability would be
@@ -169,10 +173,10 @@ class FunctionSiloLimit(SiloLimit):
def handle_when_unavailable(
self,
- original_method: Callable[..., Any],
+ original_method: Callable[P, R],
current_mode: SiloMode,
available_modes: Iterable[SiloMode],
- ) -> Callable[..., Any]:
+ ) -> Callable[P, R]:
if in_test_environment():
mode_str = ", ".join(str(m) for m in available_modes)
message = (
@@ -182,7 +186,7 @@ def handle_when_unavailable(
raise self.AvailabilityError(message)
return original_method
- def __call__(self, decorated_obj: Any) -> Any:
+ def __call__(self, decorated_obj: Callable[P, R]) -> Callable[P, R]:
if not callable(decorated_obj):
raise TypeError("`@FunctionSiloLimit` must decorate a function")
return self.create_override(decorated_obj)
diff --git a/src/sentry/similarity/features.py b/src/sentry/similarity/features.py
index 07cf865438cf3..ed089808d4026 100644
--- a/src/sentry/similarity/features.py
+++ b/src/sentry/similarity/features.py
@@ -1,4 +1,3 @@
-import functools
import itertools
import logging
@@ -66,10 +65,10 @@ def __init__(
self.expected_encoding_errors = expected_encoding_errors
assert set(self.aliases) == set(self.features)
- def __get_scope(self, project):
+ def __get_scope(self, project) -> str:
return f"{project.id}"
- def __get_key(self, group):
+ def __get_key(self, group) -> str:
return f"{group.id}"
def extract(self, event):
@@ -81,7 +80,7 @@ def extract(self, event):
log = (
logger.debug
if isinstance(error, self.expected_extraction_errors)
- else functools.partial(logger.warning, exc_info=True)
+ else logger.warning
)
log(
"Could not extract features from %r for %r due to error: %r",
@@ -96,8 +95,8 @@ def record(self, events):
if not events:
return []
- scope = None
- key = None
+ scope: str | None = None
+ key: str | None = None
items = []
for event in events:
@@ -124,13 +123,14 @@ def record(self, events):
log = (
logger.debug
if isinstance(error, self.expected_encoding_errors)
- else functools.partial(logger.warning, exc_info=True)
+ else logger.warning
)
log(
"Could not encode features from %r for %r due to error: %r",
event,
label,
error,
+ exc_info=True,
)
else:
if features:
@@ -145,7 +145,7 @@ def classify(self, events, limit=None, thresholds=None):
if thresholds is None:
thresholds = {}
- scope = None
+ scope: str | None = None
labels = []
items = []
@@ -164,13 +164,14 @@ def classify(self, events, limit=None, thresholds=None):
log = (
logger.debug
if isinstance(error, self.expected_encoding_errors)
- else functools.partial(logger.warning, exc_info=True)
+ else logger.warning
)
log(
"Could not encode features from %r for %r due to error: %r",
event,
label,
error,
+ exc_info=True,
)
else:
if features:
@@ -210,7 +211,7 @@ def add_index_aliases_to_key(key):
# within so that we can make the most efficient queries possible and
# reject queries that cross scopes if we haven't explicitly allowed
# unsafe actions.
- scopes = {}
+ scopes: dict[str, set[str]] = {}
for source in sources:
scopes.setdefault(self.__get_scope(source.project), set()).add(source)
diff --git a/src/sentry/snuba/dataset.py b/src/sentry/snuba/dataset.py
index 6a341345dabc9..799029ea0e7b1 100644
--- a/src/sentry/snuba/dataset.py
+++ b/src/sentry/snuba/dataset.py
@@ -52,7 +52,7 @@ class Dataset(Enum):
indexed spans are similar to indexed transactions in the fields available to search
"""
- SpansEAP = "eap_spans"
+ EventsAnalyticsPlatform = "events_analytics_platform"
MetricsSummaries = "metrics_summaries"
"""
@@ -66,6 +66,7 @@ class EntityKey(Enum):
Events = "events"
Sessions = "sessions"
Spans = "spans"
+ EAPSpans = "eap_spans"
Transactions = "transactions"
MetricsSets = "metrics_sets"
MetricsCounters = "metrics_counters"
diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py
index 0e1ff0f42e829..edf113b3bb4ad 100644
--- a/src/sentry/snuba/discover.py
+++ b/src/sentry/snuba/discover.py
@@ -340,7 +340,7 @@ def timeseries_query(
Dataset.Transactions,
], "A dataset is required to query discover"
- with sentry_sdk.start_span(op="discover.discover", description="timeseries.filter_transform"):
+ with sentry_sdk.start_span(op="discover.discover", name="timeseries.filter_transform"):
equations, columns = categorize_columns(selected_columns)
base_builder = TimeseriesQueryBuilder(
dataset,
@@ -379,7 +379,7 @@ def timeseries_query(
[query.get_snql_query() for query in query_list], referrer, query_source=query_source
)
- with sentry_sdk.start_span(op="discover.discover", description="timeseries.transform_results"):
+ with sentry_sdk.start_span(op="discover.discover", name="timeseries.transform_results"):
results = []
for snql_query, snuba_result in zip(query_list, query_results):
results.append(
@@ -506,7 +506,7 @@ def top_events_timeseries(
], "A dataset is required to query discover"
if top_events is None:
- with sentry_sdk.start_span(op="discover.discover", description="top_events.fetch_events"):
+ with sentry_sdk.start_span(op="discover.discover", name="top_events.fetch_events"):
top_events = query(
selected_columns,
query=user_query,
@@ -577,9 +577,7 @@ def top_events_timeseries(
snuba_params.end_date,
rollup,
)
- with sentry_sdk.start_span(
- op="discover.discover", description="top_events.transform_results"
- ) as span:
+ with sentry_sdk.start_span(op="discover.discover", name="top_events.transform_results") as span:
span.set_data("result_count", len(result.get("data", [])))
result = top_events_builder.process_results(result)
@@ -660,7 +658,7 @@ def get_facets(
sample = len(snuba_params.project_ids) > 2
fetch_projects = len(snuba_params.project_ids) > 1
- with sentry_sdk.start_span(op="discover.discover", description="facets.frequent_tags"):
+ with sentry_sdk.start_span(op="discover.discover", name="facets.frequent_tags"):
key_name_builder = DiscoverQueryBuilder(
Dataset.Discover,
params={},
@@ -706,7 +704,7 @@ def get_facets(
project_results = []
# Inject project data on the first page if multiple projects are selected
if fetch_projects and cursor == 0:
- with sentry_sdk.start_span(op="discover.discover", description="facets.projects"):
+ with sentry_sdk.start_span(op="discover.discover", name="facets.projects"):
project_value_builder = DiscoverQueryBuilder(
Dataset.Discover,
params={},
@@ -740,9 +738,7 @@ def get_facets(
else:
individual_tags.append(tag)
- with sentry_sdk.start_span(
- op="discover.discover", description="facets.individual_tags"
- ) as span:
+ with sentry_sdk.start_span(op="discover.discover", name="facets.individual_tags") as span:
span.set_data("tag_count", len(individual_tags))
for tag_name in individual_tags:
tag = f"tags[{tag_name}]"
@@ -767,7 +763,7 @@ def get_facets(
)
if aggregate_tags:
- with sentry_sdk.start_span(op="discover.discover", description="facets.aggregate_tags"):
+ with sentry_sdk.start_span(op="discover.discover", name="facets.aggregate_tags"):
aggregate_value_builder = DiscoverQueryBuilder(
Dataset.Discover,
params={},
diff --git a/src/sentry/snuba/entity_subscription.py b/src/sentry/snuba/entity_subscription.py
index e6a483303fd7c..417e0d1697859 100644
--- a/src/sentry/snuba/entity_subscription.py
+++ b/src/sentry/snuba/entity_subscription.py
@@ -16,15 +16,14 @@
from sentry.search.events.builder.base import BaseQueryBuilder
from sentry.search.events.builder.discover import DiscoverQueryBuilder
from sentry.search.events.builder.metrics import AlertMetricsQueryBuilder
+from sentry.search.events.builder.spans_indexed import SpansEAPQueryBuilder
from sentry.search.events.types import ParamsType, QueryBuilderConfig
from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.sentry_metrics.utils import (
- MetricIndexNotFound,
resolve,
resolve_tag_key,
resolve_tag_value,
resolve_tag_values,
- reverse_resolve_tag_value,
)
from sentry.snuba.dataset import Dataset, EntityKey
from sentry.snuba.metrics.extraction import MetricSpecType
@@ -49,6 +48,7 @@
EntityKey.GenericMetricsGauges: "timestamp",
EntityKey.MetricsCounters: "timestamp",
EntityKey.MetricsSets: "timestamp",
+ EntityKey.EAPSpans: "timestamp",
}
CRASH_RATE_ALERT_AGGREGATE_RE = (
r"^percentage\([ ]*(sessions_crashed|users_crashed)[ ]*\,[ ]*(sessions|users)[ ]*\)"
@@ -219,6 +219,41 @@ class PerformanceTransactionsEntitySubscription(BaseEventsAndTransactionEntitySu
dataset = Dataset.Transactions
+class PerformanceSpansEAPEntitySubscription(BaseEventsAndTransactionEntitySubscription):
+ query_type = SnubaQuery.Type.PERFORMANCE
+ dataset = Dataset.EventsAnalyticsPlatform
+
+ def build_query_builder(
+ self,
+ query: str,
+ project_ids: list[int],
+ environment: Environment | None,
+ params: ParamsType | None = None,
+ skip_field_validation_for_entity_subscription_deletion: bool = False,
+ ) -> BaseQueryBuilder:
+ if params is None:
+ params = {}
+
+ params["project_id"] = project_ids
+
+ query = apply_dataset_query_conditions(self.query_type, query, self.event_types)
+ if environment:
+ params["environment"] = environment.name
+
+ return SpansEAPQueryBuilder(
+ dataset=Dataset(self.dataset.value),
+ query=query,
+ selected_columns=[self.aggregate],
+ params=params,
+ offset=None,
+ limit=None,
+ config=QueryBuilderConfig(
+ skip_time_conditions=True,
+ skip_field_validation_for_entity_subscription_deletion=skip_field_validation_for_entity_subscription_deletion,
+ ),
+ )
+
+
class BaseMetricsEntitySubscription(BaseEntitySubscription, ABC):
def __init__(
self, aggregate: str, time_window: int, extra_fields: _EntitySpecificParams | None = None
@@ -374,77 +409,8 @@ def get_granularity(self) -> int:
granularity = 24 * 3600
return granularity
- @staticmethod
- def translate_sessions_tag_keys_and_values(
- data: list[dict[str, Any]], org_id: int, alias: str | None = None
- ) -> tuple[int, int]:
- value_col_name = alias if alias else "value"
- try:
- translated_data: dict[str, Any] = {}
- session_status = resolve_tag_key(UseCaseID.SESSIONS, org_id, "session.status")
- for row in data:
- tag_value = reverse_resolve_tag_value(
- UseCaseID.SESSIONS, org_id, row[session_status]
- )
- if tag_value is None:
- raise MetricIndexNotFound()
- translated_data[tag_value] = row[value_col_name]
-
- total_session_count = translated_data.get("init", 0)
- crash_count = translated_data.get("crashed", 0)
- except MetricIndexNotFound:
- metrics.incr("incidents.entity_subscription.metric_index_not_found")
- total_session_count = crash_count = 0
- return total_session_count, crash_count
-
- @staticmethod
- def is_crash_rate_format_v2(data: list[dict[str, Any]]) -> bool:
- """Check if this is the new update format.
- This function can be removed once all subscriptions have been updated.
- """
- return bool(data) and "crashed" in data[0]
-
def aggregate_query_results(
self, data: list[dict[str, Any]], alias: str | None = None
- ) -> list[dict[str, Any]]:
- """Handle both update formats. Once all subscriptions have been updated
- to v2, we can remove v1 and replace this function with current v2.
- """
- if self.is_crash_rate_format_v2(data):
- version = "v2"
- result = self._aggregate_query_results_v2(data, alias)
- else:
- version = "v1"
- result = self._aggregate_query_results_v1(data, alias)
-
- metrics.incr(
- "incidents.entity_subscription.aggregate_query_results",
- tags={"format": version},
- sample_rate=1.0,
- )
- return result
-
- def _aggregate_query_results_v1(
- self, data: list[dict[str, Any]], alias: str | None = None
- ) -> list[dict[str, Any]]:
- aggregated_results: list[dict[str, Any]]
- total_session_count, crash_count = self.translate_sessions_tag_keys_and_values(
- org_id=self.org_id, data=data, alias=alias
- )
- if total_session_count == 0:
- metrics.incr(
- "incidents.entity_subscription.metrics.aggregate_query_results.no_session_data"
- )
- crash_free_rate = None
- else:
- crash_free_rate = round((1 - crash_count / total_session_count) * 100, 3)
-
- col_name = alias if alias else CRASH_RATE_ALERT_AGGREGATE_ALIAS
- aggregated_results = [{col_name: crash_free_rate}]
- return aggregated_results
-
- def _aggregate_query_results_v2(
- self, data: list[dict[str, Any]], alias: str | None = None
) -> list[dict[str, Any]]:
aggregated_results: list[dict[str, Any]]
if not data:
@@ -524,6 +490,7 @@ def get_snql_aggregations(self) -> list[str]:
MetricsSetsEntitySubscription,
PerformanceTransactionsEntitySubscription,
PerformanceMetricsEntitySubscription,
+ PerformanceSpansEAPEntitySubscription,
]
@@ -547,6 +514,8 @@ def get_entity_subscription(
entity_subscription_cls = PerformanceTransactionsEntitySubscription
elif dataset in (Dataset.Metrics, Dataset.PerformanceMetrics):
entity_subscription_cls = PerformanceMetricsEntitySubscription
+ elif dataset == Dataset.EventsAnalyticsPlatform:
+ entity_subscription_cls = PerformanceSpansEAPEntitySubscription
if query_type == SnubaQuery.Type.CRASH_RATE:
entity_key = determine_crash_rate_alert_entity(aggregate)
if entity_key == EntityKey.MetricsCounters:
diff --git a/src/sentry/snuba/errors.py b/src/sentry/snuba/errors.py
index 22a3ac287886e..d3e94149f6a0c 100644
--- a/src/sentry/snuba/errors.py
+++ b/src/sentry/snuba/errors.py
@@ -106,7 +106,7 @@ def timeseries_query(
query_source: QuerySource | None = None,
):
- with sentry_sdk.start_span(op="errors", description="timeseries.filter_transform"):
+ with sentry_sdk.start_span(op="errors", name="timeseries.filter_transform"):
equations, columns = categorize_columns(selected_columns)
base_builder = ErrorsTimeseriesQueryBuilder(
Dataset.Events,
@@ -145,7 +145,7 @@ def timeseries_query(
[query.get_snql_query() for query in query_list], referrer, query_source=query_source
)
- with sentry_sdk.start_span(op="errors", description="timeseries.transform_results"):
+ with sentry_sdk.start_span(op="errors", name="timeseries.transform_results"):
results = []
for snql_query, result in zip(query_list, query_results):
results.append(
@@ -238,7 +238,7 @@ def top_events_timeseries(
the top events earlier and want to save a query.
"""
if top_events is None:
- with sentry_sdk.start_span(op="discover.errors", description="top_events.fetch_events"):
+ with sentry_sdk.start_span(op="discover.errors", name="top_events.fetch_events"):
top_events = query(
selected_columns,
query=user_query,
@@ -308,9 +308,7 @@ def top_events_timeseries(
snuba_params.end_date,
rollup,
)
- with sentry_sdk.start_span(
- op="discover.errors", description="top_events.transform_results"
- ) as span:
+ with sentry_sdk.start_span(op="discover.errors", name="top_events.transform_results") as span:
span.set_data("result_count", len(result.get("data", [])))
result = top_events_builder.process_results(result)
diff --git a/src/sentry/snuba/functions.py b/src/sentry/snuba/functions.py
index 74fcc5681398c..e1bac0a139453 100644
--- a/src/sentry/snuba/functions.py
+++ b/src/sentry/snuba/functions.py
@@ -156,7 +156,7 @@ def top_events_timeseries(
assert not include_other, "Other is not supported" # TODO: support other
if top_events is None:
- with sentry_sdk.start_span(op="discover.discover", description="top_events.fetch_events"):
+ with sentry_sdk.start_span(op="discover.discover", name="top_events.fetch_events"):
top_events = query(
selected_columns,
query=user_query,
@@ -231,9 +231,7 @@ def format_top_events_timeseries_results(
rollup,
)
- with sentry_sdk.start_span(
- op="discover.discover", description="top_events.transform_results"
- ) as span:
+ with sentry_sdk.start_span(op="discover.discover", name="top_events.transform_results") as span:
result = query_builder.strip_alias_prefix(result)
span.set_data("result_count", len(result.get("data", [])))
diff --git a/src/sentry/snuba/issue_platform.py b/src/sentry/snuba/issue_platform.py
index 62d6397c520b2..e2ea0ad6be317 100644
--- a/src/sentry/snuba/issue_platform.py
+++ b/src/sentry/snuba/issue_platform.py
@@ -145,7 +145,7 @@ def timeseries_query(
allow_metric_aggregates (bool) Ignored here, only used in metric enhanced performance
"""
- with sentry_sdk.start_span(op="issueplatform", description="timeseries.filter_transform"):
+ with sentry_sdk.start_span(op="issueplatform", name="timeseries.filter_transform"):
equations, columns = categorize_columns(selected_columns)
base_builder = IssuePlatformTimeseriesQueryBuilder(
Dataset.IssuePlatform,
@@ -182,7 +182,7 @@ def timeseries_query(
[query.get_snql_query() for query in query_list], referrer, query_source=query_source
)
- with sentry_sdk.start_span(op="issueplatform", description="timeseries.transform_results"):
+ with sentry_sdk.start_span(op="issueplatform", name="timeseries.transform_results"):
results = []
for snql_query, result in zip(query_list, query_results):
results.append(
diff --git a/src/sentry/snuba/metrics/mqb_query_transformer.py b/src/sentry/snuba/metrics/mqb_query_transformer.py
index 7ee19e9176d34..81b2d6101a112 100644
--- a/src/sentry/snuba/metrics/mqb_query_transformer.py
+++ b/src/sentry/snuba/metrics/mqb_query_transformer.py
@@ -22,7 +22,7 @@
class MQBQueryTransformationException(Exception):
- ...
+ pass
def _get_derived_op_metric_field_from_snuba_function(function: Function):
diff --git a/src/sentry/snuba/metrics/query_builder.py b/src/sentry/snuba/metrics/query_builder.py
index 143b607481328..eac1874aa3349 100644
--- a/src/sentry/snuba/metrics/query_builder.py
+++ b/src/sentry/snuba/metrics/query_builder.py
@@ -396,7 +396,7 @@ def resolve_tags(
if not allowed:
raise InvalidParams(
f"The tag key {name} usage has been prohibited by one of the expressions "
- f"{set(allowed_tag_keys.values()) if allowed_tag_keys else {} }"
+ f"{set(allowed_tag_keys.values()) if allowed_tag_keys else {}}"
)
return Column(name=resolve_tag_key(use_case_id, org_id, name))
diff --git a/src/sentry/snuba/metrics/utils.py b/src/sentry/snuba/metrics/utils.py
index 441925a887e14..91e1f3e2a580b 100644
--- a/src/sentry/snuba/metrics/utils.py
+++ b/src/sentry/snuba/metrics/utils.py
@@ -431,27 +431,27 @@ def combine_dictionary_of_list_values(main_dict, other_dict):
class MetricDoesNotExistException(Exception):
- ...
+ pass
class MetricDoesNotExistInIndexer(Exception):
- ...
+ pass
class DerivedMetricException(Exception, ABC):
- ...
+ pass
class DerivedMetricParseException(DerivedMetricException):
- ...
+ pass
class NotSupportedOverCompositeEntityException(DerivedMetricException):
- ...
+ pass
class OrderByNotSupportedOverCompositeEntityException(NotSupportedOverCompositeEntityException):
- ...
+ pass
@overload
@@ -487,7 +487,7 @@ def to_intervals(
assert interval_seconds > 0
# horrible hack for backward compatibility
- # TODO Try to fix this upstream
+ # TODO: Try to fix this upstream
if start is None or end is None:
return None, None, 0
diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py
index 2b5eaa1f007a5..7a6080a00418e 100644
--- a/src/sentry/snuba/metrics_performance.py
+++ b/src/sentry/snuba/metrics_performance.py
@@ -53,7 +53,7 @@ def query(
fallback_to_transactions=False,
query_source: QuerySource | None = None,
):
- with sentry_sdk.start_span(op="mep", description="MetricQueryBuilder"):
+ with sentry_sdk.start_span(op="mep", name="MetricQueryBuilder"):
metrics_query = MetricsQueryBuilder(
dataset=Dataset.PerformanceMetrics,
params={},
@@ -80,7 +80,7 @@ def query(
)
metrics_referrer = referrer + ".metrics-enhanced"
results = metrics_query.run_query(referrer=metrics_referrer, query_source=query_source)
- with sentry_sdk.start_span(op="mep", description="query.transform_results"):
+ with sentry_sdk.start_span(op="mep", name="query.transform_results"):
results = metrics_query.process_results(results)
results["meta"]["isMetricsData"] = True
results["meta"]["isMetricsExtractedData"] = metrics_query.use_on_demand
@@ -162,7 +162,7 @@ def bulk_timeseries_query(
metrics_compatible = True
if metrics_compatible:
- with sentry_sdk.start_span(op="mep", description="TimeseriesMetricQueryBuilder"):
+ with sentry_sdk.start_span(op="mep", name="TimeseriesMetricQueryBuilder"):
metrics_queries = []
for query in queries:
metrics_query = TimeseriesMetricQueryBuilder(
@@ -190,7 +190,7 @@ def bulk_timeseries_query(
for br in bulk_result:
_result["data"] = [*_result["data"], *br["data"]]
_result["meta"] = br["meta"]
- with sentry_sdk.start_span(op="mep", description="query.transform_results"):
+ with sentry_sdk.start_span(op="mep", name="query.transform_results"):
result = metrics_query.process_results(_result)
sentry_sdk.set_tag("performance.dataset", "metrics")
result["meta"]["isMetricsData"] = True
@@ -268,7 +268,7 @@ def timeseries_query(
metrics_compatible = not equations
def run_metrics_query(inner_params: SnubaParams):
- with sentry_sdk.start_span(op="mep", description="TimeseriesMetricQueryBuilder"):
+ with sentry_sdk.start_span(op="mep", name="TimeseriesMetricQueryBuilder"):
metrics_query = TimeseriesMetricQueryBuilder(
params={},
interval=rollup,
@@ -287,7 +287,7 @@ def run_metrics_query(inner_params: SnubaParams):
)
metrics_referrer = referrer + ".metrics-enhanced"
result = metrics_query.run_query(referrer=metrics_referrer, query_source=query_source)
- with sentry_sdk.start_span(op="mep", description="query.transform_results"):
+ with sentry_sdk.start_span(op="mep", name="query.transform_results"):
result = metrics_query.process_results(result)
result["data"] = (
discover.zerofill(
diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py
index 34749851cc722..006b5c4180930 100644
--- a/src/sentry/snuba/referrer.py
+++ b/src/sentry/snuba/referrer.py
@@ -178,6 +178,20 @@ class Referrer(Enum):
API_ORGANIZATION_VITALS = "api.organization-vitals"
API_PERFORMANCE_DURATIONPERCENTILECHART = "api.performance.durationpercentilechart"
API_AI_PIPELINES_VIEW = "api.ai-pipelines.view"
+ API_PERFORMANCE_BROWSER_RESOURCE_MAIN_TABLE = "api.performance.browser.resources.main-table"
+ API_PERFORMANCE_BROWSER_RESOURCES_PAGE_SELECTOR = (
+ "api.performance.browser.resources.page-selector"
+ )
+ API_PERFORMANCE_BROWSER_WEB_VITALS_PROJECT = "api.performance.browser.web-vitals.project"
+ API_PERFORMANCE_BROWSER_WEB_VITALS_PROJECT_SCORES = (
+ "api.performance.browser.web-vitals.project-scores"
+ )
+ API_PERFORMANCE_BROWSER_WEB_VITALS_TRANSACTION = (
+ "api.performance.browser.web-vitals.transaction"
+ )
+ API_PERFORMANCE_BROWSER_WEB_VITALS_TRANSACTIONS_SCORES = (
+ "api.performance.browser.web-vitals.transactions-scores"
+ )
API_PERFORMANCE_GENERIC_WIDGET_CHART_APDEX_AREA_METRICS_ENHANCED = (
"api.performance.generic-widget-chart.apdex-area.metrics-enhanced"
)
@@ -208,6 +222,9 @@ class Referrer(Enum):
API_PERFORMANCE_GENERIC_WIDGET_CHART_FROZEN_FRAMES_AREA = (
"api.performance.generic-widget-chart.frozen-frames-area"
)
+ API_PERFORMANCE_GENERIC_WIDGET_CHART_HIGHEST_CACHE_MISS_RATE_TRANSACTIONS = (
+ "api.performance.generic-widget-chart.highest-cache--miss-rate-transactions"
+ )
API_PERFORMANCE_GENERIC_WIDGET_CHART_LCP_HISTOGRAM = (
"api.performance.generic-widget-chart.lcp-histogram"
)
@@ -235,6 +252,15 @@ class Referrer(Enum):
API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_SLOW_FRAMES = (
"api.performance.generic-widget-chart.most-slow-frames"
)
+ API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_CONSUMING_DOMAINS = (
+ "api.performance.generic-widget-chart.most-time-consuming-domains"
+ )
+ API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_CONSUMING_RESOURCES = (
+ "api.performance.generic-widget-chart.most-time-consuming-resources"
+ )
+ API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_SPENT_DB_QUERIES = (
+ "api.performance.generic-widget-chart.most-time-spent-db-queries"
+ )
API_PERFORMANCE_GENERIC_WIDGET_CHART_P50_DURATION_AREA_METRICS_ENHANCED = (
"api.performance.generic-widget-chart.p50-duration-area.metrics-enhanced"
)
@@ -295,16 +321,19 @@ class Referrer(Enum):
API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_RESOURCE_OPS = (
"api.performance.generic-widget-chart.slow-resource-ops"
)
+ API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_SCREENS_BY_TTID = (
+ "api.performance.generic-widget-chart.slow-screens-by-ttid"
+ )
API_PERFORMANCE_GENERIC_WIDGET_CHART_TPM_AREA_METRICS_ENHANCED = (
"api.performance.generic-widget-chart.tpm-area.metrics-enhanced"
)
API_PERFORMANCE_GENERIC_WIDGET_CHART_TPM_AREA = "api.performance.generic-widget-chart.tpm-area"
- API_PERFORMANCE_GENERIC_WIDGET_CHART_USER_MISERY_AREA_METRICS_ENHANCED = (
- "api.performance.generic-widget-chart.user-misery-area.metrics-enhanced"
- )
API_PERFORMANCE_GENERIC_WIDGET_CHART_USER_MISERY_AREA = (
"api.performance.generic-widget-chart.user-misery-area"
)
+ API_PERFORMANCE_GENERIC_WIDGET_CHART_USER_MISERY_AREA_METRICS_ENHANCED = (
+ "api.performance.generic-widget-chart.user-misery-area.metrics-enhanced"
+ )
API_PERFORMANCE_GENERIC_WIDGET_CHART_WARM_STARTUP_AREA_METRICS_ENHANCED = (
"api.performance.generic-widget-chart.warm-startup-area.metrics-enhanced"
)
@@ -343,6 +372,11 @@ class Referrer(Enum):
API_PERFORMANCE_LANDING_TABLE = "api.performance.landing-table"
API_PERFORMANCE_STATUS_BREAKDOWN = "api.performance.status-breakdown"
API_PERFORMANCE_TAG_PAGE = "api.performance.tag-page"
+ API_PERFORMANCE_TRACE_TRACE_DRAWER_TRANSACTION_CACHE_METRICS = (
+ "api.performance.trace.trace-drawer-transaction-cache-metrics"
+ )
+ API_PERFORMANCE_TRANSACTION_EVENTS = "api.performance.transaction-events"
+ API_PERFORMANCE_TRANSACTION_NAME_SEARCH_BAR = "api.performance.transaction-name-search-bar"
API_PERFORMANCE_TRANSACTION_SPANS = "api.performance.transaction-spans"
API_PERFORMANCE_TRANSACTION_SUMMARY_DURATION = "api.performance.transaction-summary.duration"
API_PERFORMANCE_TRANSACTION_SUMMARY_SIDEBAR_CHART_METRICS_ENHANCED = (
@@ -416,13 +450,15 @@ class Referrer(Enum):
API_PROJECT_EVENTS = "api.project-events"
API_RELEASES_RELEASE_DETAILS_CHART = "api.releases.release-details-chart"
API_REPLAY_DETAILS_PAGE = "api.replay.details-page"
-
+ API_STARFISH_DATABASE_SYSTEM_SELECTOR = "api.starfish.database-system-selector"
API_STARFISH_ENDPOINT_LIST = "api.starfish.endpoint-list"
+ API_STARFISH_FULL_SPAN_FROM_TRACE = "api.starfish.full-span-from-trace"
API_STARFISH_GET_SPAN_ACTIONS = "api.starfish.get-span-actions"
API_STARFISH_GET_SPAN_DOMAINS = "api.starfish.get-span-domains"
API_STARFISH_GET_SPAN_OPERATIONS = "api.starfish.get-span-operations"
API_STARFISH_SIDEBAR_SPAN_METRICS = "api.starfish.sidebar-span-metrics"
API_STARFISH_SPAN_CATEGORY_BREAKDOWN = "api.starfish-web-service.span-category-breakdown"
+ API_STARFISH_SPAN_DESCRIPTION = "api.starfish.span-description"
API_STARFISH_SPAN_LIST = "api.starfish.use-span-list"
API_STARFISH_SPAN_LIST_PRIMARY = "api.starfish.use-span-list.primary"
API_STARFISH_SPAN_SUMMARY_P95 = "api.starfish.span-summary-panel-samples-table-p95"
@@ -474,6 +510,8 @@ class Referrer(Enum):
API_TRACE_EXPLORER_TRACE_SPANS_LIST = "api.trace-explorer.trace-spans-list"
API_SPANS_TAG_KEYS = "api.spans.tags-keys"
API_SPANS_TAG_KEYS_RPC = "api.spans.tags-keys.rpc"
+ API_SPANS_TAG_VALUES = "api.spans.tags-values"
+ API_SPANS_TAG_VALUES_RPC = "api.spans.tags-values.rpc"
API_SPANS_TRACE_VIEW = "api.spans.trace-view"
# Performance Mobile UI Module
@@ -846,6 +884,9 @@ class Referrer(Enum):
TAGSTORE_GET_TAG_VALUE_PAGINATOR_FOR_PROJECTS = "tagstore.get_tag_value_paginator_for_projects"
TASKS_MONITOR_RELEASE_ADOPTION = "tasks.monitor_release_adoption"
TASKS_PERFORMANCE_SPLIT_DISCOVER_DATASET = "tasks.performance.split_discover_dataset"
+ TASKS_PERFORMANCE_SPLIT_DISCOVER_DATASET_METRICS_ENHANCED = (
+ "tasks.performance.split_discover_dataset.metrics-enhanced"
+ )
TASKS_PROCESS_PROJECTS_WITH_SESSIONS_SESSION_COUNT = (
"tasks.process_projects_with_sessions.session_count"
)
diff --git a/src/sentry/snuba/spans_eap.py b/src/sentry/snuba/spans_eap.py
index cadcf634ce47d..ee23bb77240fb 100644
--- a/src/sentry/snuba/spans_eap.py
+++ b/src/sentry/snuba/spans_eap.py
@@ -49,9 +49,10 @@ def query(
dataset: Dataset = Dataset.Discover,
fallback_to_transactions: bool = False,
query_source: QuerySource | None = None,
+ enable_rpc: bool | None = False,
):
builder = SpansEAPQueryBuilder(
- Dataset.SpansEAP,
+ Dataset.EventsAnalyticsPlatform,
{},
snuba_params=snuba_params,
query=query,
@@ -102,9 +103,9 @@ def timeseries_query(
"""
equations, columns = categorize_columns(selected_columns)
- with sentry_sdk.start_span(op="spans_indexed", description="TimeseriesSpanIndexedQueryBuilder"):
+ with sentry_sdk.start_span(op="spans_indexed", name="TimeseriesSpanIndexedQueryBuilder"):
querybuilder = TimeseriesSpanEAPIndexedQueryBuilder(
- Dataset.SpansEAP,
+ Dataset.EventsAnalyticsPlatform,
{},
rollup,
snuba_params=snuba_params,
@@ -115,7 +116,7 @@ def timeseries_query(
),
)
result = querybuilder.run_query(referrer, query_source=query_source)
- with sentry_sdk.start_span(op="spans_indexed", description="query.transform_results"):
+ with sentry_sdk.start_span(op="spans_indexed", name="query.transform_results"):
result = querybuilder.process_results(result)
result["data"] = (
discover.zerofill(
@@ -167,7 +168,7 @@ def top_events_timeseries(
this API should match that of sentry.snuba.discover.top_events_timeseries
"""
if top_events is None:
- with sentry_sdk.start_span(op="spans_indexed", description="top_events.fetch_events"):
+ with sentry_sdk.start_span(op="spans_indexed", name="top_events.fetch_events"):
top_events = query(
selected_columns,
query=user_query,
@@ -184,7 +185,7 @@ def top_events_timeseries(
)
top_events_builder = TopEventsSpanEAPQueryBuilder(
- Dataset.SpansEAP,
+ Dataset.EventsAnalyticsPlatform,
{},
rollup,
top_events["data"],
@@ -201,7 +202,7 @@ def top_events_timeseries(
)
if len(top_events["data"]) == limit and include_other:
other_events_builder = TopEventsSpanEAPQueryBuilder(
- Dataset.SpansEAP,
+ Dataset.EventsAnalyticsPlatform,
{},
rollup,
top_events["data"],
@@ -239,9 +240,7 @@ def top_events_timeseries(
snuba_params.end_date,
rollup,
)
- with sentry_sdk.start_span(
- op="spans_indexed", description="top_events.transform_results"
- ) as span:
+ with sentry_sdk.start_span(op="spans_indexed", name="top_events.transform_results") as span:
span.set_data("result_count", len(result.get("data", [])))
result = top_events_builder.process_results(result)
diff --git a/src/sentry/snuba/spans_indexed.py b/src/sentry/snuba/spans_indexed.py
index 3991906fe68c7..9656902d4801f 100644
--- a/src/sentry/snuba/spans_indexed.py
+++ b/src/sentry/snuba/spans_indexed.py
@@ -98,7 +98,7 @@ def timeseries_query(
"""
equations, columns = categorize_columns(selected_columns)
- with sentry_sdk.start_span(op="spans_indexed", description="TimeseriesSpanIndexedQueryBuilder"):
+ with sentry_sdk.start_span(op="spans_indexed", name="TimeseriesSpanIndexedQueryBuilder"):
query = TimeseriesSpanIndexedQueryBuilder(
Dataset.SpansIndexed,
{},
@@ -111,7 +111,7 @@ def timeseries_query(
),
)
result = query.run_query(referrer, query_source=query_source)
- with sentry_sdk.start_span(op="spans_indexed", description="query.transform_results"):
+ with sentry_sdk.start_span(op="spans_indexed", name="query.transform_results"):
result = query.process_results(result)
result["data"] = (
discover.zerofill(
@@ -163,7 +163,7 @@ def top_events_timeseries(
"""
if top_events is None:
- with sentry_sdk.start_span(op="spans_indexed", description="top_events.fetch_events"):
+ with sentry_sdk.start_span(op="spans_indexed", name="top_events.fetch_events"):
top_events = query(
selected_columns,
query=user_query,
@@ -235,9 +235,7 @@ def top_events_timeseries(
snuba_params.end_date,
rollup,
)
- with sentry_sdk.start_span(
- op="spans_indexed", description="top_events.transform_results"
- ) as span:
+ with sentry_sdk.start_span(op="spans_indexed", name="top_events.transform_results") as span:
span.set_data("result_count", len(result.get("data", [])))
result = top_events_builder.process_results(result)
diff --git a/src/sentry/snuba/spans_rpc.py b/src/sentry/snuba/spans_rpc.py
new file mode 100644
index 0000000000000..3d5cd5d7d8340
--- /dev/null
+++ b/src/sentry/snuba/spans_rpc.py
@@ -0,0 +1,89 @@
+from typing import Any
+
+from sentry.search.eap.types import SearchResolverConfig
+from sentry.search.events.types import SnubaParams
+from sentry.snuba.referrer import Referrer
+
+
+def run_table_query(
+ params: SnubaParams,
+ query_string: str,
+ selected_columns: list[str], # Aggregations & Fields?
+ orderby: list[str],
+ offset: int,
+ limit: int,
+ referrer: Referrer,
+ config: SearchResolverConfig,
+) -> Any:
+ pass
+ """Make the query"""
+ # maker = SearchResolver(params)
+ # columns, contexts = maker.resolve_columns(selected_columns)
+ # query = maker.resolve_query(query_string)
+
+ """Run the query"""
+ # rpc = table_RPC(columns=[column.proto_definition for column in columns], query=query)
+ # result = rpc.run()
+
+ """Process the results"""
+ # for row in result:
+ # for column in columns:
+ # column.process(row)
+ # return result
+
+
+def run_timeseries_query(
+ params: SnubaParams,
+ query_string: str,
+ y_axes: list[str],
+ groupby: list[str],
+) -> Any:
+ pass
+ """Make the query"""
+ # maker = SearchResolver(params)
+ # groupby, contexts = maker.resolve_columns(groupby)
+ # yaxes = maker.resolve_aggregate(y_axes)
+ # query = maker.resolve_query(query_string)
+
+ """Run the query"""
+ # rpc = timeseries_RPC(columns=[column.proto_definition for column in groupby], query=query)
+ # result = rpc.run()
+
+ """Process the results"""
+ # return _process_timeseries(result, columns)
+
+
+def run_top_events_timeseries_query(
+ params: SnubaParams,
+ query_string: str,
+ y_axes: list[str],
+ groupby: list[str],
+ orderby: list[str],
+) -> Any:
+    """We intentionally duplicate the run_timeseries_query code here to avoid the complexity
+    of shared helper functions that both would have to call.
+    At the time of writing, the query construction is very straightforward; if that changes,
+    perhaps we can revisit this."""
+ pass
+ """Make the query"""
+ # maker = SearchResolver(params)
+ # top_events = run_table_query() with process_results off
+ # new_conditions = construct conditions based on top_events
+ # resolved_query = And(new_conditions, maker.resolve_query(query_string))
+ # groupby, contexts = maker.resolve_columns(groupby)
+ # yaxes = maker.resolve_aggregate(y_axes)
+
+ """Run the query"""
+ # rpc = timeseries_RPC(columns=[column.proto_definition for column in groupby], query=query)
+
+ """Process the results"""
+ # result = rpc.run()
+ # return _process_timeseries(result, columns)
+
+
+def _process_timeseries(result, columns):
+ pass
+ # for row in result:
+ # for column in columns:
+ # column.process(row)
+ # return result
diff --git a/src/sentry/snuba/tasks.py b/src/sentry/snuba/tasks.py
index 413e8fc6fe7d4..cb1ca305e614d 100644
--- a/src/sentry/snuba/tasks.py
+++ b/src/sentry/snuba/tasks.py
@@ -203,7 +203,7 @@ def delete_subscription_from_snuba(query_subscription_id, **kwargs):
def _create_in_snuba(subscription: QuerySubscription) -> str:
assert subscription.snuba_query is not None
- with sentry_sdk.start_span(op="snuba.tasks", description="create_in_snuba") as span:
+ with sentry_sdk.start_span(op="snuba.tasks", name="create_in_snuba") as span:
span.set_tag(
"uses_metrics_layer",
features.has("organizations:use-metrics-layer", subscription.project.organization),
diff --git a/src/sentry/spans/consumers/process/factory.py b/src/sentry/spans/consumers/process/factory.py
index 5d8d3686dba6d..4f76b504ab073 100644
--- a/src/sentry/spans/consumers/process/factory.py
+++ b/src/sentry/spans/consumers/process/factory.py
@@ -209,7 +209,7 @@ def _expand_segments(should_process_segments: list[ProcessSegmentsContext]):
client = RedisSpansBuffer()
payload_context = {}
- with txn.start_child(op="process", description="fetch_unprocessed_segments"):
+ with txn.start_child(op="process", name="fetch_unprocessed_segments"):
keys = client.get_unprocessed_segments_and_prune_bucket(timestamp, partition)
sentry_sdk.set_measurement("segments.count", len(keys))
@@ -218,7 +218,7 @@ def _expand_segments(should_process_segments: list[ProcessSegmentsContext]):
# With pipelining, redis server is forced to queue replies using
# up memory, so batching the keys we fetch.
- with txn.start_child(op="process", description="read_and_expire_many_segments"):
+ with txn.start_child(op="process", name="read_and_expire_many_segments"):
for i in range(0, len(keys), BATCH_SIZE):
segments = client.read_and_expire_many_segments(keys[i : i + BATCH_SIZE])
diff --git a/src/sentry/stacktraces/processing.py b/src/sentry/stacktraces/processing.py
index 1029acb6062a4..c66c5332eff1c 100644
--- a/src/sentry/stacktraces/processing.py
+++ b/src/sentry/stacktraces/processing.py
@@ -324,7 +324,7 @@ def normalize_stacktraces_for_grouping(
# the trimming produces a different function than the function we have
# otherwise stored in `function` to not make the payload larger
# unnecessarily.
- with sentry_sdk.start_span(op=op, description="iterate_frames"):
+ with sentry_sdk.start_span(op=op, name="iterate_frames"):
stripped_querystring = False
for frames in stacktrace_frames:
for frame in frames:
@@ -347,7 +347,7 @@ def normalize_stacktraces_for_grouping(
# If a grouping config is available, run grouping enhancers
if grouping_config is not None:
- with sentry_sdk.start_span(op=op, description="apply_modifications_to_frame"):
+ with sentry_sdk.start_span(op=op, name="apply_modifications_to_frame"):
for frames, stacktrace_container in zip(stacktrace_frames, stacktrace_containers):
# This call has a caching mechanism when the same stacktrace and rules are used
grouping_config.enhancements.apply_modifications_to_frame(
diff --git a/src/sentry/statistical_detectors/issue_platform_adapter.py b/src/sentry/statistical_detectors/issue_platform_adapter.py
index f1c1173d57ca1..30c937ae50f54 100644
--- a/src/sentry/statistical_detectors/issue_platform_adapter.py
+++ b/src/sentry/statistical_detectors/issue_platform_adapter.py
@@ -24,7 +24,7 @@ def send_regression_to_platform(regression: BreakpointData):
displayed_new_baseline = round(float(regression["aggregate_range_2"]), 2)
# For legacy reasons, we're passing project id as project
- # TODO fix this in the breakpoint microservice and in trends v2
+ # TODO: fix this in the breakpoint microservice and in trends v2
project_id = int(regression["project"])
issue_type: type[GroupType] = PerformanceP95EndpointRegressionGroupType
diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py
index e7839bd3622f8..bf9922b42b5b9 100644
--- a/src/sentry/tagstore/snuba/backend.py
+++ b/src/sentry/tagstore/snuba/backend.py
@@ -441,6 +441,11 @@ def get_tag_keys_for_projects(
# So only disable sampling if the timerange is short enough.
if len(projects) <= max_unsampled_projects and end - start <= timedelta(days=14):
optimize_kwargs["sample"] = 1
+
+        # The replays dataset doesn't support sampling.
+ if dataset == Dataset.Replays:
+ optimize_kwargs = {}
+
return self.__get_tag_keys_for_projects(
projects,
None,
diff --git a/src/sentry/tasks/auto_ongoing_issues.py b/src/sentry/tasks/auto_ongoing_issues.py
index 977d5873f10f9..1a69a74eca2b1 100644
--- a/src/sentry/tasks/auto_ongoing_issues.py
+++ b/src/sentry/tasks/auto_ongoing_issues.py
@@ -131,7 +131,7 @@ def get_total_count(results):
extra=logger_extra,
)
- with sentry_sdk.start_span(description="iterate_chunked_group_ids"):
+ with sentry_sdk.start_span(name="iterate_chunked_group_ids"):
for groups in chunked(
RangeQuerySetWrapper(
base_queryset,
@@ -172,7 +172,7 @@ def run_auto_transition_issues_new_to_ongoing(
Child task of `auto_transition_issues_new_to_ongoing`
to conduct the update of specified Groups to Ongoing.
"""
- with sentry_sdk.start_span(description="bulk_transition_group_to_ongoing") as span:
+ with sentry_sdk.start_span(name="bulk_transition_group_to_ongoing") as span:
span.set_tag("group_ids", group_ids)
bulk_transition_group_to_ongoing(
GroupStatus.UNRESOLVED,
@@ -220,7 +220,7 @@ def get_total_count(results):
.filter(recent_regressed_history__lte=datetime.fromtimestamp(date_added_lte, timezone.utc))
)
- with sentry_sdk.start_span(description="iterate_chunked_group_ids"):
+ with sentry_sdk.start_span(name="iterate_chunked_group_ids"):
for group_ids_with_regressed_history in chunked(
RangeQuerySetWrapper(
base_queryset.values_list("id", flat=True),
@@ -260,7 +260,7 @@ def run_auto_transition_issues_regressed_to_ongoing(
Child task of `auto_transition_issues_regressed_to_ongoing`
to conduct the update of specified Groups to Ongoing.
"""
- with sentry_sdk.start_span(description="bulk_transition_group_to_ongoing") as span:
+ with sentry_sdk.start_span(name="bulk_transition_group_to_ongoing") as span:
span.set_tag("group_ids", group_ids)
bulk_transition_group_to_ongoing(
GroupStatus.UNRESOLVED,
@@ -308,7 +308,7 @@ def get_total_count(results):
.filter(recent_escalating_history__lte=datetime.fromtimestamp(date_added_lte, timezone.utc))
)
- with sentry_sdk.start_span(description="iterate_chunked_group_ids"):
+ with sentry_sdk.start_span(name="iterate_chunked_group_ids"):
for new_group_ids in chunked(
RangeQuerySetWrapper(
base_queryset.values_list("id", flat=True),
@@ -348,7 +348,7 @@ def run_auto_transition_issues_escalating_to_ongoing(
Child task of `auto_transition_issues_escalating_to_ongoing`
to conduct the update of specified Groups to Ongoing.
"""
- with sentry_sdk.start_span(description="bulk_transition_group_to_ongoing") as span:
+ with sentry_sdk.start_span(name="bulk_transition_group_to_ongoing") as span:
span.set_tag("group_ids", group_ids)
bulk_transition_group_to_ongoing(
GroupStatus.UNRESOLVED,
diff --git a/src/sentry/tasks/auto_resolve_issues.py b/src/sentry/tasks/auto_resolve_issues.py
index 6681d512256da..8193917f0ee45 100644
--- a/src/sentry/tasks/auto_resolve_issues.py
+++ b/src/sentry/tasks/auto_resolve_issues.py
@@ -16,6 +16,7 @@
from sentry.models.groupinbox import GroupInboxRemoveAction, remove_group_from_inbox
from sentry.models.options.project_option import ProjectOption
from sentry.models.project import Project
+from sentry.signals import issue_resolved
from sentry.silo.base import SiloMode
from sentry.tasks.auto_ongoing_issues import log_error_if_queue_has_items
from sentry.tasks.base import instrumented_task
@@ -126,6 +127,17 @@ def auto_resolve_project_issues(project_id, cutoff=None, chunk_size=1000, **kwar
issue_type=group.issue_type.slug,
issue_category=group.issue_category.name.lower(),
)
+ # auto-resolve is a kind of resolve and this signal makes
+ # sure all things that need to happen after resolve are triggered
+ # examples are analytics and webhooks
+ issue_resolved.send_robust(
+ organization_id=project.organization_id,
+ user=None,
+ group=group,
+ project=project,
+ resolution_type="autoresolve",
+ sender="auto_resolve_issues",
+ )
if might_have_more:
auto_resolve_project_issues.delay(
diff --git a/src/sentry/tasks/base.py b/src/sentry/tasks/base.py
index ba8d4a58e485d..3b703d27fd9fb 100644
--- a/src/sentry/tasks/base.py
+++ b/src/sentry/tasks/base.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import logging
import resource
from collections.abc import Callable, Iterable
from contextlib import contextmanager
@@ -8,6 +9,7 @@
from typing import Any, TypeVar
from celery import current_task
+from django.conf import settings
from django.db.models import Model
from sentry.celery import app
@@ -17,6 +19,8 @@
ModelT = TypeVar("ModelT", bound=Model)
+logger = logging.getLogger(__name__)
+
class TaskSiloLimit(SiloLimit):
"""
@@ -129,6 +133,12 @@ def _wrapped(*args, **kwargs):
return result
+ # If the split task router is configured for the task, always use queues defined
+ # in the split task configuration
+ if name in settings.CELERY_SPLIT_QUEUE_TASK_ROUTES and "queue" in kwargs:
+ q = kwargs.pop("queue")
+ logger.warning("ignoring queue: %s, using value from CELERY_SPLIT_QUEUE_TASK_ROUTES", q)
+
# We never use result backends in Celery. Leaving `trail=True` means that if we schedule
# many tasks from a parent task, each task leaks memory. This can lead to the scheduler
# being OOM killed.
diff --git a/src/sentry/tasks/commit_context.py b/src/sentry/tasks/commit_context.py
index 6da5636654469..f017618e8199f 100644
--- a/src/sentry/tasks/commit_context.py
+++ b/src/sentry/tasks/commit_context.py
@@ -2,10 +2,9 @@
import logging
from collections.abc import Mapping, Sequence
-from datetime import datetime, timedelta, timezone
+from datetime import timedelta
from typing import Any
-import sentry_sdk
from celery import Task
from celery.exceptions import MaxRetriesExceededError
from django.utils import timezone as django_timezone
@@ -13,7 +12,7 @@
from sentry import analytics
from sentry.api.serializers.models.release import get_users_for_authors
-from sentry.integrations.base import IntegrationInstallation
+from sentry.integrations.source_code_management.commit_context import CommitContextIntegration
from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs
from sentry.integrations.utils.commit_context import (
find_commit_context_for_event_all_frames,
@@ -22,24 +21,14 @@
from sentry.locks import locks
from sentry.models.commit import Commit
from sentry.models.commitauthor import CommitAuthor
-from sentry.models.group import Group
from sentry.models.groupowner import GroupOwner, GroupOwnerType
-from sentry.models.options.organization_option import OrganizationOption
from sentry.models.project import Project
from sentry.models.projectownership import ProjectOwnership
-from sentry.models.pullrequest import (
- CommentType,
- PullRequest,
- PullRequestComment,
- PullRequestCommit,
-)
-from sentry.models.repository import Repository
from sentry.shared_integrations.exceptions import ApiError
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task
from sentry.tasks.groupowner import process_suspect_commits
from sentry.utils import metrics
-from sentry.utils.cache import cache
from sentry.utils.locking import UnableToAcquireLock
from sentry.utils.sdk import set_current_event_project
@@ -49,87 +38,11 @@
PR_COMMENT_WINDOW = 14 # days
# TODO: replace this with isinstance(installation, CommitContextIntegration)
-PR_COMMENT_SUPPORTED_PROVIDERS = {"integrations:github"}
+PR_COMMENT_SUPPORTED_PROVIDERS = {"github"}
logger = logging.getLogger(__name__)
-def queue_comment_task_if_needed(
- commit: Commit, group_owner: GroupOwner, repo: Repository, installation: IntegrationInstallation
-) -> None:
- from sentry.integrations.github.tasks.pr_comment import github_comment_workflow
-
- logger.info(
- "github.pr_comment.queue_comment_check",
- extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key},
- )
-
- # client will raise an Exception if the request is not successful
- try:
- client = installation.get_client()
- merge_commit_sha = client.get_merge_commit_sha_from_commit(repo=repo.name, sha=commit.key)
- except Exception as e:
- sentry_sdk.capture_exception(e)
- return
-
- if merge_commit_sha is None:
- logger.info(
- "github.pr_comment.queue_comment_check.commit_not_in_default_branch",
- extra={
- "organization_id": commit.organization_id,
- "repository_id": repo.id,
- "commit_sha": commit.key,
- },
- )
- return
-
- pr_query = PullRequest.objects.filter(
- organization_id=commit.organization_id,
- repository_id=commit.repository_id,
- merge_commit_sha=merge_commit_sha,
- )
- if not pr_query.exists():
- logger.info(
- "github.pr_comment.queue_comment_check.missing_pr",
- extra={
- "organization_id": commit.organization_id,
- "repository_id": repo.id,
- "commit_sha": commit.key,
- },
- )
- return
-
- pr = pr_query.first()
- assert pr is not None
- # need to query explicitly for merged PR comments since we can have multiple comments per PR
- merged_pr_comment_query = PullRequestComment.objects.filter(
- pull_request_id=pr.id, comment_type=CommentType.MERGED_PR
- )
- if pr.date_added >= datetime.now(tz=timezone.utc) - timedelta(days=PR_COMMENT_WINDOW) and (
- not merged_pr_comment_query.exists()
- or group_owner.group_id not in merged_pr_comment_query[0].group_ids
- ):
- lock = locks.get(
- DEBOUNCE_PR_COMMENT_LOCK_KEY(pr.id), duration=10, name="queue_comment_task"
- )
- with lock.acquire():
- cache_key = DEBOUNCE_PR_COMMENT_CACHE_KEY(pullrequest_id=pr.id)
- if cache.get(cache_key) is not None:
- return
-
- # create PR commit row for suspect commit and PR
- PullRequestCommit.objects.get_or_create(commit=commit, pull_request=pr)
-
- logger.info(
- "github.pr_comment.queue_comment_workflow",
- extra={"pullrequest_id": pr.id, "project_id": group_owner.project_id},
- )
-
- cache.set(cache_key, True, PR_COMMENT_TASK_TTL)
-
- github_comment_workflow.delay(pullrequest_id=pr.id, project_id=group_owner.project_id)
-
-
@instrumented_task(
name="sentry.tasks.process_commit_context",
queue="group_owners.process_commit_context",
@@ -270,29 +183,13 @@ def process_commit_context(
}, # Updates date of an existing owner, since we just matched them with this new event
)
- if OrganizationOption.objects.get_value(
- organization=project.organization,
- key="sentry:github_pr_bot",
- default=True,
+ if (
+ installation
+ and isinstance(installation, CommitContextIntegration)
+ and installation.integration_name
+ in PR_COMMENT_SUPPORTED_PROVIDERS # TODO: remove this check
):
- logger.info(
- "github.pr_comment",
- extra={"organization_id": project.organization_id},
- )
- repo = Repository.objects.filter(id=commit.repository_id).order_by("-date_added")
- group = Group.objects.get_from_cache(id=group_id)
- if (
- group.level is not logging.INFO # Don't comment on info level issues
- and installation is not None
- and repo.exists()
- and repo.get().provider in PR_COMMENT_SUPPORTED_PROVIDERS
- ):
- queue_comment_task_if_needed(commit, group_owner, repo.get(), installation)
- else:
- logger.info(
- "github.pr_comment.incorrect_repo_config",
- extra={"organization_id": project.organization_id},
- )
+ installation.queue_comment_task_if_needed(project, commit, group_owner, group_id)
ProjectOwnership.handle_auto_assignment(
project_id=project.id,
diff --git a/src/sentry/tasks/embeddings_grouping/utils.py b/src/sentry/tasks/embeddings_grouping/utils.py
index 3aed6f6594e5b..bc9c00cc2efb3 100644
--- a/src/sentry/tasks/embeddings_grouping/utils.py
+++ b/src/sentry/tasks/embeddings_grouping/utils.py
@@ -374,7 +374,6 @@ def get_events_from_nodestore(
CreateGroupingRecordData(
group_id=group_id,
project_id=project.id,
- message=filter_null_from_string(event.title),
exception_type=(
filter_null_from_string(exception_type) if exception_type else None
),
@@ -634,9 +633,7 @@ def lookup_group_data_stacktrace_bulk(
else:
bulk_data = _make_nodestore_call(project, list(node_id_to_group_data.keys()))
- with sentry_sdk.start_span(
- op="lookup_event_bulk.loop", description="lookup_event_bulk.loop"
- ):
+ with sentry_sdk.start_span(op="lookup_event_bulk.loop", name="lookup_event_bulk.loop"):
for node_id, data in bulk_data.items():
if node_id in node_id_to_group_data:
event_id, group_id = (
@@ -650,7 +647,7 @@ def lookup_group_data_stacktrace_bulk(
with sentry_sdk.start_span(
op="lookup_event_bulk.individual_lookup",
- description="lookup_event_bulk.individual_lookup",
+ name="lookup_event_bulk.individual_lookup",
):
# look up individually any that may have failed during bulk lookup
for node_id, (event_id, group_id) in node_id_to_group_data.items():
diff --git a/src/sentry/tasks/on_demand_metrics.py b/src/sentry/tasks/on_demand_metrics.py
index 374c479d81585..2b38c0956ff1c 100644
--- a/src/sentry/tasks/on_demand_metrics.py
+++ b/src/sentry/tasks/on_demand_metrics.py
@@ -126,7 +126,10 @@ def schedule_on_demand_check() -> None:
for (widget_query_id,) in RangeQuerySetWrapper(
DashboardWidgetQuery.objects.filter(
- widget__widget_type=DashboardWidgetTypes.DISCOVER
+ widget__widget_type__in=[
+ DashboardWidgetTypes.DISCOVER,
+ DashboardWidgetTypes.TRANSACTION_LIKE,
+ ]
).values_list("id"),
result_value_getter=lambda item: item[0],
):
diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py
index a1aca3af81357..3b6d171c9f13f 100644
--- a/src/sentry/tasks/post_process.py
+++ b/src/sentry/tasks/post_process.py
@@ -176,6 +176,7 @@ def _capture_group_stats(job: PostProcessJob) -> None:
metrics.incr("events.unique", tags={"platform": platform}, skip_internal=False)
+@sentry_sdk.trace
def should_issue_owners_ratelimit(project_id: int, group_id: int, organization_id: int | None):
"""
Make sure that we do not accept more groups than the enforced_limit at the project level.
@@ -204,109 +205,87 @@ def should_issue_owners_ratelimit(project_id: int, group_id: int, organization_i
return len(groups) > enforced_limit
+@metrics.wraps("post_process.handle_owner_assignment")
+@sentry_sdk.trace
def handle_owner_assignment(job):
if job["is_reprocessed"]:
return
- with sentry_sdk.start_span(op="tasks.post_process_group.handle_owner_assignment"):
- try:
- from sentry.models.groupowner import (
- ASSIGNEE_DOES_NOT_EXIST_DURATION,
- ASSIGNEE_EXISTS_DURATION,
- ASSIGNEE_EXISTS_KEY,
- ISSUE_OWNERS_DEBOUNCE_DURATION,
- ISSUE_OWNERS_DEBOUNCE_KEY,
- )
- from sentry.models.projectownership import ProjectOwnership
-
- event = job["event"]
- project, group = event.project, event.group
- # We want to debounce owner assignment when:
- # - GroupOwner of type Ownership Rule || CodeOwner exist with TTL 1 day
- # - we tried to calculate and could not find issue owners with TTL 1 day
- # - an Assignee has been set with TTL of infinite
- with metrics.timer("post_process.handle_owner_assignment"):
- with sentry_sdk.start_span(op="post_process.handle_owner_assignment.ratelimited"):
- if should_issue_owners_ratelimit(
- project_id=project.id,
- group_id=group.id,
- organization_id=event.project.organization_id,
- ):
- metrics.incr("sentry.task.post_process.handle_owner_assignment.ratelimited")
- return
+ from sentry.models.groupowner import (
+ ASSIGNEE_DOES_NOT_EXIST_DURATION,
+ ASSIGNEE_EXISTS_DURATION,
+ ASSIGNEE_EXISTS_KEY,
+ ISSUE_OWNERS_DEBOUNCE_DURATION,
+ ISSUE_OWNERS_DEBOUNCE_KEY,
+ )
+ from sentry.models.projectownership import ProjectOwnership
- with sentry_sdk.start_span(
- op="post_process.handle_owner_assignment.cache_set_assignee"
- ):
- # Is the issue already assigned to a team or user?
- assignee_key = ASSIGNEE_EXISTS_KEY(group.id)
- assignees_exists = cache.get(assignee_key)
- if assignees_exists is None:
- assignees_exists = group.assignee_set.exists()
- # Cache for 1 day if it's assigned. We don't need to move that fast.
- cache.set(
- assignee_key,
- assignees_exists,
- (
- ASSIGNEE_EXISTS_DURATION
- if assignees_exists
- else ASSIGNEE_DOES_NOT_EXIST_DURATION
- ),
- )
+ event = job["event"]
+ project, group = event.project, event.group
+ # We want to debounce owner assignment when:
+ # - GroupOwner of type Ownership Rule || CodeOwner exist with TTL 1 day
+ # - we tried to calculate and could not find issue owners with TTL 1 day
+ # - an Assignee has been set with TTL of infinite
+
+ if should_issue_owners_ratelimit(
+ project_id=project.id,
+ group_id=group.id,
+ organization_id=event.project.organization_id,
+ ):
+ metrics.incr("sentry.task.post_process.handle_owner_assignment.ratelimited")
+ return
- if assignees_exists:
- metrics.incr(
- "sentry.task.post_process.handle_owner_assignment.assignee_exists"
- )
- return
+ # Is the issue already assigned to a team or user?
+ assignee_key = ASSIGNEE_EXISTS_KEY(group.id)
+ assignees_exists = cache.get(assignee_key)
+ if assignees_exists is None:
+ assignees_exists = group.assignee_set.exists()
+ # Cache for 1 day if it's assigned. We don't need to move that fast.
+ cache.set(
+ assignee_key,
+ assignees_exists,
+ (ASSIGNEE_EXISTS_DURATION if assignees_exists else ASSIGNEE_DOES_NOT_EXIST_DURATION),
+ )
- with sentry_sdk.start_span(
- op="post_process.handle_owner_assignment.debounce_issue_owners"
- ):
- issue_owners_key = ISSUE_OWNERS_DEBOUNCE_KEY(group.id)
- debounce_issue_owners = cache.get(issue_owners_key)
+ if assignees_exists:
+ metrics.incr("sentry.task.post_process.handle_owner_assignment.assignee_exists")
+ return
- if debounce_issue_owners:
- metrics.incr("sentry.tasks.post_process.handle_owner_assignment.debounce")
- return
+ issue_owners_key = ISSUE_OWNERS_DEBOUNCE_KEY(group.id)
+ debounce_issue_owners = cache.get(issue_owners_key)
- with metrics.timer("post_process.process_owner_assignments.duration"):
- with sentry_sdk.start_span(
- op="post_process.handle_owner_assignment.get_issue_owners"
- ):
- if killswitch_matches_context(
- "post_process.get-autoassign-owners",
- {
- "project_id": project.id,
- },
- ):
- # see ProjectOwnership.get_issue_owners
- issue_owners: Sequence[tuple[Rule, Sequence[Team | RpcUser], str]] = []
- else:
- issue_owners = ProjectOwnership.get_issue_owners(project.id, event.data)
-
- # Cache for 1 day after we calculated. We don't need to move that fast.
- cache.set(
- issue_owners_key,
- True,
- ISSUE_OWNERS_DEBOUNCE_DURATION,
- )
+ if debounce_issue_owners:
+ metrics.incr("sentry.tasks.post_process.handle_owner_assignment.debounce")
+ return
- with sentry_sdk.start_span(
- op="post_process.handle_owner_assignment.handle_group_owners"
- ):
- if issue_owners:
- try:
- handle_group_owners(project, group, issue_owners)
- except Exception:
- logger.exception("Failed to store group owners")
- else:
- handle_invalid_group_owners(group)
+ if killswitch_matches_context(
+ "post_process.get-autoassign-owners",
+ {
+ "project_id": project.id,
+ },
+ ):
+ # see ProjectOwnership.get_issue_owners
+ issue_owners: Sequence[tuple[Rule, Sequence[Team | RpcUser], str]] = []
+ handle_invalid_group_owners(group)
+ else:
+ issue_owners = ProjectOwnership.get_issue_owners(project.id, event.data)
+ # Cache for 1 day after we calculated. We don't need to move that fast.
+ cache.set(
+ issue_owners_key,
+ True,
+ ISSUE_OWNERS_DEBOUNCE_DURATION,
+ )
+ if issue_owners:
+ try:
+ handle_group_owners(project, group, issue_owners)
except Exception:
- logger.exception("Failed to handle owner assignments")
+ logger.exception("Failed to store group owners")
+ else:
+ handle_invalid_group_owners(group)
+@sentry_sdk.trace
def handle_invalid_group_owners(group):
from sentry.models.groupowner import GroupOwner, GroupOwnerType
@@ -322,6 +301,7 @@ def handle_invalid_group_owners(group):
)
+@sentry_sdk.trace
def handle_group_owners(
project: Project,
group: Group,
@@ -1160,7 +1140,7 @@ def process_service_hooks(job: PostProcessJob) -> None:
if job["is_reprocessed"]:
return
- from sentry.tasks.servicehooks import process_service_hook
+ from sentry.sentry_apps.tasks.service_hooks import process_service_hook
event, has_alert = job["event"], job["has_alert"]
@@ -1179,7 +1159,7 @@ def process_resource_change_bounds(job: PostProcessJob) -> None:
if job["is_reprocessed"]:
return
- from sentry.tasks.sentry_apps import process_resource_change_bound
+ from sentry.sentry_apps.tasks.sentry_apps import process_resource_change_bound
event, is_new = job["event"], job["group_state"]["is_new"]
diff --git a/src/sentry/tasks/process_buffer.py b/src/sentry/tasks/process_buffer.py
index 73d69a23a0732..7dde9e6cec273 100644
--- a/src/sentry/tasks/process_buffer.py
+++ b/src/sentry/tasks/process_buffer.py
@@ -30,7 +30,7 @@ def process_pending() -> None:
try:
with lock.acquire():
- buffer.process_pending()
+ buffer.backend.process_pending()
except UnableToAcquireLock as error:
logger.warning("process_pending.fail", extra={"error": error})
@@ -48,7 +48,7 @@ def process_pending_batch() -> None:
try:
with lock.acquire():
- buffer.process_batch()
+ buffer.backend.process_batch()
except UnableToAcquireLock as error:
logger.warning("process_pending_batch.fail", extra={"error": error})
@@ -62,7 +62,7 @@ def process_incr(**kwargs):
sentry_sdk.set_tag("model", kwargs.get("model", "Unknown"))
- buffer.process(**kwargs)
+ buffer.backend.process(**kwargs)
def buffer_incr(model, *args, **kwargs):
@@ -91,4 +91,4 @@ def buffer_incr_task(app_label, model_name, args, kwargs):
sentry_sdk.set_tag("model", model_name)
- buffer.incr(apps.get_model(app_label=app_label, model_name=model_name), *args, **kwargs)
+ buffer.backend.incr(apps.get_model(app_label=app_label, model_name=model_name), *args, **kwargs)
diff --git a/src/sentry/tasks/relocation.py b/src/sentry/tasks/relocation.py
index 5b32f248a6636..a983e7af79bc3 100644
--- a/src/sentry/tasks/relocation.py
+++ b/src/sentry/tasks/relocation.py
@@ -1144,6 +1144,10 @@ def camel_to_snake_keep_underscores(value):
artifacts=convert_dict_key_case(cb_conf["artifacts"], camel_to_snake_keep_underscores),
timeout=convert_dict_key_case(cb_conf["timeout"], camel_to_snake_keep_underscores),
options=convert_dict_key_case(cb_conf["options"], camel_to_snake_keep_underscores),
+ tags=[
+ f"relocation-into-{get_local_region().name}",
+ f"relocation-id-{uuid}",
+ ],
)
response = cb_client.create_build(project_id=gcp_project_id(), build=build)
diff --git a/src/sentry/tasks/sentry_apps.py b/src/sentry/tasks/sentry_apps.py
index 6fc7abf4cb6de..a144e7d35ebef 100644
--- a/src/sentry/tasks/sentry_apps.py
+++ b/src/sentry/tasks/sentry_apps.py
@@ -1,96 +1,26 @@
-from __future__ import annotations
-
-import logging
-from collections import defaultdict
from collections.abc import Mapping
from typing import Any
-from celery import current_task
-from django.urls import reverse
-from requests.exceptions import RequestException
+from celery import Task
-from sentry import analytics
-from sentry.api.serializers import AppPlatformEvent, serialize
-from sentry.constants import SentryAppInstallationStatus
-from sentry.eventstore.models import Event, GroupEvent
-from sentry.hybridcloud.rpc.caching import region_caching_service
-from sentry.models.activity import Activity
-from sentry.models.group import Group
-from sentry.models.organization import Organization
-from sentry.models.organizationmapping import OrganizationMapping
-from sentry.models.project import Project
-from sentry.sentry_apps.models.sentry_app import VALID_EVENTS, SentryApp
-from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-from sentry.sentry_apps.models.servicehook import ServiceHook, ServiceHookProject
-from sentry.sentry_apps.services.app.service import (
- app_service,
- get_by_application_id,
- get_installation,
+from sentry.eventstore.models import Event
+from sentry.sentry_apps.tasks.sentry_apps import CONTROL_TASK_OPTIONS, TASK_OPTIONS
+from sentry.sentry_apps.tasks.sentry_apps import build_comment_webhook as new_build_comment_webhook
+from sentry.sentry_apps.tasks.sentry_apps import clear_region_cache as new_clear_region_cache
+from sentry.sentry_apps.tasks.sentry_apps import (
+ create_or_update_service_hooks_for_sentry_app as new_create_or_update_service_hooks_for_sentry_app,
)
-from sentry.shared_integrations.exceptions import ApiHostError, ApiTimeoutError, ClientError
-from sentry.silo.base import SiloMode
-from sentry.tasks.base import instrumented_task, retry
-from sentry.users.services.user.service import user_service
-from sentry.utils import metrics
-from sentry.utils.http import absolute_uri
-from sentry.utils.sentry_apps import send_and_save_webhook_request
-from sentry.utils.sentry_apps.service_hook_manager import (
- create_or_update_service_hooks_for_installation,
+from sentry.sentry_apps.tasks.sentry_apps import installation_webhook as new_installation_webhook
+from sentry.sentry_apps.tasks.sentry_apps import (
+ process_resource_change_bound as new_process_resource_change_bound,
)
-
-logger = logging.getLogger("sentry.tasks.sentry_apps")
-
-TASK_OPTIONS = {
- "queue": "app_platform",
- "default_retry_delay": (60 * 5), # Five minutes.
- "max_retries": 3,
- "record_timing": True,
- "silo_mode": SiloMode.REGION,
-}
-CONTROL_TASK_OPTIONS = {
- "queue": "app_platform.control",
- "default_retry_delay": (60 * 5), # Five minutes.
- "max_retries": 3,
- "silo_mode": SiloMode.CONTROL,
-}
-
-retry_decorator = retry(
- on=(RequestException, ApiHostError, ApiTimeoutError),
- ignore=(ClientError,),
+from sentry.sentry_apps.tasks.sentry_apps import retry_decorator
+from sentry.sentry_apps.tasks.sentry_apps import send_alert_event as new_send_alert_event
+from sentry.sentry_apps.tasks.sentry_apps import (
+ send_resource_change_webhook as new_send_resource_change_webhook,
)
-
-# We call some models by a different name, publicly, than their class name.
-# For example the model Group is called "Issue" in the UI. We want the Service
-# Hook events to match what we externally call these primitives.
-RESOURCE_RENAMES = {"Group": "issue"}
-
-TYPES = {"Group": Group, "Error": Event, "Comment": Activity}
-
-
-def _webhook_event_data(event, group_id, project_id):
- project = Project.objects.get_from_cache(id=project_id)
- organization = Organization.objects.get_from_cache(id=project.organization_id)
-
- event_context = event.as_dict()
- event_context["url"] = absolute_uri(
- reverse(
- "sentry-api-0-project-event-details",
- args=[project.organization.slug, project.slug, event.event_id],
- )
- )
-
- event_context["web_url"] = absolute_uri(
- reverse(
- "sentry-organization-event-detail", args=[organization.slug, group_id, event.event_id]
- )
- )
-
- # The URL has a regex OR in it ("|") which means `reverse` cannot generate
- # a valid URL (it can't know which option to pick). We have to manually
- # create this URL for, that reason.
- event_context["issue_url"] = absolute_uri(f"/api/0/issues/{group_id}/")
- event_context["issue_id"] = str(group_id)
- return event_context
+from sentry.sentry_apps.tasks.sentry_apps import workflow_notification as new_workflow_notification
+from sentry.tasks.base import instrumented_task
@instrumented_task(name="sentry.tasks.sentry_apps.send_alert_event", **TASK_OPTIONS)
@@ -102,357 +32,76 @@ def send_alert_event(
additional_payload_key: str | None = None,
additional_payload: Mapping[str, Any] | None = None,
) -> None:
- """
- When an incident alert is triggered, send incident data to the SentryApp's webhook.
- :param event: The `Event` for which to build a payload.
- :param rule: The AlertRule that was triggered.
- :param sentry_app_id: The SentryApp to notify.
- :param additional_payload_key: The key used to attach additional data to the webhook payload
- :param additional_payload: The extra data attached to the payload body at the key specified by `additional_payload_key`.
- :return:
- """
- group = event.group
- project = Project.objects.get_from_cache(id=group.project_id)
- organization = Organization.objects.get_from_cache(id=project.organization_id)
-
- extra = {
- "sentry_app_id": sentry_app_id,
- "project_slug": project.slug,
- "organization_slug": organization.slug,
- "rule": rule,
- }
-
- sentry_app = app_service.get_sentry_app_by_id(id=sentry_app_id)
- if sentry_app is None:
- logger.info("event_alert_webhook.missing_sentry_app", extra=extra)
- return
-
- installations = app_service.get_many(
- filter=dict(
- organization_id=organization.id,
- app_ids=[sentry_app.id],
- status=SentryAppInstallationStatus.INSTALLED,
- )
+ new_send_alert_event(
+ event=event,
+ rule=rule,
+ sentry_app_id=sentry_app_id,
+ additional_payload_key=additional_payload_key,
+ additional_payload=additional_payload,
)
- if not installations:
- logger.info("event_alert_webhook.missing_installation", extra=extra)
- return
- (install,) = installations
-
- event_context = _webhook_event_data(event, group.id, project.id)
-
- data = {"event": event_context, "triggered_rule": rule}
-
- # Attach extra payload to the webhook
- if additional_payload_key and additional_payload:
- data[additional_payload_key] = additional_payload
-
- request_data = AppPlatformEvent(
- resource="event_alert", action="triggered", install=install, data=data
- )
-
- send_and_save_webhook_request(sentry_app, request_data)
-
- # On success, record analytic event for Alert Rule UI Component
- if request_data.data.get("issue_alert"):
- analytics.record(
- "alert_rule_ui_component_webhook.sent",
- organization_id=organization.id,
- sentry_app_id=sentry_app_id,
- event=f"{request_data.resource}.{request_data.action}",
- )
-
-
-def _process_resource_change(action, sender, instance_id, retryer=None, *args, **kwargs):
- # The class is serialized as a string when enqueueing the class.
- model = TYPES[sender]
- # The Event model has different hooks for the different event types. The sender
- # determines which type eg. Error and therefore the 'name' eg. error
- if issubclass(model, Event):
- if not kwargs.get("instance"):
- extra = {"sender": sender, "action": action, "event_id": instance_id}
- logger.info("process_resource_change.event_missing_event", extra=extra)
- return
- name = sender.lower()
- else:
- # Some resources are named differently than their model. eg. Group vs Issue.
- # Looks up the human name for the model. Defaults to the model name.
- name = RESOURCE_RENAMES.get(model.__name__, model.__name__.lower())
-
- # By default, use Celery's `current_task` but allow a value to be passed for the
- # bound Task.
- retryer = retryer or current_task
-
- # We may run into a race condition where this task executes before the
- # transaction that creates the Group has committed.
- try:
- if issubclass(model, Event):
- # XXX:(Meredith): Passing through the entire event was an intentional choice
- # to avoid having to query NodeStore again for data we had previously in
- # post_process. While this is not ideal, changing this will most likely involve
- # an overhaul of how we do things in post_process, not just this task alone.
- instance = kwargs.get("instance")
- else:
- instance = model.objects.get(id=instance_id)
- except model.DoesNotExist as e:
- # Explicitly requeue the task, so we don't report this to Sentry until
- # we hit the max number of retries.
- return retryer.retry(exc=e)
-
- event = f"{name}.{action}"
-
- if event not in VALID_EVENTS:
- return
-
- org = None
-
- if isinstance(instance, (Group, Event, GroupEvent)):
- org = Organization.objects.get_from_cache(
- id=Project.objects.get_from_cache(id=instance.project_id).organization_id
- )
-
- installations = filter(
- lambda i: event in i.sentry_app.events,
- app_service.get_installed_for_organization(organization_id=org.id),
- )
-
- for installation in installations:
- data = {}
- if isinstance(instance, Event) or isinstance(instance, GroupEvent):
- data[name] = _webhook_event_data(instance, instance.group_id, instance.project_id)
- else:
- data[name] = serialize(instance)
-
- # Trigger a new task for each webhook
- send_resource_change_webhook.delay(installation_id=installation.id, event=event, data=data)
@instrumented_task("sentry.tasks.process_resource_change_bound", bind=True, **TASK_OPTIONS)
@retry_decorator
-def process_resource_change_bound(self, action, sender, instance_id, *args, **kwargs):
- _process_resource_change(action, sender, instance_id, retryer=self, *args, **kwargs)
+def process_resource_change_bound(
+ self: Task, action: str, sender: str, instance_id: int, **kwargs: Any
+) -> None:
+ new_process_resource_change_bound(
+ action=action, sender=sender, instance_id=instance_id, **kwargs
+ )
@instrumented_task(name="sentry.tasks.sentry_apps.installation_webhook", **CONTROL_TASK_OPTIONS)
@retry_decorator
-def installation_webhook(installation_id, user_id, *args, **kwargs):
- from sentry.mediators.sentry_app_installations.installation_notifier import InstallationNotifier
-
- extra = {"installation_id": installation_id, "user_id": user_id}
- try:
- # we should send the webhook for pending installations on the install event in case that's part of the workflow
- install = SentryAppInstallation.objects.get(id=installation_id)
- except SentryAppInstallation.DoesNotExist:
- logger.info("installation_webhook.missing_installation", extra=extra)
- return
-
- user = user_service.get_user(user_id=user_id)
- if not user:
- logger.info("installation_webhook.missing_user", extra=extra)
- return
-
- InstallationNotifier.run(install=install, user=user, action="created")
+def installation_webhook(installation_id: int, user_id: int, *args: Any, **kwargs: Any) -> None:
+ new_installation_webhook(installation_id=installation_id, user_id=user_id, *args, **kwargs)
@instrumented_task(
name="sentry.sentry_apps.tasks.installations.clear_region_cache", **CONTROL_TASK_OPTIONS
)
def clear_region_cache(sentry_app_id: int, region_name: str) -> None:
- try:
- sentry_app = SentryApp.objects.get(id=sentry_app_id)
- except SentryApp.DoesNotExist:
- return
-
- # When a sentry app's definition changes purge cache for all the installations.
- # This could get slow for large applications, but generally big applications don't change often.
- install_query = SentryAppInstallation.objects.filter(
- sentry_app=sentry_app,
- ).values("id", "organization_id")
-
- # There isn't a constraint on org : sentryapp so we have to handle lists
- install_map: dict[int, list[int]] = defaultdict(list)
- for install_row in install_query:
- install_map[install_row["organization_id"]].append(install_row["id"])
-
- # Clear application_id cache
- region_caching_service.clear_key(
- key=get_by_application_id.key_from(sentry_app.application_id), region_name=region_name
- )
-
- # Limit our operations to the region this outbox is for.
- # This could be a single query if we use raw_sql.
- region_query = OrganizationMapping.objects.filter(
- organization_id__in=list(install_map.keys()), region_name=region_name
- ).values("organization_id")
- for region_row in region_query:
- installs = install_map[region_row["organization_id"]]
- for install_id in installs:
- region_caching_service.clear_key(
- key=get_installation.key_from(install_id), region_name=region_name
- )
+ new_clear_region_cache(sentry_app_id=sentry_app_id, region_name=region_name)
@instrumented_task(name="sentry.tasks.sentry_apps.workflow_notification", **TASK_OPTIONS)
@retry_decorator
-def workflow_notification(installation_id, issue_id, type, user_id, *args, **kwargs):
- webhook_data = get_webhook_data(installation_id, issue_id, user_id)
- if not webhook_data:
- return
- install, issue, user = webhook_data
- data = kwargs.get("data", {})
- data.update({"issue": serialize(issue)})
- send_webhooks(installation=install, event=f"issue.{type}", data=data, actor=user)
- analytics.record(
- f"sentry_app.issue.{type}",
- user_id=user_id,
- group_id=issue_id,
+def workflow_notification(
+ installation_id: int, issue_id: int, type: str, user_id: int, *args: Any, **kwargs: Any
+) -> None:
+ new_workflow_notification(
installation_id=installation_id,
+ issue_id=issue_id,
+ type=type,
+ user_id=user_id,
+ *args,
+ **kwargs,
)
@instrumented_task(name="sentry.tasks.sentry_apps.build_comment_webhook", **TASK_OPTIONS)
@retry_decorator
-def build_comment_webhook(installation_id, issue_id, type, user_id, *args, **kwargs):
- webhook_data = get_webhook_data(installation_id, issue_id, user_id)
- if not webhook_data:
- return
- install, _, user = webhook_data
- data = kwargs.get("data", {})
- project_slug = data.get("project_slug")
- comment_id = data.get("comment_id")
- payload = {
- "comment_id": data.get("comment_id"),
- "issue_id": issue_id,
- "project_slug": data.get("project_slug"),
- "timestamp": data.get("timestamp"),
- "comment": data.get("comment"),
- }
- send_webhooks(installation=install, event=type, data=payload, actor=user)
- # `type` is comment.created, comment.updated, or comment.deleted
- analytics.record(
- type,
- user_id=user_id,
- group_id=issue_id,
- project_slug=project_slug,
+def build_comment_webhook(
+ installation_id: int, issue_id: int, type: str, user_id: int, *args: Any, **kwargs: Any
+) -> None:
+ new_build_comment_webhook(
installation_id=installation_id,
- comment_id=comment_id,
+ issue_id=issue_id,
+ type=type,
+ user_id=user_id,
+ *args,
+ **kwargs,
)
-def get_webhook_data(installation_id, issue_id, user_id):
- extra = {"installation_id": installation_id, "issue_id": issue_id}
- install = app_service.installation_by_id(id=installation_id)
- if not install:
- logger.info("workflow_notification.missing_installation", extra=extra)
- return
-
- try:
- issue = Group.objects.get(id=issue_id)
- except Group.DoesNotExist:
- logger.info("workflow_notification.missing_issue", extra=extra)
- return
-
- user = None
- if user_id:
- user = user_service.get_user(user_id=user_id)
- if not user:
- logger.info("workflow_notification.missing_user", extra=extra)
-
- return (install, issue, user)
-
-
@instrumented_task("sentry.tasks.send_process_resource_change_webhook", **TASK_OPTIONS)
@retry_decorator
-def send_resource_change_webhook(installation_id, event, data, *args, **kwargs):
- installation = app_service.installation_by_id(id=installation_id)
- if not installation:
- logger.info(
- "send_process_resource_change_webhook.missing_installation",
- extra={"installation_id": installation_id, "event": event},
- )
- return
-
- send_webhooks(installation, event, data=data)
-
- metrics.incr("resource_change.processed", sample_rate=1.0, tags={"change_event": event})
-
-
-def notify_sentry_app(event, futures):
- for f in futures:
- if not f.kwargs.get("sentry_app"):
- continue
-
- extra_kwargs = {
- "additional_payload_key": None,
- "additional_payload": None,
- }
- # If the future comes from a rule with a UI component form in the schema, append the issue alert payload
- settings = f.kwargs.get("schema_defined_settings")
- if settings:
- extra_kwargs["additional_payload_key"] = "issue_alert"
- extra_kwargs["additional_payload"] = {
- "id": f.rule.id,
- "title": f.rule.label,
- "sentry_app_id": f.kwargs["sentry_app"].id,
- "settings": settings,
- }
-
- send_alert_event.delay(
- event=event,
- rule=f.rule.label,
- sentry_app_id=f.kwargs["sentry_app"].id,
- **extra_kwargs,
- )
-
-
-def send_webhooks(installation, event, **kwargs):
- try:
- servicehook = ServiceHook.objects.get(
- organization_id=installation.organization_id, actor_id=installation.id
- )
- except ServiceHook.DoesNotExist:
- logger.info(
- "send_webhooks.missing_servicehook",
- extra={"installation_id": installation.id, "event": event},
- )
- return
-
- if event not in servicehook.events:
- return
-
- # The service hook applies to all projects if there are no
- # ServiceHookProject records. Otherwise we want check if
- # the event is within the allowed projects.
- project_limited = ServiceHookProject.objects.filter(service_hook_id=servicehook.id).exists()
-
- # TODO(nola): This is disabled for now, because it could potentially affect internal integrations w/ error.created
- # # If the event is error.created & the request is going out to the Org that owns the Sentry App,
- # # Make sure we don't send the request, to prevent potential infinite loops
- # if (
- # event == "error.created"
- # and installation.organization_id == installation.sentry_app.owner_id
- # ):
- # # We just want to exclude error.created from the project that the integration lives in
- # # Need to first implement project mapping for integration partners
- # metrics.incr(
- # "webhook_request.dropped",
- # tags={"sentry_app": installation.sentry_app.id, "event": event},
- # )
- # return
-
- if not project_limited:
- resource, action = event.split(".")
-
- kwargs["resource"] = resource
- kwargs["action"] = action
- kwargs["install"] = installation
-
- request_data = AppPlatformEvent(**kwargs)
- send_and_save_webhook_request(
- installation.sentry_app,
- request_data,
- servicehook.sentry_app.webhook_url,
- )
+def send_resource_change_webhook(
+ installation_id: int, event: str, data: dict[str, Any], *args: Any, **kwargs: Any
+) -> None:
+ new_send_resource_change_webhook(
+ installation_id=installation_id, event=event, data=data, *args, **kwargs
+ )
@instrumented_task(
@@ -461,10 +110,6 @@ def send_webhooks(installation, event, **kwargs):
def create_or_update_service_hooks_for_sentry_app(
sentry_app_id: int, webhook_url: str, events: list[str], **kwargs: dict
) -> None:
- installations = SentryAppInstallation.objects.filter(sentry_app_id=sentry_app_id)
- for installation in installations:
- create_or_update_service_hooks_for_installation(
- installation=installation,
- events=events,
- webhook_url=webhook_url,
- )
+ new_create_or_update_service_hooks_for_sentry_app(
+ sentry_app_id=sentry_app_id, webhook_url=webhook_url, events=events, **kwargs
+ )
diff --git a/src/sentry/tasks/servicehooks.py b/src/sentry/tasks/servicehooks.py
index 8fb42b5d30b9d..bb780370321f3 100644
--- a/src/sentry/tasks/servicehooks.py
+++ b/src/sentry/tasks/servicehooks.py
@@ -1,29 +1,6 @@
-from time import time
-
-from sentry.api.serializers import serialize
-from sentry.http import safe_urlopen
-from sentry.sentry_apps.models.servicehook import ServiceHook
+from sentry.sentry_apps.tasks.service_hooks import process_service_hook as new_process_service_hook
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task, retry
-from sentry.tsdb.base import TSDBModel
-from sentry.utils import json
-
-
-def get_payload_v0(event):
- group = event.group
- project = group.project
-
- group_context = serialize(group)
- group_context["url"] = group.get_absolute_url()
-
- event_context = serialize(event)
- event_context["url"] = f"{group.get_absolute_url()}events/{event.event_id}/"
- data = {
- "project": {"slug": project.slug, "name": project.name},
- "group": group_context,
- "event": event_context,
- }
- return data
@instrumented_task(
@@ -34,27 +11,4 @@ def get_payload_v0(event):
)
@retry
def process_service_hook(servicehook_id, event, **kwargs):
- try:
- servicehook = ServiceHook.objects.get(id=servicehook_id)
- except ServiceHook.DoesNotExist:
- return
-
- if servicehook.version == 0:
- payload = get_payload_v0(event)
- else:
- raise NotImplementedError
-
- from sentry import tsdb
-
- tsdb.backend.incr(TSDBModel.servicehook_fired, servicehook.id)
-
- headers = {
- "Content-Type": "application/json",
- "X-ServiceHook-Timestamp": str(int(time())),
- "X-ServiceHook-GUID": servicehook.guid,
- "X-ServiceHook-Signature": servicehook.build_signature(json.dumps(payload)),
- }
-
- safe_urlopen(
- url=servicehook.url, data=json.dumps(payload), headers=headers, timeout=5, verify_ssl=False
- )
+ new_process_service_hook(servicehook_id=servicehook_id, event=event, **kwargs)
diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py
index b1ef03986bfe1..c2fe594214409 100644
--- a/src/sentry/tasks/store.py
+++ b/src/sentry/tasks/store.py
@@ -9,7 +9,6 @@
import orjson
import sentry_sdk
-from django.conf import settings
from sentry_relay.processing import StoreNormalizer
from sentry import options, reprocessing2
@@ -589,58 +588,6 @@ def _do_save_event(
},
)
- time_synthetic_monitoring_event(data, project_id, start_time)
-
-
-def time_synthetic_monitoring_event(
- data: Mapping[str, Any], project_id: int, start_time: float | None
-) -> bool:
- """
- For special events produced by the recurring synthetic monitoring
- functions, emit timing metrics for:
-
- - "events.synthetic-monitoring.time-to-ingest-total" - Total time with
- the client submission latency included. Rely on timestamp provided by
- client as part of the event payload.
-
- - "events.synthetic-monitoring.time-to-process" - Processing time inside
- by sentry. `start_time` is added to the payload by the system entrypoint
- (relay).
-
- If an event was produced by synthetic monitoring and metrics emitted,
- returns `True` otherwise returns `False`.
- """
- sm_project_id = getattr(settings, "SENTRY_SYNTHETIC_MONITORING_PROJECT_ID", None)
- if sm_project_id is None or project_id != sm_project_id:
- return False
-
- extra = data.get("extra", {}).get("_sentry_synthetic_monitoring")
- if not extra:
- return False
-
- now = time()
- tags = {
- "target": extra["target"],
- "source_region": extra["source_region"],
- "source": extra["source"],
- }
-
- metrics.timing(
- "events.synthetic-monitoring.time-to-ingest-total",
- now - data["timestamp"],
- tags=tags,
- sample_rate=1.0,
- )
-
- if start_time:
- metrics.timing(
- "events.synthetic-monitoring.time-to-process",
- now - start_time,
- tags=tags,
- sample_rate=1.0,
- )
- return True
-
@instrumented_task(
name="sentry.tasks.store.save_event",
@@ -662,7 +609,6 @@ def save_event(
@instrumented_task(
name="sentry.tasks.store.save_event_transaction",
- queue="events.save_event_transaction",
time_limit=65,
soft_time_limit=60,
silo_mode=SiloMode.REGION,
diff --git a/src/sentry/tasks/summaries/weekly_reports.py b/src/sentry/tasks/summaries/weekly_reports.py
index a3f6d464eb792..bc6b79a874c38 100644
--- a/src/sentry/tasks/summaries/weekly_reports.py
+++ b/src/sentry/tasks/summaries/weekly_reports.py
@@ -320,7 +320,7 @@ def send_email(self, template_ctx: Mapping[str, Any], user_id: int) -> None:
user_project_count=template_ctx["user_project_count"],
)
- # TODO see if we can use the UUID to track if the email was sent or not
+ # TODO: see if we can use the UUID to track if the email was sent or not
logger.info(
"weekly_report.send_email",
extra={
diff --git a/src/sentry/tasks/unmerge.py b/src/sentry/tasks/unmerge.py
index 53850bc5112f4..2dee827bddbbd 100644
--- a/src/sentry/tasks/unmerge.py
+++ b/src/sentry/tasks/unmerge.py
@@ -124,9 +124,6 @@ def _generate_culprit(event):
else data.get("first_release", None)
),
"times_seen": lambda caches, data, event: data["times_seen"] + 1,
- "score": lambda caches, data, event: Group.calculate_score(
- data["times_seen"] + 1, data["last_seen"]
- ),
}
diff --git a/src/sentry/tasks/update_user_reports.py b/src/sentry/tasks/update_user_reports.py
index 20faa44af937a..0075f46dd8e02 100644
--- a/src/sentry/tasks/update_user_reports.py
+++ b/src/sentry/tasks/update_user_reports.py
@@ -5,8 +5,12 @@
import sentry_sdk
from django.utils import timezone
-from sentry import eventstore, features
-from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, shim_to_feedback
+from sentry import eventstore, quotas
+from sentry.feedback.usecases.create_feedback import (
+ FeedbackCreationSource,
+ is_in_feedback_denylist,
+ shim_to_feedback,
+)
from sentry.models.project import Project
from sentry.models.userreport import UserReport
from sentry.silo.base import SiloMode
@@ -24,8 +28,12 @@
)
def update_user_reports(**kwargs: Any) -> None:
now = timezone.now()
- end = kwargs.get("end", now + timedelta(minutes=5)) # +5 minutes just to catch clock skew
start = kwargs.get("start", now - timedelta(days=1))
+ end = kwargs.get("end", now + timedelta(minutes=5)) # +5 minutes just to catch clock skew
+
+ # The event query time range is [start - event_lookback, end].
+ event_lookback = kwargs.get("event_lookback", timedelta(days=1))
+
# Filter for user reports where there was no event associated with them at
# ingestion time
user_reports = UserReport.objects.filter(
@@ -54,11 +62,16 @@ def update_user_reports(**kwargs: Any) -> None:
event_ids = [r.event_id for r in reports]
report_by_event = {r.event_id: r for r in reports}
events = []
+
+ event_start = start - event_lookback
+ if retention := quotas.backend.get_event_retention(organization=project.organization):
+ event_start = max(event_start, now - timedelta(days=retention))
+
for event_id_chunk in chunked(event_ids, MAX_EVENTS):
snuba_filter = eventstore.Filter(
project_ids=[project_id],
event_ids=event_id_chunk,
- start=start - timedelta(days=1), # we go one extra day back for events
+ start=event_start,
end=end,
)
try:
@@ -77,9 +90,7 @@ def update_user_reports(**kwargs: Any) -> None:
for event in events:
report = report_by_event.get(event.event_id)
if report:
- if features.has(
- "organizations:user-feedback-ingest", project.organization, actor=None
- ):
+ if not is_in_feedback_denylist(project.organization):
logger.info(
"update_user_reports.shim_to_feedback",
extra={"report_id": report.id, "event_id": event.event_id},
diff --git a/src/sentry/templates/sentry/emails/secret-scanning/body.html b/src/sentry/templates/sentry/emails/secret-scanning/body.html
new file mode 100644
index 0000000000000..6d878b97bad19
--- /dev/null
+++ b/src/sentry/templates/sentry/emails/secret-scanning/body.html
@@ -0,0 +1,17 @@
+{% extends "sentry/emails/base.html" %}
+
+{% load i18n %}
+
+{% block main %}
+ {{ token_type }} exposed
+ Your Sentry {{ token_type }} was found publicly on the internet. We recommend revoking this token immediately, as exposed tokens pose a security risk to your account.
+
+Name: {{ token_name }}
+Token: {{ token_redacted }}
+SHA256: {{ hashed_token }}
+
+Source: {{ exposed_source }}
+URL: {{ exposed_url }}
+Date: {{ datetime|date:"N j, Y, P e" }}
+ Read more about Sentry Auth Tokens .
+{% endblock %}
diff --git a/src/sentry/templates/sentry/emails/secret-scanning/body.txt b/src/sentry/templates/sentry/emails/secret-scanning/body.txt
new file mode 100644
index 0000000000000..4f0c01d488ac7
--- /dev/null
+++ b/src/sentry/templates/sentry/emails/secret-scanning/body.txt
@@ -0,0 +1,15 @@
+{{ token_type }} exposed
+
+Your Sentry {{ token_type }} was found publicly on the internet. We recommend revoking this token immediately, as exposed tokens pose a security risk to your account:
+{{ revoke_url }}
+
+Name: {{ token_name }}
+Token: {{ token_redacted }}
+SHA256: {{ hashed_token }}
+
+Source: {{ exposed_source }}
+URL: {{ exposed_url }}
+Date: {{ datetime|date:"N j, Y, P e" }}
+
+Read more about Sentry Auth Tokens:
+https://docs.sentry.io/account/auth-tokens/
diff --git a/src/sentry/templates/sentry/js-sdk-loader.js.tmpl b/src/sentry/templates/sentry/js-sdk-loader.js.tmpl
index 132191c9677c1..326d5fe73e8ec 100644
--- a/src/sentry/templates/sentry/js-sdk-loader.js.tmpl
+++ b/src/sentry/templates/sentry/js-sdk-loader.js.tmpl
@@ -153,6 +153,12 @@
// Cleanup to allow garbage collection
_window.sentryOnLoad = undefined;
}
+ }
+ catch (o_O) {
+ console.error('Error while calling `sentryOnLoad` handler:');
+ console.error(o_O);
+ }
+ try {
// We have to make sure to call all callbacks first
for (var i = 0; i < onLoadCallbacks.length; i++) {
if (typeof onLoadCallbacks[i] === 'function') {
diff --git a/src/sentry/templates/sentry/js-sdk-loader.min.js.tmpl b/src/sentry/templates/sentry/js-sdk-loader.min.js.tmpl
index 08e2c9fdf581e..65db81b8c3a88 100644
--- a/src/sentry/templates/sentry/js-sdk-loader.min.js.tmpl
+++ b/src/sentry/templates/sentry/js-sdk-loader.min.js.tmpl
@@ -1 +1 @@
-{% load sentry_helpers %}!function(n,e,r,t,i,o,a,c,s){for(var u=s,f=0;f-1){u&&"no"===document.scripts[f].getAttribute("data-lazy")&&(u=!1);break}var p=[];function l(n){return"e"in n}function d(n){return"p"in n}function _(n){return"f"in n}var v=[];function y(n){u&&(l(n)||d(n)||_(n)&&n.f.indexOf("capture")>-1||_(n)&&n.f.indexOf("showReportDialog")>-1)&&m(),v.push(n)}function g(){y({e:[].slice.call(arguments)})}function h(n){y({p:n})}function E(){try{n.SENTRY_SDK_SOURCE="loader";var e=n[i],o=e.init;e.init=function(i){n.removeEventListener(r,g),n.removeEventListener(t,h);var a=c;for(var s in i)Object.prototype.hasOwnProperty.call(i,s)&&(a[s]=i[s]);!function(n,e){var r=n.integrations||[];if(!Array.isArray(r))return;var t=r.map((function(n){return n.name}));n.tracesSampleRate&&-1===t.indexOf("BrowserTracing")&&(e.browserTracingIntegration?r.push(e.browserTracingIntegration({enableInp:!0})):e.BrowserTracing&&r.push(new e.BrowserTracing));(n.replaysSessionSampleRate||n.replaysOnErrorSampleRate)&&-1===t.indexOf("Replay")&&(e.replayIntegration?r.push(e.replayIntegration()):e.Replay&&r.push(new e.Replay));n.integrations=r}(a,e),o(a)},setTimeout((function(){return function(e){try{"function"==typeof n.sentryOnLoad&&(n.sentryOnLoad(),n.sentryOnLoad=void 0);for(var r=0;r-1){u&&"no"===document.scripts[f].getAttribute("data-lazy")&&(u=!1);break}var p=[];function l(n){return"e"in n}function d(n){return"p"in n}function _(n){return"f"in n}var v=[];function y(n){u&&(l(n)||d(n)||_(n)&&n.f.indexOf("capture")>-1||_(n)&&n.f.indexOf("showReportDialog")>-1)&&L(),v.push(n)}function h(){y({e:[].slice.call(arguments)})}function g(n){y({p:n})}function E(){try{n.SENTRY_SDK_SOURCE="loader";var e=n[o],i=e.init;e.init=function(o){n.removeEventListener(r,h),n.removeEventListener(t,g);var a=c;for(var s in o)Object.prototype.hasOwnProperty.call(o,s)&&(a[s]=o[s]);!function(n,e){var r=n.integrations||[];if(!Array.isArray(r))return;var t=r.map((function(n){return 
n.name}));n.tracesSampleRate&&-1===t.indexOf("BrowserTracing")&&(e.browserTracingIntegration?r.push(e.browserTracingIntegration({enableInp:!0})):e.BrowserTracing&&r.push(new e.BrowserTracing));(n.replaysSessionSampleRate||n.replaysOnErrorSampleRate)&&-1===t.indexOf("Replay")&&(e.replayIntegration?r.push(e.replayIntegration()):e.Replay&&r.push(new e.Replay));n.integrations=r}(a,e),i(a)},setTimeout((function(){return function(e){try{"function"==typeof n.sentryOnLoad&&(n.sentryOnLoad(),n.sentryOnLoad=void 0)}catch(n){console.error("Error while calling `sentryOnLoad` handler:"),console.error(n)}try{for(var r=0;r
{% if banner_choice == 0 %}
- New workshop: Fix Your Frontend with Sentry on Sept. 24.  
RSVP.
+ Want to connect with the folks building Sentry?  
Join us on Discord.
{% elif banner_choice == 1 %}
- New workshop: Fix Your Frontend with Sentry on Sept. 24.  
RSVP.
+ Want to connect with the folks building Sentry?  
Join us on Discord.
{% endif %}
diff --git a/src/sentry/templates/sentry/setup-wizard.html b/src/sentry/templates/sentry/setup-wizard.html
index e83962c62e8ff..5f7dfd759bf1b 100644
--- a/src/sentry/templates/sentry/setup-wizard.html
+++ b/src/sentry/templates/sentry/setup-wizard.html
@@ -25,6 +25,7 @@
props: {
hash: {{ hash|to_json|safe }},
organizations: {{ organizations|to_json|safe }},
+ enableProjectSelection: {{ enableProjectSelection|to_json|safe }},
},
});
diff --git a/src/sentry/templates/sentry/toolbar/iframe.html b/src/sentry/templates/sentry/toolbar/iframe.html
index 1aa3c02c9cdca..868ae73dec935 100644
--- a/src/sentry/templates/sentry/toolbar/iframe.html
+++ b/src/sentry/templates/sentry/toolbar/iframe.html
@@ -1,15 +1,166 @@
+{% comment %}
+Template returned for requests to /iframe. The iframe serves as a proxy for Sentry API requests.
+Required context variables:
+- referrer: string. HTTP header from the request object.
+- state: string. One of: `logged-out`, `missing-project`, `invalid-domain` or `success`.
+- logging: any. If the value is truthy in JavaScript then debug logging will be enabled.
+- organization_slug: string. The org named in the url params
+- project_id_or_slug: string | int. The project named in the url params
+{% endcomment %}
{% load sentry_helpers %}
{% load sentry_assets %}
-
-
-
-
-
- {% script %}
-
- {% endscript %}
-
-
+
+
+ Sentry DevToolbar iFrame
+
+
+
+
+
+ {% script %}
+
+ {% endscript %}
+
+{% comment %}
+No need to close `body`. If we do then middleware will inject some extra markup
+we don't need. Browsers can figure out when it is missing and deal with it.
+{% endcomment %}
diff --git a/src/sentry/templates/sentry/toolbar/login-success.html b/src/sentry/templates/sentry/toolbar/login-success.html
index 10ec6ee3ac917..90529a776a2a2 100644
--- a/src/sentry/templates/sentry/toolbar/login-success.html
+++ b/src/sentry/templates/sentry/toolbar/login-success.html
@@ -1,25 +1,47 @@
-{% load sentry_helpers %}
+{# Auth redirect template for Dev Toolbar. Returned after successfully logging in to a requested organization. #}
{% load sentry_assets %}
-
-
-
-
-
-
-
You are logged in!
-
If this window stays open, check the console for errors.
-
- {% script %}
-
- {% endscript %}
-
+
+
+ Sentry - Login Success
+
+
+
+
+
You are logged in!
+
This window will automatically close after 3 seconds. If not then check the console for errors.
+
+
+
+ {% script %}
+
+ {% endscript %}
+
diff --git a/src/sentry/templatetags/sentry_helpers.py b/src/sentry/templatetags/sentry_helpers.py
index 4115246663bd9..cb35982177941 100644
--- a/src/sentry/templatetags/sentry_helpers.py
+++ b/src/sentry/templatetags/sentry_helpers.py
@@ -22,8 +22,7 @@
register = template.Library()
-truncatechars = register.filter(stringfilter(truncatechars))
-truncatechars.is_safe = True
+truncatechars = register.filter(stringfilter(truncatechars), is_safe=True)
@register.filter
@@ -254,7 +253,7 @@ def date(dt, arg=None):
@register.simple_tag
def percent(value, total, format=None):
if not (value and total):
- result = 0
+ result = 0.0
else:
result = int(value) / float(total) * 100
diff --git a/src/sentry/templatetags/sentry_plugins.py b/src/sentry/templatetags/sentry_plugins.py
deleted file mode 100644
index 1936199fcc9ba..0000000000000
--- a/src/sentry/templatetags/sentry_plugins.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from django import template
-
-from sentry.api.serializers.models.plugin import is_plugin_deprecated
-from sentry.plugins.base import Annotation, plugins
-from sentry.utils.safe import safe_execute
-
-register = template.Library()
-
-
-@register.filter
-def get_actions(group, request):
- project = group.project
-
- action_list = []
- for plugin in plugins.for_project(project, version=1):
- results = safe_execute(plugin.actions, request, group, action_list)
-
- if not results:
- continue
-
- action_list = results
-
- for plugin in plugins.for_project(project, version=2):
- for action in safe_execute(plugin.get_actions, request, group) or ():
- action_list.append(action)
-
- return [(a[0], a[1]) for a in action_list]
-
-
-@register.filter
-def get_annotations(group, request=None) -> list[dict[str, str]]:
- project = group.project
-
- annotation_list = []
- for plugin in plugins.for_project(project, version=2):
- if is_plugin_deprecated(plugin, project):
- continue
- for value in safe_execute(plugin.get_annotations, group=group) or ():
- annotation = safe_execute(Annotation, **value)
- if annotation:
- annotation_list.append(annotation)
-
- return annotation_list
diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py
index 97c39f023bfad..87d28fd6db66b 100644
--- a/src/sentry/testutils/cases.py
+++ b/src/sentry/testutils/cases.py
@@ -1452,7 +1452,6 @@ def __wrap_group(self, group):
"last_seen",
"first_seen",
"data",
- "score",
"project_id",
"time_spent_total",
"time_spent_count",
@@ -1474,7 +1473,6 @@ def __wrap_group(self, group):
self.to_snuba_time_format(group.last_seen),
self.to_snuba_time_format(group.first_seen),
group.data,
- group.score,
group.project.id,
group.time_spent_total,
group.time_spent_count,
@@ -1553,6 +1551,7 @@ def store_segment(
payload["tags"] = tags
if transaction_id:
payload["event_id"] = transaction_id
+ payload["segment_id"] = transaction_id[:16]
if profile_id:
payload["profile_id"] = profile_id
if measurements:
@@ -1594,6 +1593,8 @@ def store_indexed_span(
store_metrics_summary: Mapping[str, Sequence[Mapping[str, Any]]] | None = None,
group: str = "00",
category: str | None = None,
+ organization_id: int = 1,
+ is_eap: bool = False,
):
if span_id is None:
span_id = self._random_span_id()
@@ -1602,7 +1603,7 @@ def store_indexed_span(
payload = {
"project_id": project_id,
- "organization_id": 1,
+ "organization_id": organization_id,
"span_id": span_id,
"trace_id": trace_id,
"duration_ms": int(duration),
@@ -1628,6 +1629,7 @@ def store_indexed_span(
}
if transaction_id:
payload["event_id"] = transaction_id
+ payload["segment_id"] = transaction_id[:16]
if profile_id:
payload["profile_id"] = profile_id
if store_metrics_summary:
@@ -1640,7 +1642,7 @@ def store_indexed_span(
# We want to give the caller the possibility to store only a summary since the database does not deduplicate
# on the span_id which makes the assumptions of a unique span_id in the database invalid.
if not store_only_summary:
- self.store_span(payload)
+ self.store_span(payload, is_eap=is_eap)
if "_metrics_summary" in payload:
self.store_metrics_summary(payload)
@@ -2725,6 +2727,8 @@ def assert_serialized_widget_query(self, data, widget_data_source):
assert data["columns"] == widget_data_source.columns
if "fieldAliases" in data:
assert data["fieldAliases"] == widget_data_source.field_aliases
+ if "selectedAggregate" in data:
+ assert data["selectedAggregate"] == widget_data_source.selected_aggregate
def get_widgets(self, dashboard_id):
return DashboardWidget.objects.filter(dashboard_id=dashboard_id).order_by("order")
diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py
index cf9b019fcee39..8eefba329bf02 100644
--- a/src/sentry/testutils/factories.py
+++ b/src/sentry/testutils/factories.py
@@ -75,7 +75,6 @@
from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig
from sentry.integrations.types import ExternalProviders
from sentry.issues.grouptype import get_group_type_by_type_id
-from sentry.mediators.token_exchange.grant_exchanger import GrantExchanger
from sentry.models.activity import Activity
from sentry.models.apikey import ApiKey
from sentry.models.apitoken import ApiToken
@@ -114,7 +113,6 @@
from sentry.models.organizationmemberteam import OrganizationMemberTeam
from sentry.models.organizationslugreservation import OrganizationSlugReservation
from sentry.models.orgauthtoken import OrgAuthToken
-from sentry.models.platformexternalissue import PlatformExternalIssue
from sentry.models.project import Project
from sentry.models.projectbookmark import ProjectBookmark
from sentry.models.projectcodeowners import ProjectCodeOwners
@@ -136,6 +134,7 @@
SentryAppInstallationTokenCreator,
)
from sentry.sentry_apps.logic import SentryAppCreator
+from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
from sentry.sentry_apps.models.sentry_app_installation_for_provider import (
@@ -144,6 +143,7 @@
from sentry.sentry_apps.models.servicehook import ServiceHook
from sentry.sentry_apps.services.app.serial import serialize_sentry_app_installation
from sentry.sentry_apps.services.hook import hook_service
+from sentry.sentry_apps.token_exchange.grant_exchanger import GrantExchanger
from sentry.signals import project_created
from sentry.silo.base import SiloMode
from sentry.snuba.dataset import Dataset
@@ -171,11 +171,17 @@
from sentry.utils import loremipsum
from sentry.utils.performance_issues.performance_problem import PerformanceProblem
from sentry.workflow_engine.models import (
+ Action,
+ DataCondition,
+ DataConditionGroup,
+ DataConditionGroupAction,
DataSource,
DataSourceDetector,
Detector,
+ DetectorState,
+ DetectorWorkflow,
Workflow,
- WorkflowAction,
+ WorkflowDataConditionGroup,
)
from social_auth.models import UserSocialAuth
@@ -946,16 +952,22 @@ def store_event(
data,
project_id: int,
assert_no_errors: bool = True,
- event_type: EventType = EventType.DEFAULT,
+ default_event_type: EventType | None = None,
sent_at: datetime | None = None,
) -> Event:
"""
Like `create_event`, but closer to how events are actually
ingested. Prefer to use this method over `create_event`
"""
- if event_type == EventType.ERROR:
+
+ # this creates a basic message event
+ if default_event_type == EventType.DEFAULT:
data.update({"stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"])})
+ # this creates an error event
+ elif default_event_type == EventType.ERROR:
+ data.update({"exception": [{"value": "BadError"}]})
+
manager = EventManager(data, sent_at=sent_at)
manager.normalize()
if assert_no_errors:
@@ -1202,12 +1214,13 @@ def create_sentry_app_installation(
):
assert install.api_grant is not None
assert install.sentry_app.application is not None
- GrantExchanger.run(
+ assert install.sentry_app.proxy_user is not None
+ GrantExchanger(
install=rpc_install,
code=install.api_grant.code,
client_id=install.sentry_app.application.client_id,
user=install.sentry_app.proxy_user,
- )
+ ).run()
install = SentryAppInstallation.objects.get(id=install.id)
return install
@@ -1973,6 +1986,7 @@ def create_uptime_subscription(
@staticmethod
def create_project_uptime_subscription(
project: Project,
+ env: Environment | None,
uptime_subscription: UptimeSubscription,
mode: ProjectUptimeSubscriptionMode,
name: str,
@@ -1990,6 +2004,7 @@ def create_project_uptime_subscription(
return ProjectUptimeSubscription.objects.create(
uptime_subscription=uptime_subscription,
project=project,
+ environment=env,
mode=mode,
name=name,
owner_team_id=owner_team_id,
@@ -2065,13 +2080,34 @@ def create_workflow(
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
- def create_workflowaction(
+ def create_data_condition_group(
+ **kwargs,
+ ) -> DataConditionGroup:
+ return DataConditionGroup.objects.create(**kwargs)
+
+ @staticmethod
+ @assume_test_silo_mode(SiloMode.REGION)
+ def create_workflow_data_condition_group(
workflow: Workflow | None = None,
+ condition_group: DataConditionGroup | None = None,
**kwargs,
- ) -> WorkflowAction:
+ ) -> WorkflowDataConditionGroup:
if workflow is None:
workflow = Factories.create_workflow()
- return WorkflowAction.objects.create(workflow=workflow, **kwargs)
+
+ if not condition_group:
+ condition_group = Factories.create_data_condition_group()
+
+ return WorkflowDataConditionGroup.objects.create(
+ workflow=workflow, condition_group=condition_group
+ )
+
+ @staticmethod
+ @assume_test_silo_mode(SiloMode.REGION)
+ def create_data_condition(
+ **kwargs,
+ ) -> DataCondition:
+ return DataCondition.objects.create(**kwargs)
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
@@ -2103,9 +2139,24 @@ def create_detector(
if name is None:
name = petname.generate(2, " ", letters=10).title()
return Detector.objects.create(
- organization=organization, name=name, owner_user_id=owner_user_id, owner_team=owner_team
+ organization=organization,
+ name=name,
+ owner_user_id=owner_user_id,
+ owner_team=owner_team,
+ **kwargs,
)
+ @staticmethod
+ @assume_test_silo_mode(SiloMode.REGION)
+ def create_detector_state(
+ detector: Detector | None = None,
+ **kwargs,
+ ) -> DetectorState:
+ if detector is None:
+ detector = Factories.create_detector()
+
+ return DetectorState.objects.create(detector=detector, **kwargs)
+
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
def create_data_source_detector(
@@ -2118,3 +2169,36 @@ def create_data_source_detector(
if detector is None:
detector = Factories.create_detector()
return DataSourceDetector.objects.create(data_source=data_source, detector=detector)
+
+ @staticmethod
+ @assume_test_silo_mode(SiloMode.REGION)
+ def create_action(**kwargs) -> Action:
+ return Action.objects.create(**kwargs)
+
+ @staticmethod
+ @assume_test_silo_mode(SiloMode.REGION)
+ def create_detector_workflow(
+ detector: Detector | None = None,
+ workflow: Workflow | None = None,
+ **kwargs,
+ ) -> DetectorWorkflow:
+ if detector is None:
+ detector = Factories.create_detector()
+ if workflow is None:
+ workflow = Factories.create_workflow()
+ return DetectorWorkflow.objects.create(detector=detector, workflow=workflow, **kwargs)
+
+ @staticmethod
+ @assume_test_silo_mode(SiloMode.REGION)
+ def create_data_condition_group_action(
+ action: Action | None = None,
+ condition_group: DataConditionGroup | None = None,
+ **kwargs,
+ ) -> DataConditionGroupAction:
+ if action is None:
+ action = Factories.create_action()
+ if condition_group is None:
+ condition_group = Factories.create_data_condition_group()
+ return DataConditionGroupAction.objects.create(
+ action=action, condition_group=condition_group, **kwargs
+ )
diff --git a/src/sentry/testutils/fixtures.py b/src/sentry/testutils/fixtures.py
index 63b1fe891185b..e9226424f2333 100644
--- a/src/sentry/testutils/fixtures.py
+++ b/src/sentry/testutils/fixtures.py
@@ -14,6 +14,7 @@
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
from sentry.models.activity import Activity
+from sentry.models.environment import Environment
from sentry.models.grouprelease import GroupRelease
from sentry.models.organization import Organization
from sentry.models.organizationmember import OrganizationMember
@@ -43,6 +44,7 @@
from sentry.users.models.identity import Identity, IdentityProvider
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
+from sentry.workflow_engine.models import DataSource, Detector, Workflow
class Fixtures:
@@ -633,21 +635,43 @@ def create_dashboard_widget(self, *args, **kwargs):
def create_dashboard_widget_query(self, *args, **kwargs):
return Factories.create_dashboard_widget_query(*args, **kwargs)
- def create_workflow(self, *args, **kwargs):
- return Factories.create_workflow(*args, **kwargs)
+ def create_workflow_action(self, *args, **kwargs) -> Workflow:
+ return Factories.create_workflow_action(*args, **kwargs)
- def create_workflowaction(self, *args, **kwargs):
- return Factories.create_workflowaction(*args, **kwargs)
+ def create_workflow(self, *args, **kwargs) -> Workflow:
+ return Factories.create_workflow(*args, **kwargs)
- def create_data_source(self, *args, **kwargs):
+ def create_data_source(self, *args, **kwargs) -> DataSource:
return Factories.create_data_source(*args, **kwargs)
- def create_detector(self, *args, **kwargs):
+ def create_data_condition(self, *args, **kwargs):
+ return Factories.create_data_condition(*args, **kwargs)
+
+ def create_detector(self, *args, **kwargs) -> Detector:
return Factories.create_detector(*args, **kwargs)
+ def create_detector_state(self, *args, **kwargs) -> Detector:
+ return Factories.create_detector_state(*args, **kwargs)
+
def create_data_source_detector(self, *args, **kwargs):
return Factories.create_data_source_detector(*args, **kwargs)
+ def create_data_condition_group(self, *args, **kwargs):
+ return Factories.create_data_condition_group(*args, **kwargs)
+
+ def create_data_condition_group_action(self, *args, **kwargs):
+ return Factories.create_data_condition_group_action(*args, **kwargs)
+
+ def create_detector_workflow(self, *args, **kwargs):
+ return Factories.create_detector_workflow(*args, **kwargs)
+
+ def create_workflow_data_condition_group(self, *args, **kwargs):
+ return Factories.create_workflow_data_condition_group(*args, **kwargs)
+
+ # workflow_engine action
+ def create_action(self, *args, **kwargs):
+ return Factories.create_action(*args, **kwargs)
+
def create_uptime_subscription(
self,
type: str = "test",
@@ -667,7 +691,7 @@ def create_uptime_subscription(
if date_updated is None:
date_updated = timezone.now()
if headers is None:
- headers = {}
+ headers = []
return Factories.create_uptime_subscription(
type=type,
@@ -688,6 +712,7 @@ def create_uptime_subscription(
def create_project_uptime_subscription(
self,
project: Project | None = None,
+ env: Environment | None = None,
uptime_subscription: UptimeSubscription | None = None,
mode=ProjectUptimeSubscriptionMode.AUTO_DETECTED_ACTIVE,
name="Test Name",
@@ -696,11 +721,14 @@ def create_project_uptime_subscription(
) -> ProjectUptimeSubscription:
if project is None:
project = self.project
+ if env is None:
+ env = self.environment
if uptime_subscription is None:
uptime_subscription = self.create_uptime_subscription()
return Factories.create_project_uptime_subscription(
project,
+ env,
uptime_subscription,
mode,
name,
diff --git a/src/sentry/testutils/helpers/backups.py b/src/sentry/testutils/helpers/backups.py
index 7b6f60ec7d0bb..0f1d38134c234 100644
--- a/src/sentry/testutils/helpers/backups.py
+++ b/src/sentry/testutils/helpers/backups.py
@@ -65,6 +65,7 @@
from sentry.models.authprovider import AuthProvider
from sentry.models.counter import Counter
from sentry.models.dashboard import Dashboard, DashboardTombstone
+from sentry.models.dashboard_permissions import DashboardPermissions
from sentry.models.dashboard_widget import (
DashboardWidget,
DashboardWidgetQuery,
@@ -111,6 +112,7 @@
from sentry.users.models.userip import UserIP
from sentry.users.models.userrole import UserRole, UserRoleUser
from sentry.utils import json
+from sentry.workflow_engine.models import Action, DataConditionGroup
__all__ = [
"export_to_file",
@@ -532,8 +534,11 @@ def create_exhaustive_organization(
# Dashboard
dashboard = Dashboard.objects.create(
- title=f"Dashboard 1 for {slug}", created_by_id=owner_id, organization=org
+ title=f"Dashboard 1 for {slug}",
+ created_by_id=owner_id,
+ organization=org,
)
+ DashboardPermissions.objects.create(is_creator_only_editable=False, dashboard=dashboard)
widget = DashboardWidget.objects.create(
dashboard=dashboard,
order=1,
@@ -608,13 +613,57 @@ def create_exhaustive_organization(
access_end=timezone.now() + timedelta(days=1),
)
+ # Setup a test 'Issue Rule' and 'Automation'
workflow = self.create_workflow(organization=org)
- self.create_workflowaction(workflow=workflow)
- self.create_workflow(organization=org)
- self.create_data_source_detector(
- self.create_data_source(organization=org),
- self.create_detector(organization=org),
+ detector = self.create_detector(organization=org)
+ self.create_detector_workflow(detector=detector, workflow=workflow)
+ self.create_detector_state(detector=detector)
+
+ notification_condition_group = self.create_data_condition_group(
+ logic_type=DataConditionGroup.Type.ANY,
+ organization=org,
+ )
+
+ send_notification_action = self.create_action(type=Action.Type.Notification, data="")
+ self.create_data_condition_group_action(
+ action=send_notification_action,
+ condition_group=notification_condition_group,
+ )
+
+ # TODO @saponifi3d: Update comparison to be DetectorState.Critical
+ self.create_data_condition(
+ condition="eq",
+ comparison="critical",
+ type="WorkflowCondition",
+ condition_result="True",
+ condition_group=notification_condition_group,
+ )
+
+ self.create_workflow_data_condition_group(
+ workflow=workflow, condition_group=notification_condition_group
+ )
+
+ data_source = self.create_data_source(organization=org)
+
+ self.create_data_source_detector(data_source, detector)
+ detector_conditions = self.create_data_condition_group(
+ logic_type=DataConditionGroup.Type.ALL,
+ organization=org,
+ )
+
+ # TODO @saponifi3d: Create or define trigger workflow action type
+ trigger_workflows_action = self.create_action(type=Action.Type.TriggerWorkflow, data="")
+ self.create_data_condition_group_action(
+ action=trigger_workflows_action, condition_group=detector_conditions
+ )
+ self.create_data_condition(
+ condition="eq",
+ comparison="critical",
+ type="DetectorCondition",
+ condition_result="True",
+ condition_group=detector_conditions,
)
+ detector.workflow_condition_group = detector_conditions
return org
diff --git a/src/sentry/testutils/helpers/datetime.py b/src/sentry/testutils/helpers/datetime.py
index c68d1db640b2c..da1af95d8c5c5 100644
--- a/src/sentry/testutils/helpers/datetime.py
+++ b/src/sentry/testutils/helpers/datetime.py
@@ -8,7 +8,7 @@
__all__ = ["iso_format", "before_now", "timestamp_format"]
-def iso_format(date):
+def iso_format(date: datetime) -> str:
return date.isoformat()[:19]
diff --git a/src/sentry/testutils/helpers/features.py b/src/sentry/testutils/helpers/features.py
index a7cf343f21c71..13df0462f3479 100644
--- a/src/sentry/testutils/helpers/features.py
+++ b/src/sentry/testutils/helpers/features.py
@@ -2,7 +2,7 @@
import functools
import logging
-from collections.abc import Generator, Mapping
+from collections.abc import Generator, Mapping, Sequence
from contextlib import contextmanager
from unittest.mock import patch
@@ -24,7 +24,7 @@
@contextmanager
-def Feature(names):
+def Feature(names: str | Sequence[str] | dict[str, bool]) -> Generator[None]:
"""
Control whether a feature is enabled.
@@ -102,14 +102,18 @@ def features_override(name, *args, **kwargs):
logger.info("Flag defaulting to %s: %s", default_value, repr(name))
return default_value
- def batch_features_override(_feature_names, projects=None, organization=None, *args, **kwargs):
+ def batch_features_override(
+ _feature_names: Sequence[str], projects=None, organization=None, *args, **kwargs
+ ):
feature_results = {name: names[name] for name in _feature_names if name in names}
default_feature_names = [name for name in _feature_names if name not in names]
- default_feature_results = {}
+ default_feature_results: dict[str, dict[str, bool | None]] = {}
if default_feature_names:
- default_feature_results = default_batch_has(
+ defaults = default_batch_has(
default_feature_names, projects=projects, organization=organization, **kwargs
)
+ if defaults:
+ default_feature_results.update(defaults)
if projects:
results = {}
@@ -122,13 +126,13 @@ def batch_features_override(_feature_names, projects=None, organization=None, *a
return results
elif organization:
result_key = f"organization:{organization.id}"
- results = {**feature_results, **default_feature_results[result_key]}
- results = {
+ results_for_org = {**feature_results, **default_feature_results[result_key]}
+ results_for_org = {
name: resolve_feature_name_value_for_org(organization, val)
- for name, val in results.items()
+ for name, val in results_for_org.items()
if name.startswith("organization")
}
- return {result_key: results}
+ return {result_key: results_for_org}
with patch("sentry.features.has") as features_has:
features_has.side_effect = features_override
diff --git a/src/sentry/testutils/helpers/on_demand.py b/src/sentry/testutils/helpers/on_demand.py
index fa149f633757c..ae131847340d9 100644
--- a/src/sentry/testutils/helpers/on_demand.py
+++ b/src/sentry/testutils/helpers/on_demand.py
@@ -20,6 +20,7 @@ def create_widget(
dashboard: Dashboard | None = None,
widget: DashboardWidget | None = None,
discover_widget_split: int | None = None,
+ widget_type: int = DashboardWidgetTypes.DISCOVER,
) -> tuple[DashboardWidgetQuery, DashboardWidget, Dashboard]:
columns = columns or []
dashboard = dashboard or Dashboard.objects.create(
@@ -31,7 +32,7 @@ def create_widget(
widget = widget or DashboardWidget.objects.create(
dashboard=dashboard,
order=order,
- widget_type=DashboardWidgetTypes.DISCOVER,
+ widget_type=widget_type,
display_type=DashboardWidgetDisplayTypes.LINE_CHART,
discover_widget_split=discover_widget_split,
)
diff --git a/src/sentry/testutils/hybrid_cloud.py b/src/sentry/testutils/hybrid_cloud.py
index f07b5937eded1..20105205ecac5 100644
--- a/src/sentry/testutils/hybrid_cloud.py
+++ b/src/sentry/testutils/hybrid_cloud.py
@@ -14,9 +14,12 @@
from django.db.backends.base.base import BaseDatabaseWrapper
from sentry.db.postgres.transactions import in_test_transaction_enforcement
+from sentry.deletions.models.scheduleddeletion import (
+ BaseScheduledDeletion,
+ get_regional_scheduled_deletion,
+)
from sentry.models.organizationmember import OrganizationMember
from sentry.models.organizationmembermapping import OrganizationMemberMapping
-from sentry.models.scheduledeletion import BaseScheduledDeletion, get_regional_scheduled_deletion
from sentry.silo.base import SiloMode
from sentry.testutils.silo import assume_test_silo_mode
diff --git a/src/sentry/testutils/pytest/fixtures.py b/src/sentry/testutils/pytest/fixtures.py
index 150f94da9c54e..fb50304db1b01 100644
--- a/src/sentry/testutils/pytest/fixtures.py
+++ b/src/sentry/testutils/pytest/fixtures.py
@@ -9,8 +9,10 @@
import os
import re
import sys
+from collections.abc import Callable, Generator
from concurrent.futures import ThreadPoolExecutor
from string import Template
+from typing import Any, Protocol
import pytest
import requests
@@ -193,14 +195,29 @@ def read_snapshot_file(reference_file: str) -> tuple[str, str]:
return (header, refval)
+InequalityComparator = Callable[[str, str], bool | str]
+default_comparator = lambda refval, output: refval != output
+
+
+class InstaSnapshotter(Protocol):
+ def __call__(
+ self,
+ output: str | Any,
+ reference_file: str | None = None,
+ subname: str | None = None,
+ inequality_comparator: InequalityComparator = default_comparator,
+ ) -> None:
+ ...
+
+
@pytest.fixture
-def insta_snapshot(request, log):
+def insta_snapshot(request: pytest.FixtureRequest) -> Generator[InstaSnapshotter]:
def inner(
- output,
- reference_file=None,
- subname=None,
- inequality_comparator=lambda refval, output: refval != output,
- ):
+ output: str | Any,
+ reference_file: str | None = None,
+ subname: str | None = None,
+ inequality_comparator: InequalityComparator = default_comparator,
+ ) -> None:
from sentry.testutils.silo import strip_silo_mode_test_suffix
if reference_file is None:
diff --git a/tests/sentry/remote_config/__init__.py b/src/sentry/toolbar/__init__.py
similarity index 100%
rename from tests/sentry/remote_config/__init__.py
rename to src/sentry/toolbar/__init__.py
diff --git a/src/sentry/toolbar/iframe_view.py b/src/sentry/toolbar/iframe_view.py
deleted file mode 100644
index c76a93cccb195..0000000000000
--- a/src/sentry/toolbar/iframe_view.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from typing import Any
-
-from django.http import HttpRequest, HttpResponse
-
-from sentry.web.frontend.base import OrganizationView, region_silo_view
-
-
-@region_silo_view
-class IframeView(OrganizationView):
- # TODO: For perms check. This is just an example and we might not need it.
- # required_scope = "org:read,org:integrations"
-
- security_headers = {"X-Frame-Options": "ALLOWALL"}
-
- def respond(self, template: str, context: dict[str, Any] | None = None, status: int = 200):
- response = super().respond(template, context=context, status=status)
- for header, val in IframeView.security_headers.items():
- response[header] = val
- return response
-
- def handle_auth_required(self, request: HttpRequest, *args, **kwargs):
- # Override redirects to login
- if request.method == "GET":
- self.default_context = {}
- return self.respond("sentry/toolbar/iframe.html", status=401)
- return HttpResponse(status=401)
-
- def handle_permission_required(self, request: HttpRequest, *args, **kwargs):
- # Override redirects to login
- if request.method == "GET":
- self.default_context = {}
- return self.respond("sentry/toolbar/iframe.html", status=403)
- return HttpResponse(status=403)
-
- def get(self, request: HttpRequest, organization, project_id_or_slug):
- return self.respond("sentry/toolbar/iframe.html", status=200)
diff --git a/src/sentry/toolbar/login_success_view.py b/src/sentry/toolbar/login_success_view.py
deleted file mode 100644
index 5725aaa1f5643..0000000000000
--- a/src/sentry/toolbar/login_success_view.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from django.http import HttpRequest
-
-from sentry.web.frontend.base import OrganizationView, region_silo_view
-
-
-@region_silo_view
-class LoginSuccessView(OrganizationView):
- def get(self, request: HttpRequest, organization, project_id_or_slug):
- return self.respond("sentry/toolbar/login-success.html", status=200)
diff --git a/src/sentry/toolbar/utils/url.py b/src/sentry/toolbar/utils/url.py
new file mode 100644
index 0000000000000..abd27de0227fb
--- /dev/null
+++ b/src/sentry/toolbar/utils/url.py
@@ -0,0 +1,89 @@
+import re
+from urllib.parse import ParseResult, urlparse
+
+# Lets break down the regexp:
+# 0. Anchor the start of the string with `^`
+# 1. The Scheme section: `((?P<scheme>https?)?://)?`
+# - optionally we'll capture `http://` or `https://` or `://`
+# - if we have `http` or `https` then we'll put them in the group named "scheme"
+# 2. The hostname section: `(?P<hostname>[^:/?]+)`
+# - capture all characters but stop at the first of `:` or `/` or `?`
+# - put the string in a group named "hostname"
+# 3. The port section: `(:(?P<port>[^/?]+))?`
+# - optionally we'll capture all the characters starting with `:` and ending before `/` or `?`
+# - put the part after `:` in a group named `port`
+# 4. Match anything else: `.*$`
+# - This is everything after `/` or `?` which we might've found in step #2 or #3
+# - If there is a match or not, we don't name it. It will be ignored.
+#
+# Test and view it with tools like these:
+# https://regex101.com/r/rWQyb9/1
+# https://regex-vis.com/?r=%5E%28%28https%3F%29%3F%3A%2F%2F%29%3F%28%5B%5E%3A%2F%3F%5D%2B%29%28%3A%28%5B%5E%2F%3F%5D%2B%29%29%3F.*%24
+pattern = re.compile("^((?P<scheme>https?)?://)?(?P<hostname>[^:/?]+)(:(?P<port>[^/?]+))?.*$", re.I)
+
+
+def url_matches(source: ParseResult, target: str) -> bool:
+ """
+ Matches a referrer url with a user-provided one. Checks 3 fields:
+ * hostname: must equal target.hostname. The first subdomain in target may be a wildcard "*".
+ * port: must equal target.port, unless it is excluded from target.
+ * scheme: must equal target.scheme, unless it is excluded from target.
+ Note both url's path is ignored.
+ """
+
+ match = re.match(pattern, target)
+ if not match:
+ return False
+
+ scheme = match.group("scheme")
+ hostname = match.group("hostname")
+ port = match.group("port")
+
+ if not source.hostname or not hostname:
+ return False
+
+ is_wildcard_scheme = scheme == "://" or scheme is None
+ if not is_wildcard_scheme and source.scheme != scheme:
+ return False
+
+ is_wildcard_subdomain = hostname.startswith("*.") or hostname.startswith(".")
+ if is_wildcard_subdomain:
+ source_root = source.hostname.split(".", 1)[1]
+ target_root = hostname.split(".", 1)[1]
+ if source_root != target_root:
+ return False
+ elif source.hostname != hostname:
+ return False
+
+ is_default_port = port is None
+ source_port = _get_port(source)
+ if not is_default_port and source_port != port:
+ return False
+
+ return True
+
+
+def _get_port(parsed: ParseResult) -> str:
+ if parsed.port:
+ return str(parsed.port)
+ elif parsed.scheme == "http":
+ return "80"
+ elif parsed.scheme == "https":
+ return "443"
+ return ""
+
+
+def is_origin_allowed(referrer: str, allowed_origins: list[str]) -> bool:
+ # Empty referrer is always invalid
+ if not referrer:
+ return False
+
+ # The input referrer must be a well-formed url with a valid scheme.
+ if not referrer.startswith("http://") and not referrer.startswith("https://"):
+ return False
+
+ parsed_referrer = urlparse(referrer)
+ for origin in allowed_origins:
+ if url_matches(parsed_referrer, origin):
+ return True
+ return False
diff --git a/tests/sentry/remote_config/endpoints/__init__.py b/src/sentry/toolbar/views/__init__.py
similarity index 100%
rename from tests/sentry/remote_config/endpoints/__init__.py
rename to src/sentry/toolbar/views/__init__.py
diff --git a/src/sentry/toolbar/views/iframe_view.py b/src/sentry/toolbar/views/iframe_view.py
new file mode 100644
index 0000000000000..ba0ff1c5115fd
--- /dev/null
+++ b/src/sentry/toolbar/views/iframe_view.py
@@ -0,0 +1,73 @@
+from typing import Any
+
+from django.http import HttpRequest, HttpResponse
+from django.http.response import HttpResponseBase
+
+from sentry.models.organization import Organization
+from sentry.models.project import Project
+from sentry.toolbar.utils.url import is_origin_allowed
+from sentry.web.frontend.base import ProjectView, region_silo_view
+
+TEMPLATE = "sentry/toolbar/iframe.html"
+
+
+def _get_referrer(request) -> str | None:
+ # "Referer" (one R) is a legacy misspelling baked into the HTTP spec: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referer
+ return request.META.get("HTTP_REFERER")
+
+
+@region_silo_view
+class IframeView(ProjectView):
+ default_context = {}
+
+ def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponseBase:
+ self.organization_slug = kwargs.get("organization_slug", "")
+ self.project_id_or_slug = kwargs.get("project_id_or_slug", "")
+ return super().dispatch(request, *args, **kwargs)
+
+ def handle_disabled_member(self, organization: Organization) -> HttpResponse:
+ return self._respond_with_state("logged-out")
+
+ def handle_not_2fa_compliant(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
+ return self._respond_with_state("logged-out")
+
+ def handle_sudo_required(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
+ return self._respond_with_state("logged-out")
+
+ def handle_auth_required(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
+ return self._respond_with_state("logged-out")
+
+ def handle_permission_required(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
+ return self._respond_with_state("missing-project")
+
+ def get(
+ self, request: HttpRequest, organization: Organization, project: Project, *args, **kwargs
+ ) -> HttpResponse:
+ referrer = _get_referrer(request) or ""
+ allowed_origins: list[str] = project.get_option("sentry:toolbar_allowed_origins")
+
+ if referrer and is_origin_allowed(referrer, allowed_origins):
+ return self._respond_with_state("success")
+
+ return self._respond_with_state("invalid-domain")
+
+ def _respond_with_state(self, state: str):
+ response = self.respond(
+ TEMPLATE,
+ status=200, # always return 200 so the html will render inside the iframe
+ context={
+ "referrer": _get_referrer(self.request) or "",
+ "state": state,
+ "logging": self.request.GET.get("logging", ""),
+ "organization_slug": self.organization_slug,
+ "project_id_or_slug": self.project_id_or_slug,
+ },
+ )
+
+ referrer = _get_referrer(self.request) or ""
+
+ # This is an alternative to @csp_replace - we need to use this pattern to access the referrer.
+ response._csp_replace = {"frame-ancestors": [referrer.strip("/") or "'none'"]} # type: ignore[attr-defined]
+ response["X-Frame-Options"] = "DENY" if referrer == "" else "ALLOWALL"
+
+ return response
diff --git a/src/sentry/toolbar/views/login_success_view.py b/src/sentry/toolbar/views/login_success_view.py
new file mode 100644
index 0000000000000..e581bed6e4f7b
--- /dev/null
+++ b/src/sentry/toolbar/views/login_success_view.py
@@ -0,0 +1,21 @@
+from django.conf import settings
+from django.http import HttpRequest
+
+from sentry.web.frontend.base import OrganizationView, region_silo_view
+
+TEMPLATE = "sentry/toolbar/login-success.html"
+
+session_cookie_name = settings.SESSION_COOKIE_NAME
+
+
+@region_silo_view
+class LoginSuccessView(OrganizationView):
+ def get(self, request: HttpRequest, organization, project_id_or_slug):
+ return self.respond(
+ TEMPLATE,
+ status=200,
+ context={
+ "delay": int(request.GET.get("delay", 3000)),
+ "cookie": f"{session_cookie_name}={request.COOKIES.get(session_cookie_name)}",
+ },
+ )
diff --git a/src/sentry/tsdb/base.py b/src/sentry/tsdb/base.py
index 85d46c6987e9e..176e713145a84 100644
--- a/src/sentry/tsdb/base.py
+++ b/src/sentry/tsdb/base.py
@@ -122,6 +122,7 @@ class BaseTSDB(Service):
"get_most_frequent_series",
"get_frequency_series",
"get_frequency_totals",
+ "get_distinct_counts_totals_with_conditions",
]
)
@@ -554,6 +555,25 @@ def get_distinct_counts_totals(
"""
raise NotImplementedError
+ def get_distinct_counts_totals_with_conditions(
+ self,
+ model: TSDBModel,
+ keys: Sequence[int],
+ start: datetime,
+ end: datetime | None = None,
+ rollup: int | None = None,
+ environment_id: int | None = None,
+ use_cache: bool = False,
+ jitter_value: int | None = None,
+ tenant_ids: dict[str, int | str] | None = None,
+ referrer_suffix: str | None = None,
+ conditions: list[dict[str, Any]] | None = None,
+ ) -> dict[int, Any]:
+ """
+ Count distinct items during a time range with conditions.
+ """
+ raise NotImplementedError
+
def get_distinct_counts_union(
self,
model: TSDBModel,
diff --git a/src/sentry/tsdb/dummy.py b/src/sentry/tsdb/dummy.py
index a4ce1eba59a99..3370876e9a83e 100644
--- a/src/sentry/tsdb/dummy.py
+++ b/src/sentry/tsdb/dummy.py
@@ -76,6 +76,23 @@ def get_distinct_counts_totals(
self.validate_arguments([model], [environment_id])
return {k: 0 for k in keys}
+ def get_distinct_counts_totals_with_conditions(
+ self,
+ model,
+ keys: Sequence[int],
+ start,
+ end=None,
+ rollup=None,
+ environment_id=None,
+ use_cache=False,
+ jitter_value=None,
+ tenant_ids=None,
+ referrer_suffix=None,
+ conditions=None,
+ ):
+ self.validate_arguments([model], [environment_id])
+ return 0
+
def get_distinct_counts_union(
self,
model: TSDBModel,
diff --git a/src/sentry/tsdb/redissnuba.py b/src/sentry/tsdb/redissnuba.py
index ecef8fc7d7fcc..266e612fbec13 100644
--- a/src/sentry/tsdb/redissnuba.py
+++ b/src/sentry/tsdb/redissnuba.py
@@ -30,6 +30,7 @@ def dont_do_this(callargs):
"get_sums": (READ, single_model_argument),
"get_distinct_counts_series": (READ, single_model_argument),
"get_distinct_counts_totals": (READ, single_model_argument),
+ "get_distinct_counts_totals_with_conditions": (READ, single_model_argument),
"get_distinct_counts_union": (READ, single_model_argument),
"get_most_frequent": (READ, single_model_argument),
"get_most_frequent_series": (READ, single_model_argument),
diff --git a/src/sentry/tsdb/snuba.py b/src/sentry/tsdb/snuba.py
index 29fd5225efc9d..50d58b1e3cff3 100644
--- a/src/sentry/tsdb/snuba.py
+++ b/src/sentry/tsdb/snuba.py
@@ -800,6 +800,38 @@ def get_distinct_counts_totals(
referrer_suffix=referrer_suffix,
)
+ def get_distinct_counts_totals_with_conditions(
+ self,
+ model: TSDBModel,
+ keys: Sequence[int],
+ start: datetime,
+ end: datetime | None = None,
+ rollup: int | None = None,
+ environment_id: int | None = None,
+ use_cache: bool = False,
+ jitter_value: int | None = None,
+ tenant_ids: dict[str, int | str] | None = None,
+ referrer_suffix: str | None = None,
+ conditions: list[dict[str, Any]] | None = None,
+ ) -> dict[int, Any]:
+ """
+ Count distinct items during a time range with conditions.
+ """
+ return self.get_data(
+ model,
+ keys,
+ start,
+ end,
+ rollup,
+ [environment_id] if environment_id is not None else None,
+ aggregation="uniq",
+ use_cache=use_cache,
+ jitter_value=jitter_value,
+ tenant_ids=tenant_ids,
+ referrer_suffix=referrer_suffix,
+ conditions=conditions,
+ )
+
def get_distinct_counts_union(
self, model, keys, start, end=None, rollup=None, environment_id=None, tenant_ids=None
):
diff --git a/src/sentry/types/region.py b/src/sentry/types/region.py
index 50a8614d1875a..2fb0c7944804b 100644
--- a/src/sentry/types/region.py
+++ b/src/sentry/types/region.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from collections.abc import Collection, Container, Iterable
+from collections.abc import Collection, Iterable
from enum import Enum
from typing import Any
from urllib.parse import urljoin
@@ -321,7 +321,7 @@ def _find_orgs_for_user(user_id: int) -> set[int]:
@control_silo_function
-def find_regions_for_orgs(org_ids: Container[int]) -> set[str]:
+def find_regions_for_orgs(org_ids: Iterable[int]) -> set[str]:
from sentry.models.organizationmapping import OrganizationMapping
if SiloMode.get_current_mode() == SiloMode.MONOLITH:
diff --git a/src/sentry/uptime/consumers/results_consumer.py b/src/sentry/uptime/consumers/results_consumer.py
index ea3ace1031a9d..7af6259ddacd8 100644
--- a/src/sentry/uptime/consumers/results_consumer.py
+++ b/src/sentry/uptime/consumers/results_consumer.py
@@ -69,7 +69,6 @@ def build_active_consecutive_status_key(
class UptimeResultProcessor(ResultProcessor[CheckResult, UptimeSubscription]):
subscription_model = UptimeSubscription
- topic_for_codec = Topic.UPTIME_RESULTS
def get_subscription_id(self, result: CheckResult) -> str:
return result["subscription_id"]
@@ -333,3 +332,4 @@ def has_reached_status_threshold(
class UptimeResultsStrategyFactory(ResultsStrategyFactory[CheckResult, UptimeSubscription]):
result_processor_cls = UptimeResultProcessor
+ topic_for_codec = Topic.UPTIME_RESULTS
diff --git a/src/sentry/uptime/detectors/tasks.py b/src/sentry/uptime/detectors/tasks.py
index 279feec3987fa..1e37a61d45799 100644
--- a/src/sentry/uptime/detectors/tasks.py
+++ b/src/sentry/uptime/detectors/tasks.py
@@ -41,6 +41,8 @@
URL_MIN_PERCENT = 0.05
# Default value for how often we should run these subscriptions when onboarding them
ONBOARDING_SUBSCRIPTION_INTERVAL_SECONDS = int(timedelta(minutes=60).total_seconds())
+# Default timeout for auto-detected uptime monitors
+ONBOARDING_SUBSCRIPTION_TIMEOUT_MS = 10_000
logger = logging.getLogger("sentry.uptime-url-autodetection")
@@ -244,8 +246,12 @@ def monitor_url_for_project(project: Project, url: str):
)
get_or_create_project_uptime_subscription(
project,
+ # TODO(epurkhiser): This is where we would put the environment object
+ # from autodetection if we decide to do that.
+ environment=None,
url=url,
interval_seconds=ONBOARDING_SUBSCRIPTION_INTERVAL_SECONDS,
+ timeout_ms=ONBOARDING_SUBSCRIPTION_TIMEOUT_MS,
mode=ProjectUptimeSubscriptionMode.AUTO_DETECTED_ONBOARDING,
)
metrics.incr("uptime.detectors.candidate_url.monitor_created", sample_rate=1.0)
diff --git a/src/sentry/uptime/endpoints/project_uptime_alert_details.py b/src/sentry/uptime/endpoints/project_uptime_alert_details.py
index ec019ac8f6645..65e008add79b1 100644
--- a/src/sentry/uptime/endpoints/project_uptime_alert_details.py
+++ b/src/sentry/uptime/endpoints/project_uptime_alert_details.py
@@ -92,6 +92,7 @@ def put(
instance=uptime_subscription,
context={
"organization": project.organization,
+ "project": project,
"access": request.access,
"request": request,
},
diff --git a/src/sentry/uptime/endpoints/project_uptime_alert_index.py b/src/sentry/uptime/endpoints/project_uptime_alert_index.py
index 76bc4b0d469d3..978a8822fbb1b 100644
--- a/src/sentry/uptime/endpoints/project_uptime_alert_index.py
+++ b/src/sentry/uptime/endpoints/project_uptime_alert_index.py
@@ -2,11 +2,11 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import features
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
from sentry.api.bases import ProjectEndpoint
+from sentry.api.bases.project import ProjectAlertRulePermission
from sentry.api.serializers import serialize
from sentry.apidocs.constants import (
RESPONSE_BAD_REQUEST,
@@ -27,6 +27,7 @@ class ProjectUptimeAlertIndexEndpoint(ProjectEndpoint):
"POST": ApiPublishStatus.EXPERIMENTAL,
}
owner = ApiOwner.CRONS
+ permission_classes = (ProjectAlertRulePermission,)
@extend_schema(
operation_id="Create an Uptime Monitor",
@@ -44,10 +45,6 @@ def post(self, request: Request, project: Project) -> Response:
"""
Create a new monitor.
"""
- if not features.has(
- "organizations:uptime-api-create-update", project.organization, actor=request.user
- ):
- return Response(status=404)
validator = UptimeMonitorValidator(
data=request.data,
context={
diff --git a/src/sentry/uptime/endpoints/serializers.py b/src/sentry/uptime/endpoints/serializers.py
index 6d1e4e66d9232..96bfc573f411b 100644
--- a/src/sentry/uptime/endpoints/serializers.py
+++ b/src/sentry/uptime/endpoints/serializers.py
@@ -12,6 +12,7 @@
class ProjectUptimeSubscriptionSerializerResponse(TypedDict):
id: str
projectSlug: str
+ environment: str | None
name: str
status: int
mode: int
@@ -32,7 +33,7 @@ def __init__(self, expand=None):
def get_attrs(
self, item_list: Sequence[ProjectUptimeSubscription], user: Any, **kwargs: Any
) -> MutableMapping[Any, Any]:
- prefetch_related_objects(item_list, "uptime_subscription", "project")
+ prefetch_related_objects(item_list, "uptime_subscription", "project", "environment")
owners = list(filter(None, [item.owner for item in item_list]))
owners_serialized = serialize(
Actor.resolve_many(owners, filter_none=False), user, ActorSerializer()
@@ -58,6 +59,7 @@ def serialize(
return {
"id": str(obj.id),
"projectSlug": obj.project.slug,
+ "environment": obj.environment.name if obj.environment else None,
"name": obj.name or f"Uptime Monitoring for {obj.uptime_subscription.url}",
"status": obj.uptime_status,
"mode": obj.mode,
diff --git a/src/sentry/uptime/endpoints/validators.py b/src/sentry/uptime/endpoints/validators.py
index 41a287b3f3fe8..9a3634baa3e50 100644
--- a/src/sentry/uptime/endpoints/validators.py
+++ b/src/sentry/uptime/endpoints/validators.py
@@ -10,6 +10,7 @@
from sentry.api.fields import ActorField
from sentry.api.serializers.rest_framework import CamelSnakeSerializer
from sentry.auth.superuser import is_active_superuser
+from sentry.models.environment import Environment
from sentry.uptime.detectors.url_extraction import extract_domain_parts
from sentry.uptime.models import ProjectUptimeSubscription, ProjectUptimeSubscriptionMode
from sentry.uptime.subscriptions.subscriptions import (
@@ -35,6 +36,16 @@
SUPPORTED_HTTP_METHODS = ["GET", "POST", "HEAD", "PUT", "DELETE", "PATCH", "OPTIONS"]
MAX_REQUEST_SIZE_BYTES = 1000
+# This matches the jsonschema for the check config
+VALID_INTERVALS = [
+ timedelta(minutes=1),
+ timedelta(minutes=5),
+ timedelta(minutes=10),
+ timedelta(minutes=20),
+ timedelta(minutes=30),
+ timedelta(minutes=60),
+]
+
HEADERS_LIST_SCHEMA = {
"type": "array",
"items": {
@@ -47,13 +58,17 @@
}
-def compute_http_request_size(method: str, url: str, headers: Sequence[tuple[str, str]], body: str):
+def compute_http_request_size(
+ method: str, url: str, headers: Sequence[tuple[str, str]], body: str | None
+):
request_line_size = len(f"{method} {url} HTTP/1.1\r\n")
headers_size = sum(
len(key) + len(value.encode("utf-8")) + len("\r\n") for key, value in headers
)
- body_size = len(body.encode("utf-8"))
- return request_line_size + headers_size + len("\r\n") + body_size
+ body_size = 0
+ if body is not None:
+ body_size = len(body.encode("utf-8")) + len("\r\n")
+ return request_line_size + headers_size + body_size
@extend_schema_serializer()
@@ -64,30 +79,41 @@ class UptimeMonitorValidator(CamelSnakeSerializer):
help_text="Name of the uptime monitor",
)
owner = ActorField(
- required=True,
+ required=False,
allow_null=True,
help_text="The ID of the team or user that owns the uptime monitor. (eg. user:51 or team:6)",
)
+ environment = serializers.CharField(
+ max_length=64,
+ required=False,
+ allow_null=True,
+ help_text="Name of the environment",
+ )
url = URLField(required=True, max_length=255)
- interval_seconds = serializers.IntegerField(
- required=True, min_value=60, max_value=int(timedelta(days=1).total_seconds())
+ interval_seconds = serializers.ChoiceField(
+ required=True, choices=[int(i.total_seconds()) for i in VALID_INTERVALS]
+ )
+ timeout_ms = serializers.IntegerField(
+ required=True,
+ min_value=1000,
+ max_value=30_000,
)
mode = serializers.IntegerField(required=False)
method = serializers.ChoiceField(
required=False, choices=list(zip(SUPPORTED_HTTP_METHODS, SUPPORTED_HTTP_METHODS))
)
headers = serializers.JSONField(required=False)
- body = serializers.CharField(required=False)
+ body = serializers.CharField(required=False, allow_null=True)
def validate(self, attrs):
headers = []
method = "GET"
- body = ""
+ body = None
url = ""
if self.instance:
headers = self.instance.uptime_subscription.headers
method = self.instance.uptime_subscription.method
- body = self.instance.uptime_subscription.body or ""
+ body = self.instance.uptime_subscription.body
url = self.instance.uptime_subscription.url
request_size = compute_http_request_size(
@@ -134,17 +160,27 @@ def validate_mode(self, mode):
)
def create(self, validated_data):
+ if validated_data.get("environment") is not None:
+ environment = Environment.get_or_create(
+ project=self.context["project"],
+ name=validated_data["environment"],
+ )
+ else:
+ environment = None
+
method_headers_body = {
k: v for k, v in validated_data.items() if k in {"method", "headers", "body"}
}
try:
uptime_monitor, created = get_or_create_project_uptime_subscription(
project=self.context["project"],
+ environment=environment,
url=validated_data["url"],
interval_seconds=validated_data["interval_seconds"],
+ timeout_ms=validated_data["timeout_ms"],
name=validated_data["name"],
mode=validated_data.get("mode", ProjectUptimeSubscriptionMode.MANUAL),
- owner=validated_data["owner"],
+ owner=validated_data.get("owner"),
**method_headers_body,
)
except MaxManualUptimeSubscriptionsReached:
@@ -171,19 +207,32 @@ def update(self, instance: ProjectUptimeSubscription, data):
if "interval_seconds" in data
else instance.uptime_subscription.interval_seconds
)
+ timeout_ms = (
+ data["timeout_ms"] if "timeout_ms" in data else instance.uptime_subscription.timeout_ms
+ )
method = data["method"] if "method" in data else instance.uptime_subscription.method
headers = data["headers"] if "headers" in data else instance.uptime_subscription.headers
body = data["body"] if "body" in data else instance.uptime_subscription.body
name = data["name"] if "name" in data else instance.name
owner = data["owner"] if "owner" in data else instance.owner
+ if "environment" in data:
+ environment = Environment.get_or_create(
+ project=self.context["project"],
+ name=data["environment"],
+ )
+ else:
+ environment = instance.environment
+
if "mode" in data:
raise serializers.ValidationError("Mode can only be specified on creation (for now)")
update_project_uptime_subscription(
uptime_monitor=instance,
+ environment=environment,
url=url,
interval_seconds=interval_seconds,
+ timeout_ms=timeout_ms,
method=method,
headers=headers,
body=body,
diff --git a/src/sentry/uptime/issue_platform.py b/src/sentry/uptime/issue_platform.py
index a74e652812ee6..9b72df0aa80da 100644
--- a/src/sentry/uptime/issue_platform.py
+++ b/src/sentry/uptime/issue_platform.py
@@ -80,6 +80,7 @@ def build_occurrence_from_result(
culprit="", # TODO: The url?
detection_time=datetime.now(timezone.utc),
level="error",
+ assignee=project_subscription.owner,
)
@@ -88,8 +89,13 @@ def build_event_data_for_occurrence(
project_subscription: ProjectUptimeSubscription,
occurrence: IssueOccurrence,
):
+ # Default environment when it hasn't been configured
+ env = "prod"
+ if project_subscription.environment:
+ env = project_subscription.environment.name
+
return {
- "environment": "prod", # TODO: Include the environment here when we have it
+ "environment": env,
"event_id": occurrence.event_id,
"fingerprint": occurrence.fingerprint,
"platform": "other",
diff --git a/src/sentry/uptime/migrations/0014_add_uptime_enviromnet.py b/src/sentry/uptime/migrations/0014_add_uptime_enviromnet.py
new file mode 100644
index 0000000000000..a4d8ba9f26f78
--- /dev/null
+++ b/src/sentry/uptime/migrations/0014_add_uptime_enviromnet.py
@@ -0,0 +1,41 @@
+# Generated by Django 5.1.1 on 2024-09-30 16:23
+
+import django.db.models.deletion
+from django.db import migrations
+
+import sentry.db.models.fields.foreignkey
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0768_fix_old_group_first_seen_dates"),
+ ("uptime", "0013_uptime_subscription_new_unique"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="projectuptimesubscription",
+ name="environment",
+ field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ db_constraint=False,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="sentry.environment",
+ ),
+ ),
+ ]
diff --git a/src/sentry/uptime/migrations/0015_headers_deafult_empty_list.py b/src/sentry/uptime/migrations/0015_headers_deafult_empty_list.py
new file mode 100644
index 0000000000000..1226be4d8b055
--- /dev/null
+++ b/src/sentry/uptime/migrations/0015_headers_deafult_empty_list.py
@@ -0,0 +1,34 @@
+# Generated by Django 5.1.1 on 2024-10-02 19:09
+
+from django.db import migrations
+
+import sentry.db.models.fields.jsonfield
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("uptime", "0014_add_uptime_enviromnet"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="uptimesubscription",
+ name="headers",
+ field=sentry.db.models.fields.jsonfield.JSONField(db_default=[], default=dict),
+ ),
+ ]
diff --git a/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py b/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
new file mode 100644
index 0000000000000..278170b716b14
--- /dev/null
+++ b/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
@@ -0,0 +1,45 @@
+# Generated by Django 5.1.1 on 2024-10-02 16:06
+from django.db import migrations
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+from django.db.migrations.state import StateApps
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
+
+
+def migrate_header_objects_to_lists(
+ apps: StateApps,
+ schema_editor: BaseDatabaseSchemaEditor,
+) -> None:
+ UptimeSubscription = apps.get_model("uptime", "UptimeSubscription")
+ for sub in RangeQuerySetWrapperWithProgressBar(UptimeSubscription.objects.filter(headers={})):
+ sub.headers = []
+ sub.save()
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = True
+
+ dependencies = [
+ ("uptime", "0015_headers_deafult_empty_list"),
+ ]
+
+ operations = [
+ migrations.RunPython(
+ migrate_header_objects_to_lists,
+ migrations.RunPython.noop,
+ hints={"tables": ["uptime_uptimesubscription"]},
+ ),
+ ]
diff --git a/src/sentry/uptime/migrations/0017_unique_on_timeout.py b/src/sentry/uptime/migrations/0017_unique_on_timeout.py
new file mode 100644
index 0000000000000..cce1a75157d03
--- /dev/null
+++ b/src/sentry/uptime/migrations/0017_unique_on_timeout.py
@@ -0,0 +1,48 @@
+# Generated by Django 5.1.1 on 2024-10-08 19:37
+
+import django.db.models.functions.comparison
+import django.db.models.functions.text
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("uptime", "0016_translate_uptime_object_headers_to_lists"),
+ ]
+
+ operations = [
+ migrations.RemoveConstraint(
+ model_name="uptimesubscription",
+ name="uptime_uptimesubscription_unique_subscription_check",
+ ),
+ migrations.AddConstraint(
+ model_name="uptimesubscription",
+ constraint=models.UniqueConstraint(
+ models.F("url"),
+ models.F("interval_seconds"),
+ models.F("timeout_ms"),
+ models.F("method"),
+ django.db.models.functions.text.MD5("headers"),
+ django.db.models.functions.comparison.Coalesce(
+ django.db.models.functions.text.MD5("body"), models.Value("")
+ ),
+ name="uptime_uptimesubscription_unique_subscription_check",
+ ),
+ ),
+ ]
diff --git a/src/sentry/uptime/models.py b/src/sentry/uptime/models.py
index 20a34e666c370..1ed3dc2aa3867 100644
--- a/src/sentry/uptime/models.py
+++ b/src/sentry/uptime/models.py
@@ -54,7 +54,7 @@ class UptimeSubscription(BaseRemoteSubscription, DefaultFieldsModelExisting):
# HTTP method to perform the check with
method = models.CharField(max_length=20, db_default="GET")
# HTTP headers to send when performing the check
- headers = JSONField(json_dumps=headers_json_encoder, db_default={})
+ headers = JSONField(json_dumps=headers_json_encoder, db_default=[])
# HTTP body to send when performing the check
body = models.TextField(null=True)
@@ -71,6 +71,7 @@ class Meta:
models.UniqueConstraint(
"url",
"interval_seconds",
+ "timeout_ms",
"method",
MD5("headers"),
Coalesce(MD5("body"), Value("")),
@@ -100,6 +101,9 @@ class ProjectUptimeSubscription(DefaultFieldsModelExisting):
__relocation_scope__ = RelocationScope.Excluded
project = FlexibleForeignKey("sentry.Project")
+ environment = FlexibleForeignKey(
+ "sentry.Environment", db_index=True, db_constraint=False, null=True
+ )
uptime_subscription = FlexibleForeignKey("uptime.UptimeSubscription", on_delete=models.PROTECT)
mode = models.SmallIntegerField(default=ProjectUptimeSubscriptionMode.MANUAL.value)
uptime_status = models.PositiveSmallIntegerField(default=UptimeStatus.OK.value)
diff --git a/src/sentry/uptime/subscriptions/subscriptions.py b/src/sentry/uptime/subscriptions/subscriptions.py
index 5412a4b311e0d..a8d580d1312e2 100644
--- a/src/sentry/uptime/subscriptions/subscriptions.py
+++ b/src/sentry/uptime/subscriptions/subscriptions.py
@@ -7,6 +7,7 @@
from django.db.models.expressions import Value
from django.db.models.functions import MD5, Coalesce
+from sentry.models.environment import Environment
from sentry.models.project import Project
from sentry.types.actor import Actor
from sentry.uptime.detectors.url_extraction import extract_domain_parts
@@ -27,8 +28,6 @@
UPTIME_SUBSCRIPTION_TYPE = "uptime_monitor"
MAX_AUTO_SUBSCRIPTIONS_PER_ORG = 1
MAX_MANUAL_SUBSCRIPTIONS_PER_ORG = 100
-# Default timeout for all subscriptions
-DEFAULT_SUBSCRIPTION_TIMEOUT_MS = 10000
class MaxManualUptimeSubscriptionsReached(ValueError):
@@ -38,6 +37,7 @@ class MaxManualUptimeSubscriptionsReached(ValueError):
def retrieve_uptime_subscription(
url: str,
interval_seconds: int,
+ timeout_ms: int,
method: str,
headers: Sequence[tuple[str, str]],
body: str | None,
@@ -45,7 +45,10 @@ def retrieve_uptime_subscription(
try:
subscription = (
UptimeSubscription.objects.filter(
- url=url, interval_seconds=interval_seconds, method=method
+ url=url,
+ interval_seconds=interval_seconds,
+ timeout_ms=timeout_ms,
+ method=method,
)
.annotate(
headers_md5=MD5("headers", output_field=TextField()),
@@ -65,7 +68,7 @@ def retrieve_uptime_subscription(
def get_or_create_uptime_subscription(
url: str,
interval_seconds: int,
- timeout_ms: int = DEFAULT_SUBSCRIPTION_TIMEOUT_MS,
+ timeout_ms: int,
method: str = "GET",
headers: Sequence[tuple[str, str]] | None = None,
body: str | None = None,
@@ -80,7 +83,9 @@ def get_or_create_uptime_subscription(
# domain.
result = extract_domain_parts(url)
- subscription = retrieve_uptime_subscription(url, interval_seconds, method, headers, body)
+ subscription = retrieve_uptime_subscription(
+ url, interval_seconds, timeout_ms, method, headers, body
+ )
created = False
if subscription is None:
@@ -101,7 +106,7 @@ def get_or_create_uptime_subscription(
except IntegrityError:
# Handle race condition where we tried to retrieve an existing subscription while it was being created
subscription = retrieve_uptime_subscription(
- url, interval_seconds, method, headers, body
+ url, interval_seconds, timeout_ms, method, headers, body
)
if subscription is None:
@@ -142,9 +147,10 @@ def delete_uptime_subscription(uptime_subscription: UptimeSubscription):
def get_or_create_project_uptime_subscription(
project: Project,
+ environment: Environment | None,
url: str,
interval_seconds: int,
- timeout_ms: int = DEFAULT_SUBSCRIPTION_TIMEOUT_MS,
+ timeout_ms: int,
method: str = "GET",
headers: Sequence[tuple[str, str]] | None = None,
body: str | None = None,
@@ -174,6 +180,7 @@ def get_or_create_project_uptime_subscription(
owner_team_id = owner.id
return ProjectUptimeSubscription.objects.get_or_create(
project=project,
+ environment=environment,
uptime_subscription=uptime_subscription,
mode=mode.value,
name=name,
@@ -184,8 +191,10 @@ def get_or_create_project_uptime_subscription(
def update_project_uptime_subscription(
uptime_monitor: ProjectUptimeSubscription,
+ environment: Environment | None,
url: str,
interval_seconds: int,
+ timeout_ms: int,
method: str,
headers: Sequence[tuple[str, str]],
body: str | None,
@@ -197,7 +206,7 @@ def update_project_uptime_subscription(
"""
cur_uptime_subscription = uptime_monitor.uptime_subscription
new_uptime_subscription = get_or_create_uptime_subscription(
- url, interval_seconds, cur_uptime_subscription.timeout_ms, method, headers, body
+ url, interval_seconds, timeout_ms, method, headers, body
)
updated_subscription = cur_uptime_subscription.id != new_uptime_subscription.id
@@ -217,6 +226,7 @@ def update_project_uptime_subscription(
owner_user_id = None
uptime_monitor.update(
+ environment=environment,
uptime_subscription=new_uptime_subscription,
name=name,
mode=mode,
diff --git a/src/sentry/users/api/endpoints/user_authenticator_details.py b/src/sentry/users/api/endpoints/user_authenticator_details.py
index f0dd463623587..731ce1a811802 100644
--- a/src/sentry/users/api/endpoints/user_authenticator_details.py
+++ b/src/sentry/users/api/endpoints/user_authenticator_details.py
@@ -131,7 +131,7 @@ def put(
:auth required:
"""
- # TODO temporary solution for both renaming and regenerating recovery code.
+ # TODO: temporary solution for both renaming and regenerating recovery code.
# Need to find new home for regenerating recovery codes as it doesn't really do what put is supposed to do
try:
authenticator = Authenticator.objects.get(user=user, id=auth_id)
diff --git a/src/sentry/users/api/serializers/user.py b/src/sentry/users/api/serializers/user.py
index 87f2b7a54bdbd..4d987a9bc19c6 100644
--- a/src/sentry/users/api/serializers/user.py
+++ b/src/sentry/users/api/serializers/user.py
@@ -61,7 +61,7 @@ class _Identity(TypedDict):
class _UserOptions(TypedDict):
theme: str # TODO: enum/literal for theme options
language: str
- stacktraceOrder: int # TODO enum/literal
+ stacktraceOrder: int # TODO: enum/literal
defaultIssueEvent: str
timezone: str
clock24Hours: bool
diff --git a/src/sentry/utils/celery.py b/src/sentry/utils/celery.py
index 05e27fec298bf..f576a099bcb8a 100644
--- a/src/sentry/utils/celery.py
+++ b/src/sentry/utils/celery.py
@@ -1,9 +1,74 @@
+from collections.abc import Mapping, MutableSequence, Sequence
from random import randint
from typing import Any
from celery.schedules import crontab
+from kombu import Queue
+
+from sentry.conf.types.celery import SplitQueueSize, SplitQueueTaskRoute
def crontab_with_minute_jitter(*args: Any, **kwargs: Any) -> crontab:
kwargs["minute"] = randint(0, 59)
return crontab(*args, **kwargs)
+
+
+def build_queue_names(base_name: str, quantity: int) -> Sequence[str]:
+ ret = []
+ for index in range(quantity):
+ name = f"{base_name}_{index + 1}"
+ ret.append(name)
+ return ret
+
+
+def _build_queues(base: str, quantity: int) -> Sequence[Queue]:
+ return [Queue(name=name, routing_key=name) for name in build_queue_names(base, quantity)]
+
+
+def make_split_task_queues(config: Mapping[str, SplitQueueTaskRoute]) -> list[Queue]:
+ """
+ Generates the split queues definitions from the mapping between
+ a task name and a config expressed as `SplitQueueTaskRoute`.
+ """
+ ret: list[Queue] = []
+ for conf in config.values():
+ if "queues_config" in conf:
+ ret.extend(_build_queues(conf["default_queue"], conf["queues_config"]["total"]))
+ return ret
+
+
+def make_split_queues(config: Mapping[str, SplitQueueSize]) -> Sequence[Queue]:
+ """
+ Generates the split queue definitions from the mapping between
+ base queue and split queue config.
+ """
+ ret: MutableSequence[Queue] = []
+ for base_name, conf in config.items():
+ ret.extend(_build_queues(base_name, conf["total"]))
+
+ return ret
+
+
+def safe_append(queues: MutableSequence[Queue], queue: Queue) -> None:
+ """
+ We define queues as lists in the configuration and we allow override
+ of the config per environment.
+ Unfortunately, if you add a queue with the same name twice to the Celery
+ config, Celery just creates the queue twice. This can be undesired behavior
+ depending on the Celery backend, so this method allows adding queues to
+ a list without duplication.
+ """
+ existing_queue_names = {q.name for q in queues}
+ if queue.name not in existing_queue_names:
+ queues.append(queue)
+
+
+def safe_extend(queues: MutableSequence[Queue], to_add: Sequence[Queue]) -> None:
+ """
+ Like `safe_append`, but behaves like `extend`, adding multiple queues
+ to the config.
+ """
+ existing_queue_names = {q.name for q in queues}
+ for q in to_add:
+ if q.name not in existing_queue_names:
+ queues.append(q)
diff --git a/src/sentry/utils/eventuser.py b/src/sentry/utils/eventuser.py
index a8580f98da68f..99d342e0597ea 100644
--- a/src/sentry/utils/eventuser.py
+++ b/src/sentry/utils/eventuser.py
@@ -7,7 +7,7 @@
from datetime import datetime
from functools import cached_property
from ipaddress import IPv4Address, IPv6Address, ip_address
-from typing import Any
+from typing import Any, TypedDict
from django.db.models import QuerySet
from snuba_sdk import (
@@ -87,6 +87,15 @@ def get_ip_address_conditions(ip_addresses: list[str]) -> list[Condition]:
return conditions
+class SerializedEventUser(TypedDict):
+ id: str
+ username: str | None
+ email: str | None
+ name: str | None
+ ipAddress: str | None
+ avatarUrl: str | None
+
+
@dataclass
class EventUser:
project_id: int | None
@@ -334,7 +343,7 @@ def iter_attributes(self):
for key in KEYWORD_MAP.keys():
yield key, getattr(self, key)
- def serialize(self):
+ def serialize(self) -> SerializedEventUser:
return {
"id": str(self.id) if self.id else str(self.user_ident),
"username": self.username,
diff --git a/src/sentry/utils/flag.py b/src/sentry/utils/flag.py
index a19a4829fffdf..7a878a0c7f3c5 100644
--- a/src/sentry/utils/flag.py
+++ b/src/sentry/utils/flag.py
@@ -4,7 +4,7 @@
flag_manager = ContextVar("flag_manager") # type: ignore[var-annotated]
-def initialize_flag_manager(capacity: int = 10) -> None:
+def initialize_flag_manager(capacity: int = 100) -> None:
flag_manager.set(FlagManager(capacity=capacity))
diff --git a/src/sentry/utils/github.py b/src/sentry/utils/github.py
new file mode 100644
index 0000000000000..9c8eab15f1171
--- /dev/null
+++ b/src/sentry/utils/github.py
@@ -0,0 +1,45 @@
+import base64
+import binascii
+from typing import Any
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import ec
+from pydantic import BaseModel
+
+from sentry import options
+
+from .github_client import GitHubClient
+
+
+class GitHubKeysPayload(BaseModel):
+ public_keys: list[dict[str, Any]]
+
+
+def verify_signature(payload: str, signature: str, key_id: str, subpath: str) -> None:
+ if not payload or not signature or not key_id:
+ raise ValueError("Invalid payload, signature, or key_id")
+
+ client_id = options.get("github-login.client-id")
+ client_secret = options.get("github-login.client-secret")
+ client = GitHubClient(client_id=client_id, client_secret=client_secret)
+ response = client.get(f"/meta/public_keys/{subpath}")
+ keys = GitHubKeysPayload.parse_obj(response)
+
+ public_key = next((k for k in keys.public_keys if k["key_identifier"] == key_id), None)
+ if not public_key:
+ raise ValueError("No public key found matching key identifier")
+
+ key = serialization.load_pem_public_key(public_key["key"].encode())
+
+ if not isinstance(key, ec.EllipticCurvePublicKey):
+ raise ValueError("Invalid public key type")
+
+ try:
+ # Decode the base64 signature to bytes
+ signature_bytes = base64.b64decode(signature)
+ key.verify(signature_bytes, payload.encode(), ec.ECDSA(hashes.SHA256()))
+ except InvalidSignature:
+ raise ValueError("Signature does not match payload")
+ except binascii.Error:
+ raise ValueError("Invalid signature encoding")
diff --git a/src/sentry/utils/github_client.py b/src/sentry/utils/github_client.py
new file mode 100644
index 0000000000000..37a079c545338
--- /dev/null
+++ b/src/sentry/utils/github_client.py
@@ -0,0 +1,80 @@
+from requests.exceptions import HTTPError
+
+from sentry.http import build_session
+from sentry.utils import json
+
+
+class ApiError(Exception):
+ code = None
+ json = None
+ xml = None
+
+ def __init__(self, text, code=None):
+ if code is not None:
+ self.code = code
+ self.text = text
+ # TODO(dcramer): pull in XML support from Jira
+ if text:
+ try:
+ self.json = json.loads(text)
+ except (json.JSONDecodeError, ValueError):
+ self.json = None
+ else:
+ self.json = None
+ super().__init__(text[:128])
+
+ @classmethod
+ def from_response(cls, response):
+ if response.status_code == 401:
+ return ApiUnauthorized(response.text)
+ return cls(response.text, response.status_code)
+
+
+class ApiUnauthorized(ApiError):
+ code = 401
+
+
+class GitHubClient:
+ ApiError = ApiError
+
+ url = "https://api.github.com"
+
+ def __init__(self, url=None, token=None, client_id=None, client_secret=None):
+ if url is not None:
+ self.url = url.rstrip("/")
+ self.token = token
+ self.client_id = client_id
+ self.client_secret = client_secret
+
+ def _request(self, method, path, headers=None, data=None, params=None, auth=None):
+ with build_session() as session:
+ try:
+ resp = getattr(session, method.lower())(
+ url=f"{self.url}{path}",
+ headers=headers,
+ json=data,
+ params=params,
+ allow_redirects=True,
+ auth=auth,
+ )
+ resp.raise_for_status()
+ except HTTPError as e:
+ raise ApiError.from_response(e.response)
+ return resp.json()
+
+ def request(self, method, path, data=None, params=None, auth=None):
+ headers = {"Accept": "application/vnd.github.valkyrie-preview+json"}
+
+ if self.token:
+ headers.setdefault("Authorization", f"token {self.token}")
+
+ elif auth is None and self.client_id and self.client_secret:
+ auth = (self.client_id, self.client_secret)
+
+ return self._request(method, path, headers=headers, data=data, params=params, auth=auth)
+
+ def get(self, *args, **kwargs):
+ return self.request("GET", *args, **kwargs)
+
+ def post(self, *args, **kwargs):
+ return self.request("POST", *args, **kwargs)
diff --git a/src/sentry/utils/kvstore/bigtable.py b/src/sentry/utils/kvstore/bigtable.py
index 824f191cd0f1b..bc7f38c3bff6f 100644
--- a/src/sentry/utils/kvstore/bigtable.py
+++ b/src/sentry/utils/kvstore/bigtable.py
@@ -6,6 +6,7 @@
from threading import Lock
from typing import Any
+import sentry_sdk
from django.utils import timezone
from google.api_core import exceptions, retry
from google.cloud import bigtable
@@ -114,7 +115,8 @@ def _get_table(self, admin: bool = False) -> Table:
return table
def get(self, key: str) -> bytes | None:
- row = self._get_table().read_row(key)
+ with sentry_sdk.start_span(op="bigtable.get"):
+ row = self._get_table().read_row(key)
if row is None:
return None
diff --git a/src/sentry/utils/performance_issues/detectors/consecutive_db_detector.py b/src/sentry/utils/performance_issues/detectors/consecutive_db_detector.py
index 907b2cbf112ce..4307e668b631f 100644
--- a/src/sentry/utils/performance_issues/detectors/consecutive_db_detector.py
+++ b/src/sentry/utils/performance_issues/detectors/consecutive_db_detector.py
@@ -125,7 +125,7 @@ def _store_performance_problem(self) -> None:
self.stored_problems[fingerprint] = PerformanceProblem(
fingerprint,
"db",
- desc=query, # TODO - figure out which query to use for description
+ desc=query, # TODO: figure out which query to use for description
type=PerformanceConsecutiveDBQueriesGroupType,
cause_span_ids=cause_span_ids,
parent_span_ids=None,
diff --git a/src/sentry/utils/performance_issues/detectors/http_overhead_detector.py b/src/sentry/utils/performance_issues/detectors/http_overhead_detector.py
index 7e4eb0f7f3b62..697a1e1a38d3f 100644
--- a/src/sentry/utils/performance_issues/detectors/http_overhead_detector.py
+++ b/src/sentry/utils/performance_issues/detectors/http_overhead_detector.py
@@ -65,11 +65,13 @@ def visit_span(self, span: Span) -> None:
url = span_data.get("url", "")
span_start = span.get("start_timestamp", 0) * 1000
- request_start = span_data.get("http.request.request_start", 0) * 1000
+ request_start = span_data.get("http.request.request_start", 0)
if not url or not span_start or not request_start:
return
+ request_start *= 1000
+
if url.startswith("/"):
location = "/"
else:
diff --git a/src/sentry/utils/performance_issues/detectors/io_main_thread_detector.py b/src/sentry/utils/performance_issues/detectors/io_main_thread_detector.py
index 5755e56109f94..b4919bf0a144e 100644
--- a/src/sentry/utils/performance_issues/detectors/io_main_thread_detector.py
+++ b/src/sentry/utils/performance_issues/detectors/io_main_thread_detector.py
@@ -193,7 +193,8 @@ def _is_io_on_main_thread(self, span: Span) -> bool:
data = span.get("data", {})
if data is None:
return False
- file_path = data.get("file.path", "").lower()
+ file_path = (data.get("file.path") or "").lower()
+
if any(glob_match(file_path, ignored_pattern) for ignored_pattern in self.IGNORED_LIST):
return False
# doing is True since the value can be any type
diff --git a/src/sentry/utils/performance_issues/detectors/large_payload_detector.py b/src/sentry/utils/performance_issues/detectors/large_payload_detector.py
index 755e4c319b9bd..a4c2dd37cbf31 100644
--- a/src/sentry/utils/performance_issues/detectors/large_payload_detector.py
+++ b/src/sentry/utils/performance_issues/detectors/large_payload_detector.py
@@ -54,6 +54,10 @@ def visit_span(self, span: Span) -> None:
return
payload_size_threshold = self.settings.get("payload_size_threshold")
+
+ if isinstance(encoded_body_size, str):
+ encoded_body_size = int(encoded_body_size)
+
if encoded_body_size > payload_size_threshold:
self._store_performance_problem(span)
diff --git a/src/sentry/utils/performance_issues/performance_detection.py b/src/sentry/utils/performance_issues/performance_detection.py
index db1f02b915b78..dd7571b5e98e6 100644
--- a/src/sentry/utils/performance_issues/performance_detection.py
+++ b/src/sentry/utils/performance_issues/performance_detection.py
@@ -124,9 +124,7 @@ def detect_performance_problems(
sentry_sdk.set_tag("_did_analyze_performance_issue", "true")
with (
metrics.timer("performance.detect_performance_issue", sample_rate=0.01),
- sentry_sdk.start_span(
- op="py.detect_performance_issue", description="none"
- ) as sdk_span,
+ sentry_sdk.start_span(op="py.detect_performance_issue", name="none") as sdk_span,
):
return _detect_performance_problems(
data, sdk_span, project, is_standalone_spans=is_standalone_spans
@@ -338,10 +336,10 @@ def _detect_performance_problems(
) -> list[PerformanceProblem]:
event_id = data.get("event_id", None)
- with sentry_sdk.start_span(op="function", description="get_detection_settings"):
+ with sentry_sdk.start_span(op="function", name="get_detection_settings"):
detection_settings = get_detection_settings(project.id)
- with sentry_sdk.start_span(op="initialize", description="PerformanceDetector"):
+ with sentry_sdk.start_span(op="initialize", name="PerformanceDetector"):
detectors: list[PerformanceDetector] = [
detector_class(detection_settings, data)
for detector_class in DETECTOR_CLASSES
@@ -350,11 +348,11 @@ def _detect_performance_problems(
for detector in detectors:
with sentry_sdk.start_span(
- op="function", description=f"run_detector_on_data.{detector.type.value}"
+ op="function", name=f"run_detector_on_data.{detector.type.value}"
):
run_detector_on_data(detector, data)
- with sentry_sdk.start_span(op="function", description="report_metrics_for_detectors"):
+ with sentry_sdk.start_span(op="function", name="report_metrics_for_detectors"):
# Metrics reporting only for detection, not created issues.
report_metrics_for_detectors(
data,
@@ -368,7 +366,7 @@ def _detect_performance_problems(
organization = project.organization
problems: list[PerformanceProblem] = []
- with sentry_sdk.start_span(op="performance_detection", description="is_creation_allowed"):
+ with sentry_sdk.start_span(op="performance_detection", name="is_creation_allowed"):
for detector in detectors:
if all(
[
diff --git a/src/sentry/utils/performance_issues/performance_problem.py b/src/sentry/utils/performance_issues/performance_problem.py
index ecd5b4a276b65..11c978539f346 100644
--- a/src/sentry/utils/performance_issues/performance_problem.py
+++ b/src/sentry/utils/performance_issues/performance_problem.py
@@ -18,7 +18,7 @@ class PerformanceProblem:
# The actual bad spans
offender_span_ids: Sequence[str]
# Evidence to be used for the group
- # TODO make evidence_data and evidence_display required once all detectors have been migrated to platform
+ # TODO: make evidence_data and evidence_display required once all detectors have been migrated to platform
# We can't make it required until we stop loading these from nodestore via EventPerformanceProblem,
# since there's legacy data in there that won't have these fields.
# So until we disable transaction based perf issues we'll need to keep this optional.
diff --git a/src/sentry/utils/prompts.py b/src/sentry/utils/prompts.py
index b1aa15112077a..07241483be6fd 100644
--- a/src/sentry/utils/prompts.py
+++ b/src/sentry/utils/prompts.py
@@ -20,6 +20,7 @@
"data_consent_priority": {"required_fields": ["organization_id"]},
"issue_replay_inline_onboarding": {"required_fields": ["organization_id", "project_id"]},
"issue_feedback_hidden": {"required_fields": ["organization_id", "project_id"]},
+ "issue_views_add_view_banner": {"required_fields": ["organization_id"]},
}
diff --git a/src/sentry/utils/registry.py b/src/sentry/utils/registry.py
new file mode 100644
index 0000000000000..1174cb876ac86
--- /dev/null
+++ b/src/sentry/utils/registry.py
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+from typing import Generic, TypeVar
+
+
+class AlreadyRegisteredError(ValueError):
+ pass
+
+
+class NoRegistrationExistsError(ValueError):
+ pass
+
+
+T = TypeVar("T")
+
+
+class Registry(Generic[T]):
+ def __init__(self):
+ self.registrations: dict[str, T] = {}
+
+ def register(self, key: str):
+ def inner(item: T) -> T:
+ if key in self.registrations:
+ raise AlreadyRegisteredError(
+ f"A registration already exists for {key}: {self.registrations[key]}"
+ )
+ self.registrations[key] = item
+ return item
+
+ return inner
+
+ def get(self, key: str) -> T:
+ if key not in self.registrations:
+ raise NoRegistrationExistsError(f"No registration exists for {key}")
+ return self.registrations[key]
diff --git a/src/sentry/utils/retries.py b/src/sentry/utils/retries.py
index 2b144568d6167..28d780846dad7 100644
--- a/src/sentry/utils/retries.py
+++ b/src/sentry/utils/retries.py
@@ -1,6 +1,7 @@
import functools
import itertools
import logging
+import math
import random
import time
from abc import ABC, abstractmethod
@@ -66,6 +67,49 @@ def delay(attempt: int) -> float:
return delay
+def sigmoid_delay(offset: int = -5, midpoint: int = 0, step: int = 1) -> Callable[[int], float]:
+ """
+ Returns an S-Curve function.
+
+ A sigmoid is the intersection of these two behaviors:
+ `while(true): retry() # immediate retry`
+ and
+ `while(true): sleep(1); retry() # static-wait then retry`
+
+ The intersection of these two worlds is an exponential function which
+ gradually ramps the program up to (or down to) a stable state (the s-curve).
+ The sharpness of the curve is controlled with step. A step of 0 flattens the
+ curve. A step of infinity turns the curve into a step change (a vertical
+ line).
+
+ The sigmoid is more difficult to intuit than a simple exponential delay but it
+ allows you to cap the maximum amount of time you're willing to wait between
+ retries. The cap is _always_ 1 second regardless of the value of the other
+ arguments. If you want to wait longer than one second multiply the result of
+ the function by something!
+
+ Consider this program:
+ [sigmoid_delay()(i) for i in range(-5, 5)]
+ which evaluates to (approximately):
+ [0.006, 0.017, 0.0474, 0.119, 0.268, 0.5, 0.731, 0.880, 0.952, 0.982]
+
+ You get the same results with:
+ [sigmoid_delay()(i) for i in range(10)]
+ except the window has changed:
+ [0.5, 0.731, 0.880, 0.952, 0.982, ...]
+
+ Now you see further along the curve. This explains the utility of the `offset`
+ parameter. The offset allows you to slide along the window. A smaller offset
+ gives you faster retries. A larger offset gives you slower retries. An offset
+ pushed too far past the midpoint reduces this function to a static wait.
+ """
+
+ def delay(attempt: int) -> float:
+ return 1 / (1 + math.exp(-step * ((attempt + offset) - midpoint)))
+
+ return delay
+
+
class ConditionalRetryPolicy(RetryPolicy):
"""
A basic policy that can be used to retry a callable based on the result
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index 9c5cdbe02ebe9..1d0a6c05462ae 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -47,13 +47,13 @@
"outcomes_consumer.py",
)
-# Tasks not included here are not sampled
-# If a parent task schedules other tasks you should add it in here or the child
-# tasks will not be sampled
+# Tasks not included here are sampled with `SENTRY_BACKEND_APM_SAMPLING`.
+# If a parent task schedules other tasks, rates propagate to the children.
SAMPLED_TASKS = {
"sentry.tasks.send_ping": settings.SAMPLED_DEFAULT_RATE,
"sentry.tasks.store.process_event": settings.SENTRY_PROCESS_EVENT_APM_SAMPLING,
"sentry.tasks.store.process_event_from_reprocessing": settings.SENTRY_PROCESS_EVENT_APM_SAMPLING,
+ "sentry.tasks.store.save_event": settings.SENTRY_PROCESS_EVENT_APM_SAMPLING,
"sentry.tasks.store.save_event_transaction": settings.SENTRY_PROCESS_EVENT_APM_SAMPLING,
"sentry.tasks.process_suspect_commits": settings.SENTRY_SUSPECT_COMMITS_APM_SAMPLING,
"sentry.tasks.process_commit_context": settings.SENTRY_SUSPECT_COMMITS_APM_SAMPLING,
@@ -65,22 +65,27 @@
"sentry.tasks.relay.invalidate_project_config": settings.SENTRY_RELAY_TASK_APM_SAMPLING,
"sentry.ingest.transaction_clusterer.tasks.spawn_clusterers": settings.SENTRY_RELAY_TASK_APM_SAMPLING,
"sentry.ingest.transaction_clusterer.tasks.cluster_projects": settings.SENTRY_RELAY_TASK_APM_SAMPLING,
- "sentry.tasks.process_buffer.process_incr": 0.01,
+ "sentry.tasks.process_buffer.process_incr": 0.1 * settings.SENTRY_BACKEND_APM_SAMPLING,
"sentry.replays.tasks.delete_recording_segments": settings.SAMPLED_DEFAULT_RATE,
"sentry.replays.tasks.delete_replay_recording_async": settings.SAMPLED_DEFAULT_RATE,
"sentry.tasks.summaries.weekly_reports.schedule_organizations": 1.0,
- "sentry.tasks.summaries.weekly_reports.prepare_organization_report": 0.1,
- "sentry.profiles.task.process_profile": 0.01,
+ "sentry.tasks.summaries.weekly_reports.prepare_organization_report": 0.1
+ * settings.SENTRY_BACKEND_APM_SAMPLING,
+ "sentry.profiles.task.process_profile": 0.1 * settings.SENTRY_BACKEND_APM_SAMPLING,
"sentry.tasks.derive_code_mappings.process_organizations": settings.SAMPLED_DEFAULT_RATE,
"sentry.tasks.derive_code_mappings.derive_code_mappings": settings.SAMPLED_DEFAULT_RATE,
"sentry.monitors.tasks.clock_pulse": 1.0,
"sentry.tasks.auto_enable_codecov": settings.SAMPLED_DEFAULT_RATE,
- "sentry.dynamic_sampling.tasks.boost_low_volume_projects": 0.2,
- "sentry.dynamic_sampling.tasks.boost_low_volume_transactions": 0.2,
- "sentry.dynamic_sampling.tasks.recalibrate_orgs": 0.2,
- "sentry.dynamic_sampling.tasks.sliding_window_org": 0.2,
- "sentry.dynamic_sampling.tasks.custom_rule_notifications": 0.2,
- "sentry.dynamic_sampling.tasks.clean_custom_rule_notifications": 0.2,
+ "sentry.dynamic_sampling.tasks.boost_low_volume_projects": 0.2
+ * settings.SENTRY_BACKEND_APM_SAMPLING,
+ "sentry.dynamic_sampling.tasks.boost_low_volume_transactions": 0.2
+ * settings.SENTRY_BACKEND_APM_SAMPLING,
+ "sentry.dynamic_sampling.tasks.recalibrate_orgs": 0.2 * settings.SENTRY_BACKEND_APM_SAMPLING,
+ "sentry.dynamic_sampling.tasks.sliding_window_org": 0.2 * settings.SENTRY_BACKEND_APM_SAMPLING,
+ "sentry.dynamic_sampling.tasks.custom_rule_notifications": 0.2
+ * settings.SENTRY_BACKEND_APM_SAMPLING,
+ "sentry.dynamic_sampling.tasks.clean_custom_rule_notifications": 0.2
+ * settings.SENTRY_BACKEND_APM_SAMPLING,
"sentry.tasks.embeddings_grouping.backfill_seer_grouping_records_for_project": 1.0,
}
@@ -173,6 +178,10 @@ def get_project_key():
def traces_sampler(sampling_context):
+ # don't sample warmup requests
+ if sampling_context.get("wsgi_environ", {}).get("PATH_INFO") == "/_warmup/":
+ return 0.0
+
# Apply sample_rate from custom_sampling_context
custom_sample_rate = sampling_context.get("sample_rate")
if custom_sample_rate is not None:
@@ -271,6 +280,9 @@ def _get_sdk_options() -> tuple[SdkConfig, Dsns]:
sdk_options["release"] = (
f"backend@{sdk_options['release']}" if "release" in sdk_options else None
)
+ sdk_options.setdefault("_experiments", {}).update(
+ transport_http2=True,
+ )
# Modify SENTRY_SDK_CONFIG in your deployment scripts to specify your desired DSN
dsns = Dsns(
@@ -603,7 +615,7 @@ def bind_organization_context(organization: Organization | RpcOrganization) -> N
scope = Scope.get_isolation_scope()
# XXX(dcramer): this is duplicated in organizationContext.jsx on the frontend
- with sentry_sdk.start_span(op="other", description="bind_organization_context"):
+ with sentry_sdk.start_span(op="other", name="bind_organization_context"):
# This can be used to find errors that may have been mistagged
check_tag_for_scope_bleed("organization.slug", organization.slug)
diff --git a/src/sentry/utils/sentry_apps/webhooks.py b/src/sentry/utils/sentry_apps/webhooks.py
index 8e35192a329b4..094e6b5c5a685 100644
--- a/src/sentry/utils/sentry_apps/webhooks.py
+++ b/src/sentry/utils/sentry_apps/webhooks.py
@@ -21,7 +21,7 @@
from sentry.utils.sentry_apps import SentryAppWebhookRequestsBuffer
if TYPE_CHECKING:
- from sentry.api.serializers import AppPlatformEvent
+ from sentry.sentry_apps.api.serializers.app_platform_event import AppPlatformEvent
from sentry.sentry_apps.services.app.model import RpcSentryApp
diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py
index 82b1f61f7d4ff..83814252f2aec 100644
--- a/src/sentry/utils/snuba.py
+++ b/src/sentry/utils/snuba.py
@@ -13,17 +13,15 @@
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from hashlib import sha1
-from typing import Any, Protocol, TypeVar
+from typing import Any
from urllib.parse import urlparse
-import sentry_protos.snuba.v1alpha.request_common_pb2
import sentry_sdk
import sentry_sdk.scope
import urllib3
from dateutil.parser import parse as parse_datetime
from django.conf import settings
from django.core.cache import cache
-from google.protobuf.message import Message as ProtobufMessage
from snuba_sdk import DeleteQuery, MetricsQuery, Request
from snuba_sdk.legacy import json_to_snql
@@ -138,6 +136,7 @@ def log_snuba_info(content):
"transaction": "segment_name",
"transaction.id": "transaction_id",
"segment.id": "segment_id",
+ "transaction.span_id": "segment_id",
"transaction.op": "transaction_op",
"user": "user",
"user.id": "sentry_tags[user.id]",
@@ -177,6 +176,7 @@ def log_snuba_info(content):
SPAN_EAP_COLUMN_MAP = {
"id": "span_id",
"span_id": "span_id", # ideally this would be temporary, but unfortunately its heavily hardcoded in the FE
+ "parent_span": "parent_span_id",
"organization.id": "organization_id",
"project": "project_id",
"project.id": "project_id",
@@ -200,14 +200,24 @@ def log_snuba_info(content):
"timestamp": "timestamp",
"trace": "trace_id",
"transaction": "segment_name",
+ # `transaction.id` and `segment.id` are going to be replaced by `transaction.span_id`; please do not use them.
+ # transaction.id is "wrong": it points to segment_id to return something during the transition, but represents the
+ # txn event id (32-char uuid). EAP will no longer be storing this.
"transaction.id": "segment_id",
+ "transaction.span_id": "segment_id",
+ "transaction.method": "attr_str[transaction.method]",
"is_transaction": "is_segment",
"segment.id": "segment_id",
# We should be able to delete origin.transaction and just use transaction
"origin.transaction": "segment_name",
+ # Copy-pasted; unsure whether this is true in production
+ "messaging.destination.name": "attr_str[messaging.destination.name]",
+ "messaging.message.id": "attr_str[messaging.message.id]",
"span.status_code": "attr_str[status_code]",
"replay.id": "attr_str[replay_id]",
"span.ai.pipeline.group": "attr_str[ai_pipeline_group]",
+ "trace.status": "attr_str[trace.status]",
+ "browser.name": "attr_str[browser.name]",
"ai.total_tokens.used": "attr_num[ai_total_tokens_used]",
"ai.total_cost": "attr_num[ai_total_cost]",
}
@@ -281,7 +291,7 @@ def log_snuba_info(content):
Dataset.MetricsSummaries: METRICS_SUMMARIES_COLUMN_MAP,
Dataset.PerformanceMetrics: METRICS_COLUMN_MAP,
Dataset.SpansIndexed: SPAN_COLUMN_MAP,
- Dataset.SpansEAP: SPAN_EAP_COLUMN_MAP,
+ Dataset.EventsAnalyticsPlatform: SPAN_EAP_COLUMN_MAP,
Dataset.IssuePlatform: ISSUE_PLATFORM_MAP,
Dataset.Replays: {},
}
@@ -296,7 +306,7 @@ def log_snuba_info(content):
Dataset.Sessions: SESSIONS_FIELD_LIST,
Dataset.IssuePlatform: list(ISSUE_PLATFORM_MAP.values()),
Dataset.SpansIndexed: list(SPAN_COLUMN_MAP.values()),
- Dataset.SpansEAP: list(SPAN_EAP_COLUMN_MAP.values()),
+ Dataset.EventsAnalyticsPlatform: list(SPAN_EAP_COLUMN_MAP.values()),
Dataset.MetricsSummaries: list(METRICS_SUMMARIES_COLUMN_MAP.values()),
}
@@ -778,7 +788,7 @@ def _prepare_query_params(query_params: SnubaQueryParams, referrer: str | None =
"groupby": query_params.groupby,
"conditions": query_params_conditions,
"aggregations": query_params.aggregations,
- "granularity": query_params.rollup, # TODO name these things the same
+ "granularity": query_params.rollup, # TODO: name these things the same
}
)
kwargs = {k: v for k, v in kwargs.items() if v is not None}
@@ -1176,6 +1186,14 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet:
elif error["type"] == "schema":
raise SchemaValidationError(error["message"])
elif error["type"] == "invalid_query":
+ logger.warning(
+ "UnqualifiedQueryError",
+ extra={
+ "error": error["message"],
+ "has_data": "data" in body and body["data"] is not None,
+ "query": snuba_requests_list[index].request.serialize(),
+ },
+ )
raise UnqualifiedQueryError(error["message"])
elif error["type"] == "clickhouse":
raise clickhouse_error_codes_map.get(error["code"], QueryExecutionError)(
@@ -1205,66 +1223,6 @@ def _log_request_query(req: Request) -> None:
)
-RPCResponseType = TypeVar("RPCResponseType", bound=ProtobufMessage)
-
-
-class SnubaRPCRequest(Protocol):
- def SerializeToString(self, deterministic: bool = ...) -> bytes:
- ...
-
- @property
- def meta(self) -> sentry_protos.snuba.v1alpha.request_common_pb2.RequestMeta:
- ...
-
-
-def rpc(req: SnubaRPCRequest, resp_type: type[RPCResponseType]) -> RPCResponseType:
- """
- You want to call a snuba RPC. Here's how you do it:
-
- start_time_proto = ProtobufTimestamp()
- start_time_proto.FromDatetime(start)
- end_time_proto = ProtobufTimestamp()
- end_time_proto.FromDatetime(end)
- aggregate_req = AggregateBucketRequest(
- meta=RequestMeta(
- organization_id=organization.id,
- cogs_category="events_analytics_platform",
- referrer=referrer,
- project_ids=[project.id for project in projects],
- start_timestamp=start_time_proto,
- end_timestamp=end_time_proto,
- ),
- aggregate=AggregateBucketRequest.FUNCTION_SUM,
- filter=TraceItemFilter(
- comparison_filter=ComparisonFilter(
- key=AttributeKey(name="op", type=AttributeKey.Type.TYPE_STRING),
- value=AttributeValue(val_str="ai.run"),
- )
- ),
- granularity_secs=60,
- key=AttributeKey(
- name="duration", type=AttributeKey.TYPE_FLOAT
- ),
- attribute_key_transform_context=AttributeKeyTransformContext(),
- )
- aggregate_resp = snuba.rpc(aggregate_req, AggregateBucketResponse)
- """
- referrer = req.meta.referrer
- with sentry_sdk.start_span(op="snuba_rpc.run", description=req.__class__.__name__) as span:
- span.set_tag("snuba.referrer", referrer)
- http_resp = _snuba_pool.urlopen(
- "POST",
- f"/rpc/{req.__class__.__name__}",
- body=req.SerializeToString(),
- headers={
- "referer": referrer,
- },
- )
- resp = resp_type()
- resp.ParseFromString(http_resp.data)
- return resp
-
-
RawResult = tuple[str, urllib3.response.HTTPResponse, Translator, Translator]
@@ -1332,11 +1290,11 @@ def _raw_delete_query(
# Enter hub such that http spans are properly nested
with timer("delete_query"):
referrer = headers.get("referer", "unknown")
- with sentry_sdk.start_span(op="snuba_delete.validation", description=referrer) as span:
+ with sentry_sdk.start_span(op="snuba_delete.validation", name=referrer) as span:
span.set_tag("snuba.referrer", referrer)
body = request.serialize()
- with sentry_sdk.start_span(op="snuba_delete.run", description=body) as span:
+ with sentry_sdk.start_span(op="snuba_delete.run", name=body) as span:
span.set_tag("snuba.referrer", referrer)
return _snuba_pool.urlopen(
"DELETE", f"/{query.storage_name}", body=body, headers=headers
@@ -1350,11 +1308,11 @@ def _raw_mql_query(request: Request, headers: Mapping[str, str]) -> urllib3.resp
# TODO: This can be changed back to just `serialize` after we remove SnQL support for MetricsQuery
serialized_req = request.serialize()
- with sentry_sdk.start_span(op="snuba_mql.validation", description=referrer) as span:
+ with sentry_sdk.start_span(op="snuba_mql.validation", name=referrer) as span:
span.set_tag("snuba.referrer", referrer)
body = serialized_req
- with sentry_sdk.start_span(op="snuba_mql.run", description=serialized_req) as span:
+ with sentry_sdk.start_span(op="snuba_mql.run", name=serialized_req) as span:
span.set_tag("snuba.referrer", referrer)
return _snuba_pool.urlopen(
"POST", f"/{request.dataset}/mql", body=body, headers=headers
@@ -1367,11 +1325,11 @@ def _raw_snql_query(request: Request, headers: Mapping[str, str]) -> urllib3.res
referrer = headers.get("referer", "")
serialized_req = request.serialize()
- with sentry_sdk.start_span(op="snuba_snql.validation", description=referrer) as span:
+ with sentry_sdk.start_span(op="snuba_snql.validation", name=referrer) as span:
span.set_tag("snuba.referrer", referrer)
body = serialized_req
- with sentry_sdk.start_span(op="snuba_snql.run", description=serialized_req) as span:
+ with sentry_sdk.start_span(op="snuba_snql.run", name=serialized_req) as span:
span.set_tag("snuba.referrer", referrer)
return _snuba_pool.urlopen(
"POST", f"/{request.dataset}/snql", body=body, headers=headers
@@ -1465,7 +1423,7 @@ def _resolve_column(col):
if isinstance(col, int) or isinstance(col, float):
return col
if (
- dataset != Dataset.SpansEAP
+ dataset != Dataset.EventsAnalyticsPlatform
and isinstance(col, str)
and (col.startswith("tags[") or QUOTED_LITERAL_RE.match(col))
):
@@ -1476,7 +1434,7 @@ def _resolve_column(col):
if isinstance(col, (list, tuple)) or col in ("project_id", "group_id"):
return col
- elif dataset == Dataset.SpansEAP:
+ elif dataset == Dataset.EventsAnalyticsPlatform:
if isinstance(col, str) and col.startswith("sentry_tags["):
# Replace the first instance of sentry tags with attr str instead
return col.replace("sentry_tags", "attr_str", 1)
@@ -1508,7 +1466,7 @@ def _resolve_column(col):
span_op_breakdown_name = get_span_op_breakdown_name(col)
if "span_op_breakdowns_key" in DATASETS[dataset] and span_op_breakdown_name:
return f"span_op_breakdowns[{span_op_breakdown_name}]"
- if dataset == Dataset.SpansEAP:
+ if dataset == Dataset.EventsAnalyticsPlatform:
return f"attr_str[{col}]"
return f"tags[{col}]"
@@ -1687,7 +1645,7 @@ def aliased_query_params(
)
-# TODO (evanh) Since we are assuming that all string values are columns,
+# TODO: (evanh) Since we are assuming that all string values are columns,
# this will get tricky if we ever have complex columns where there are
# string arguments to the functions that aren't columns
def resolve_complex_column(col, resolve_func, ignored):
diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py
new file mode 100644
index 0000000000000..730a78ebb4cb2
--- /dev/null
+++ b/src/sentry/utils/snuba_rpc.py
@@ -0,0 +1,84 @@
+from __future__ import annotations
+
+from typing import Protocol, TypeVar
+
+import sentry_protos.snuba.v1alpha.request_common_pb2
+import sentry_sdk
+import sentry_sdk.scope
+from google.protobuf.message import Message as ProtobufMessage
+from sentry_protos.snuba.v1.error_pb2 import Error as ErrorProto
+
+from sentry.utils.snuba import SnubaError, _snuba_pool
+
+RPCResponseType = TypeVar("RPCResponseType", bound=ProtobufMessage)
+
+
+class SnubaRPCError(SnubaError):
+ pass
+
+
+class SnubaRPCRequest(Protocol):
+ def SerializeToString(self, deterministic: bool = ...) -> bytes:
+ ...
+
+ @property
+ def meta(self) -> sentry_protos.snuba.v1alpha.request_common_pb2.RequestMeta:
+ ...
+
+
+def rpc(req: SnubaRPCRequest, resp_type: type[RPCResponseType]) -> RPCResponseType:
+ """
+ You want to call a snuba RPC. Here's how you do it:
+
+ start_time_proto = ProtobufTimestamp()
+ start_time_proto.FromDatetime(start)
+ end_time_proto = ProtobufTimestamp()
+ end_time_proto.FromDatetime(end)
+ aggregate_req = AggregateBucketRequest(
+ meta=RequestMeta(
+ organization_id=organization.id,
+ cogs_category="events_analytics_platform",
+ referrer=referrer,
+ project_ids=[project.id for project in projects],
+ start_timestamp=start_time_proto,
+ end_timestamp=end_time_proto,
+ ),
+ aggregate=AggregateBucketRequest.FUNCTION_SUM,
+ filter=TraceItemFilter(
+ comparison_filter=ComparisonFilter(
+ key=AttributeKey(name="op", type=AttributeKey.Type.TYPE_STRING),
+ value=AttributeValue(val_str="ai.run"),
+ )
+ ),
+ granularity_secs=60,
+ key=AttributeKey(
+ name="duration", type=AttributeKey.TYPE_FLOAT
+ ),
+ attribute_key_transform_context=AttributeKeyTransformContext(),
+ )
+ aggregate_resp = snuba.rpc(aggregate_req, AggregateBucketResponse)
+ """
+ referrer = req.meta.referrer
+ with sentry_sdk.start_span(op="snuba_rpc.run", name=req.__class__.__name__) as span:
+ span.set_tag("snuba.referrer", referrer)
+
+ cls = req.__class__
+ class_name = cls.__name__
+ class_version = cls.__module__.split(".", 3)[2]
+
+ http_resp = _snuba_pool.urlopen(
+ "POST",
+ f"/rpc/{class_name}/{class_version}",
+ body=req.SerializeToString(),
+ headers={
+ "referer": referrer,
+ },
+ )
+ if http_resp.status != 200:
+ error = ErrorProto()
+ error.ParseFromString(http_resp.data)
+ raise SnubaRPCError(error)
+
+ resp = resp_type()
+ resp.ParseFromString(http_resp.data)
+ return resp
diff --git a/src/sentry/utils/urls.py b/src/sentry/utils/urls.py
index 1711f006a79a2..b222bdfe68b13 100644
--- a/src/sentry/utils/urls.py
+++ b/src/sentry/utils/urls.py
@@ -73,3 +73,12 @@ def urlsplit_best_effort(s: str) -> tuple[str, str, str, str]:
return scheme, netloc, path, query
else:
return parsed.scheme, parsed.netloc, parsed.path, parsed.query
+
+
+def parse_id_or_slug_param(id_or_slug: str | None) -> tuple[int | None, str | None]:
+ if not id_or_slug:
+ return None, None
+
+ if id_or_slug.isnumeric():
+ return int(id_or_slug), None
+ return None, id_or_slug
diff --git a/src/sentry/web/client_config.py b/src/sentry/web/client_config.py
index 1f4b310f74720..73a9bd1e47c85 100644
--- a/src/sentry/web/client_config.py
+++ b/src/sentry/web/client_config.py
@@ -216,8 +216,6 @@ def tracing_data(self) -> Mapping[str, str]:
@property
def enabled_features(self) -> Iterable[str]:
- if self.last_org and features.has("organizations:react-router-6", self.last_org):
- yield "organizations:react-router-6"
if features.has("organizations:create", actor=self.user):
yield "organizations:create"
if auth.has_user_registration():
diff --git a/src/sentry/web/debug_urls.py b/src/sentry/web/debug_urls.py
index 04d457e434c68..200e13f9954ea 100644
--- a/src/sentry/web/debug_urls.py
+++ b/src/sentry/web/debug_urls.py
@@ -3,6 +3,7 @@
import sentry.web.frontend.debug.mail
from sentry.integrations.web.debug.debug_notify_disable import DebugNotifyDisableView
+from sentry.sentry_apps.web.debug_sentry_app_notify_disable import DebugSentryAppNotifyDisableView
from sentry.web.frontend.debug import debug_auth_views
from sentry.web.frontend.debug.debug_assigned_email import (
DebugAssignedEmailView,
@@ -62,9 +63,6 @@
DebugResolvedInReleaseEmailView,
DebugResolvedInReleaseUpcomingEmailView,
)
-from sentry.web.frontend.debug.debug_sentry_app_notify_disable import (
- DebugSentryAppNotifyDisableView,
-)
from sentry.web.frontend.debug.debug_setup_2fa_email import DebugSetup2faEmailView
from sentry.web.frontend.debug.debug_sso_link_email import (
DebugSsoLinkedEmailView,
diff --git a/src/sentry/web/frontend/cli.py b/src/sentry/web/frontend/cli.py
new file mode 100644
index 0000000000000..597d9b7f037eb
--- /dev/null
+++ b/src/sentry/web/frontend/cli.py
@@ -0,0 +1,161 @@
+from urllib.parse import quote_plus
+
+from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
+
+from sentry.silo.base import control_silo_function
+from sentry.utils import metrics
+
+SCRIPT = r"""#!/bin/sh
+set -eu
+
+# allow overriding the version
+VERSION=${SENTRY_CLI_VERSION:-latest}
+
+PLATFORM=`uname -s`
+ARCH=`uname -m`
+
+case "$PLATFORM" in
+ CYGWIN*) PLATFORM="Windows"
+ ;;
+ MINGW*) PLATFORM="Windows"
+ ;;
+ MSYS*) PLATFORM="Windows"
+ ;;
+ Darwin) ARCH="universal"
+ ;;
+esac
+
+case "$ARCH" in
+ armv6*) ARCH="armv7"
+ ;;
+ armv7*) ARCH="armv7"
+ ;;
+ armv8*) ARCH="aarch64"
+ ;;
+ armv64*) ARCH="aarch64"
+ ;;
+ aarch64*) ARCH="aarch64"
+ ;;
+esac
+
+# If the install directory is not set, set it to a default
+if [ -z ${INSTALL_DIR+x} ]; then
+ INSTALL_DIR=/usr/local/bin
+fi
+if [ -z ${INSTALL_PATH+x} ]; then
+ INSTALL_PATH="${INSTALL_DIR}/sentry-cli"
+fi
+
+DOWNLOAD_URL="https://release-registry.services.sentry.io/apps/sentry-cli/${VERSION}?response=download&arch=${ARCH}&platform=${PLATFORM}&package=sentry-cli"
+
+echo "This script will automatically install sentry-cli (${VERSION}) for you."
+echo "Installation path: ${INSTALL_PATH}"
+if [ "x$(id -u)" = "x0" ]; then
+ echo "Warning: this script is currently running as root. This is dangerous. "
+ echo " Instead run it as normal user. We will sudo as needed."
+fi
+
+if [ -f "$INSTALL_PATH" ]; then
+ echo "error: sentry-cli is already installed."
+ echo " run \"sentry-cli update\" to update to latest version"
+ exit 1
+fi
+
+if ! hash curl 2> /dev/null; then
+ echo "error: you do not have 'curl' installed which is required for this script."
+ exit 1
+fi
+
+TEMP_FILE=`mktemp "${TMPDIR:-/tmp}/.sentrycli.XXXXXXXX"`
+TEMP_HEADER_FILE=`mktemp "${TMPDIR:-/tmp}/.sentrycli-headers.XXXXXXXX"`
+
+cleanup() {
+ rm -f "$TEMP_FILE"
+ rm -f "$TEMP_HEADER_FILE"
+}
+
+trap cleanup EXIT
+HTTP_CODE=$(curl -SL --progress-bar "$DOWNLOAD_URL" -D "$TEMP_HEADER_FILE" --output "$TEMP_FILE" --write-out "%{http_code}")
+if [ ${HTTP_CODE} -lt 200 ] || [ ${HTTP_CODE} -gt 299 ]; then
+ echo "error: your platform and architecture (${PLATFORM}-${ARCH}) is unsupported."
+ exit 1
+fi
+
+for PYTHON in python3 python2 python ''; do
+ if hash "$PYTHON"; then
+ break
+ fi
+done
+
+if [ "$PYTHON" ]; then
+ "$PYTHON" - < /dev/null; then
+ sudo -k sh -c "mkdir -p \"$(dirname "$INSTALL_PATH")\" && mv \"$TEMP_FILE\" \"$INSTALL_PATH\""
+fi
+
+echo "Successfully installed $("$INSTALL_PATH" --version)"
+
+VERSION=$("$INSTALL_PATH" --version | awk '{print $2}')
+MAJOR=$(echo "$VERSION" | cut -d. -f1)
+MINOR=$(echo "$VERSION" | cut -d. -f2)
+if (test -d "${HOME}/.oh-my-zsh") 2>/dev/null && [ $MAJOR -eq 2 ] && [ $MINOR -ge 22 ]; then
+ echo 'Detected Oh My Zsh, installing Zsh completions...'
+ if (mkdir -p "${HOME}/.oh-my-zsh/completions") 2>&1 && ("$INSTALL_PATH" completions zsh > "${HOME}/.oh-my-zsh/completions/_sentry_cli") 2>&1; then
+ echo "Successfully installed Zsh completions."
+ else
+ echo 'Warning: failed to install Zsh completions.'
+ fi
+fi
+
+echo 'Done!'
+"""
+
+
+def get_cli(request: HttpRequest) -> HttpResponse:
+ metrics.incr("cli.download_script")
+ return HttpResponse(SCRIPT, content_type="text/plain")
+
+
+@control_silo_function
+def get_cli_download_url(request: HttpRequest, platform: str, arch: str) -> HttpResponseRedirect:
+ url = "https://release-registry.services.sentry.io/apps/sentry-cli/latest?response=download&arch={}&platform={}&package=sentry-cli".format(
+ quote_plus(arch),
+ quote_plus(platform),
+ )
+ return HttpResponseRedirect(url)
diff --git a/src/sentry/web/frontend/debug/debug_incident_activity_email.py b/src/sentry/web/frontend/debug/debug_incident_activity_email.py
index 4d8509fcb5f95..926376b9378c7 100644
--- a/src/sentry/web/frontend/debug/debug_incident_activity_email.py
+++ b/src/sentry/web/frontend/debug/debug_incident_activity_email.py
@@ -17,7 +17,10 @@ def get(self, request: HttpRequest) -> HttpResponse:
id=2, identifier=123, organization=organization, title="Something broke"
)
activity = IncidentActivity(
- incident=incident, user=user, type=IncidentActivityType.COMMENT.value, comment="hi"
+ incident=incident,
+ user_id=user.id,
+ type=IncidentActivityType.COMMENT.value,
+ comment="hi",
)
email = generate_incident_activity_email(activity, user)
return MailPreview(
diff --git a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
index 53ebfb3b00920..db58f88c1884a 100644
--- a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
+++ b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
@@ -41,7 +41,7 @@ def get_context(self, request, incident_trigger_mock, user_option_mock):
organization=organization,
title="Something broke",
alert_rule=alert_rule,
- status=IncidentStatus.CRITICAL,
+ status=IncidentStatus.CRITICAL.value,
)
trigger = AlertRuleTrigger(alert_rule=alert_rule)
diff --git a/src/sentry/web/frontend/debug/debug_mfa_added_email.py b/src/sentry/web/frontend/debug/debug_mfa_added_email.py
index 449e359350f8a..6853396103ca0 100644
--- a/src/sentry/web/frontend/debug/debug_mfa_added_email.py
+++ b/src/sentry/web/frontend/debug/debug_mfa_added_email.py
@@ -1,5 +1,6 @@
import datetime
+from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest, HttpResponse
from django.views.generic import View
@@ -11,6 +12,9 @@
class DebugMfaAddedEmailView(View):
def get(self, request: HttpRequest) -> HttpResponse:
+ if isinstance(request.user, AnonymousUser):
+ return HttpResponse(status=401)
+
authenticator = Authenticator(id=0, type=3, user_id=request.user.id) # u2f
email = generate_security_email(
diff --git a/src/sentry/web/frontend/debug/debug_mfa_removed_email.py b/src/sentry/web/frontend/debug/debug_mfa_removed_email.py
index 0f868207f8e67..03cb98317c83c 100644
--- a/src/sentry/web/frontend/debug/debug_mfa_removed_email.py
+++ b/src/sentry/web/frontend/debug/debug_mfa_removed_email.py
@@ -1,5 +1,6 @@
import datetime
+from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest, HttpResponse
from django.views.generic import View
@@ -11,6 +12,9 @@
class DebugMfaRemovedEmailView(View):
def get(self, request: HttpRequest) -> HttpResponse:
+ if isinstance(request.user, AnonymousUser):
+ return HttpResponse(status=401)
+
authenticator = Authenticator(id=0, type=3, user_id=request.user.id) # u2f
email = generate_security_email(
diff --git a/src/sentry/web/frontend/debug/mail.py b/src/sentry/web/frontend/debug/mail.py
index 29136973378cc..2c83f192d9274 100644
--- a/src/sentry/web/frontend/debug/mail.py
+++ b/src/sentry/web/frontend/debug/mail.py
@@ -33,7 +33,7 @@
from sentry.issues.grouptype import NoiseConfig
from sentry.issues.occurrence_consumer import process_event_and_issue_occurrence
from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka
-from sentry.mail.notifications import get_builder_args
+from sentry.mail.notifications import RecipientT, get_builder_args
from sentry.models.activity import Activity
from sentry.models.group import Group, GroupStatus
from sentry.models.organization import Organization
@@ -58,7 +58,6 @@
TEST_FEEDBACK_ISSUE_OCCURENCE,
TEST_ISSUE_OCCURRENCE,
)
-from sentry.types.actor import Actor
from sentry.types.group import GroupSubStatus
from sentry.users.models.lostpasswordhash import LostPasswordHash
from sentry.utils import json, loremipsum
@@ -822,7 +821,7 @@ def org_delete_confirm(request):
# Used to generate debug email views from a notification
def render_preview_email_for_notification(
- notification: BaseNotification, recipient: Actor
+ notification: BaseNotification, recipient: RecipientT
) -> HttpResponse:
shared_context = notification.get_context()
basic_args = get_builder_args(notification, recipient, shared_context)
diff --git a/src/sentry/web/frontend/error_page_embed.py b/src/sentry/web/frontend/error_page_embed.py
index fdfc803460ce4..68c608e88303b 100644
--- a/src/sentry/web/frontend/error_page_embed.py
+++ b/src/sentry/web/frontend/error_page_embed.py
@@ -11,7 +11,7 @@
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
-from sentry import eventstore, features
+from sentry import eventstore
from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, shim_to_feedback
from sentry.models.options.project_option import ProjectOption
from sentry.models.project import Project
@@ -194,12 +194,7 @@ def dispatch(self, request: HttpRequest) -> HttpResponse:
)
project = Project.objects.get(id=report.project_id)
- if (
- features.has(
- "organizations:user-feedback-ingest", project.organization, actor=request.user
- )
- and event is not None
- ):
+ if event is not None:
shim_to_feedback(
{
"name": report.name,
diff --git a/src/sentry/web/frontend/setup_wizard.py b/src/sentry/web/frontend/setup_wizard.py
index 4c54deefa6a6f..bac02865becdb 100644
--- a/src/sentry/web/frontend/setup_wizard.py
+++ b/src/sentry/web/frontend/setup_wizard.py
@@ -6,8 +6,9 @@
from urllib.parse import parse_qsl, urlparse, urlunparse
from django.conf import settings
-from django.http import HttpRequest, HttpResponse
+from django.http import Http404, HttpRequest, HttpResponse, HttpResponseBadRequest
from django.http.response import HttpResponseBase
+from django.shortcuts import get_object_or_404
from sentry.api.endpoints.setup_wizard import SETUP_WIZARD_CACHE_KEY, SETUP_WIZARD_CACHE_TIMEOUT
from sentry.api.serializers import serialize
@@ -19,12 +20,14 @@
from sentry.models.organizationmapping import OrganizationMapping
from sentry.models.organizationmembermapping import OrganizationMemberMapping
from sentry.models.orgauthtoken import OrgAuthToken
+from sentry.projects.services.project.model import RpcProject
from sentry.projects.services.project.service import project_service
-from sentry.projects.services.project_key.model import ProjectKeyRole
+from sentry.projects.services.project_key.model import ProjectKeyRole, RpcProjectKey
from sentry.projects.services.project_key.service import project_key_service
from sentry.types.token import AuthTokenType
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
+from sentry.utils import json
from sentry.utils.http import absolute_uri
from sentry.utils.security.orgauthtoken_token import (
SystemUrlPrefixMissingException,
@@ -59,10 +62,13 @@ def get(self, request: HttpRequest, wizard_hash) -> HttpResponseBase:
This opens a page where with an active session fill stuff into the cache
Redirects to organization whenever cache has been deleted
"""
- context = {"hash": wizard_hash}
- key = f"{SETUP_WIZARD_CACHE_KEY}{wizard_hash}"
+ context = {"hash": wizard_hash, "enableProjectSelection": False}
+ cache_key = f"{SETUP_WIZARD_CACHE_KEY}{wizard_hash}"
+
+ org_slug = request.GET.get("org_slug")
+ project_slug = request.GET.get("project_slug")
- wizard_data = default_cache.get(key)
+ wizard_data = default_cache.get(cache_key)
if wizard_data is None:
return self.redirect_to_org(request)
@@ -74,75 +80,121 @@ def get(self, request: HttpRequest, wizard_hash) -> HttpResponseBase:
status=OrganizationStatus.ACTIVE,
).order_by("-date_created")
- # TODO: Make wizard compatible with hybrid cloud. For now, we combine all region data for these
- # responses, but project names/slugs aren't unique across regions which could confuse some users.
- # Wizard should display region beside project/orgs or have a step to ask which region.
-
# {'us': {'org_ids': [...], 'projects': [...], 'keys': [...]}}
region_data_map = defaultdict(lambda: defaultdict(list))
org_mappings_map = {}
for mapping in org_mappings:
region_data_map[mapping.region_name]["org_ids"].append(mapping.organization_id)
- status = OrganizationStatus(mapping.status)
- serialized_mapping = {
- "id": mapping.organization_id,
- "name": mapping.name,
- "slug": mapping.slug,
- "region": mapping.region_name,
- "status": {"id": status.name.lower(), "name": status.label},
- }
+ serialized_mapping = serialize_org_mapping(mapping)
org_mappings_map[mapping.organization_id] = serialized_mapping
- for region_name, region_data in region_data_map.items():
- org_ids = region_data["org_ids"]
- projects = project_service.get_many_by_organizations(
- region_name=region_name, organization_ids=org_ids
- )
- region_data["projects"] = projects
-
- keys_map = defaultdict(list)
- for region_name, region_data in region_data_map.items():
- project_ids = [rpc_project.id for rpc_project in region_data["projects"]]
- keys = project_key_service.get_project_keys_by_region(
- region_name=region_name,
- project_ids=project_ids,
- role=ProjectKeyRole.store,
+ context["organizations"] = list(org_mappings_map.values())
+
+ # If org_slug and project_slug are provided, we will use them to select the project
+ # If the project is not found or the slugs are not provided, we will show the project selection
+ if org_slug is not None and project_slug is not None:
+ target_org_mapping = next(
+ (mapping for mapping in org_mappings if mapping.slug == org_slug), None
)
- region_data["keys"] = keys
- for key in region_data["keys"]:
- serialized_key = {
- "dsn": {"public": key.dsn_public},
- "isActive": key.is_active,
- }
- keys_map[key.project_id].append(serialized_key)
-
- filled_projects = []
- for region_name, region_data in region_data_map.items():
- for project in region_data["projects"]:
- enriched_project = {
- "slug": project.slug,
- "id": project.id,
- "name": project.name,
- "platform": project.platform,
- "status": STATUS_LABELS.get(project.status, "unknown"),
- }
- # The wizard only reads the a few fields so serializing the mapping should work fine
- enriched_project["organization"] = org_mappings_map[project.organization_id]
- enriched_project["keys"] = keys_map[project.id]
- filled_projects.append(enriched_project)
-
- # Fetching or creating a token
- serialized_token = get_token(org_mappings, request.user)
-
- result = {"apiKeys": serialized_token, "projects": filled_projects}
+ if target_org_mapping is not None:
+ target_project = project_service.get_by_slug(
+ slug=project_slug, organization_id=target_org_mapping.organization_id
+ )
- key = f"{SETUP_WIZARD_CACHE_KEY}{wizard_hash}"
- default_cache.set(key, result, SETUP_WIZARD_CACHE_TIMEOUT)
+ if target_project is not None:
+ cache_data = get_cache_data(
+ mapping=target_org_mapping, project=target_project, user=request.user
+ )
+ default_cache.set(cache_key, cache_data, SETUP_WIZARD_CACHE_TIMEOUT)
- context["organizations"] = list(org_mappings_map.values())
+ context["enableProjectSelection"] = False
+ return render_to_response("sentry/setup-wizard.html", context, request)
+
+ context["enableProjectSelection"] = True
return render_to_response("sentry/setup-wizard.html", context, request)
+ def post(self, request: HttpRequest, wizard_hash=None) -> HttpResponse:
+ """
+ This updates the cache content for a specific hash
+ """
+ json_data = json.loads(request.body)
+ organization_id = json_data.get("organizationId", None)
+ project_id = json_data.get("projectId", None)
+
+ if organization_id is None or project_id is None or wizard_hash is None:
+ return HttpResponseBadRequest()
+
+ member_org_ids = OrganizationMemberMapping.objects.filter(
+ user_id=request.user.id
+ ).values_list("organization_id", flat=True)
+ mapping = get_object_or_404(
+ OrganizationMapping,
+ organization_id=organization_id,
+ organization_id__in=member_org_ids,
+ )
+
+ project = project_service.get_by_id(organization_id=mapping.organization_id, id=project_id)
+ if project is None:
+ raise Http404()
+
+ cache_data = get_cache_data(mapping=mapping, project=project, user=request.user)
+
+ key = f"{SETUP_WIZARD_CACHE_KEY}{wizard_hash}"
+ default_cache.set(key, cache_data, SETUP_WIZARD_CACHE_TIMEOUT)
+ return HttpResponse(status=200)
+
+
+def serialize_org_mapping(mapping: OrganizationMapping):
+ status = OrganizationStatus(mapping.status)
+ return {
+ "id": mapping.organization_id,
+ "name": mapping.name,
+ "slug": mapping.slug,
+ "region": mapping.region_name,
+ "status": {"id": status.name.lower(), "name": status.label},
+ }
+
+
+def serialize_project_key(project_key: RpcProjectKey):
+ return {
+ "dsn": {"public": project_key.dsn_public},
+ "isActive": project_key.is_active,
+ }
+
+
+def serialize_project(project: RpcProject, organization: dict, keys: list[dict]):
+ return {
+ "slug": project.slug,
+ "id": project.id,
+ "name": project.name,
+ "platform": project.platform,
+ "status": STATUS_LABELS.get(project.status, "unknown"),
+ "organization": organization,
+ "keys": keys,
+ }
+
+
+def get_cache_data(mapping: OrganizationMapping, project: RpcProject, user: RpcUser):
+ project_key = project_key_service.get_project_key(
+ organization_id=mapping.organization_id,
+ project_id=project.id,
+ role=ProjectKeyRole.store,
+ )
+ if project_key is None:
+ raise Http404()
+
+ enriched_project = serialize_project(
+ project=project,
+        # The wizard only reads a few fields so serializing the mapping should work fine
+ organization=serialize_org_mapping(mapping),
+ keys=[serialize_project_key(project_key)],
+ )
+
+ serialized_token = get_org_token(mapping, user)
+
+ return {"apiKeys": serialized_token, "projects": [enriched_project]}
+
def get_token(mappings: list[OrganizationMapping], user: RpcUser):
can_use_org_tokens = len(mappings) == 1
diff --git a/src/sentry/web/urls.py b/src/sentry/web/urls.py
index 18ae9ac1aaec9..f4fcb59ad8972 100644
--- a/src/sentry/web/urls.py
+++ b/src/sentry/web/urls.py
@@ -9,24 +9,29 @@
from django.views.generic import RedirectView
from sentry.api.endpoints.oauth_userinfo import OAuthUserInfoEndpoint
+from sentry.api.endpoints.warmup import WarmupEndpoint
from sentry.auth.providers.saml2.provider import SAML2AcceptACSView, SAML2MetadataView, SAML2SLSView
from sentry.charts.endpoints import serve_chartcuterie_config
from sentry.integrations.web.doc_integration_avatar import DocIntegrationAvatarPhotoView
from sentry.integrations.web.organization_integration_setup import OrganizationIntegrationSetupView
-from sentry.toolbar.iframe_view import IframeView
-from sentry.toolbar.login_success_view import LoginSuccessView
+from sentry.sentry_apps.web.sentryapp_avatar import SentryAppAvatarPhotoView
+from sentry.toolbar.views.iframe_view import IframeView
+from sentry.toolbar.views.login_success_view import LoginSuccessView
from sentry.users.web import accounts
from sentry.users.web.account_identity import AccountIdentityAssociateView
from sentry.users.web.user_avatar import UserAvatarPhotoView
from sentry.web import api
-from sentry.web.frontend import generic
+from sentry.web.frontend import csrf_failure, generic
from sentry.web.frontend.auth_channel_login import AuthChannelLoginView
from sentry.web.frontend.auth_close import AuthCloseView
from sentry.web.frontend.auth_login import AuthLoginView
from sentry.web.frontend.auth_logout import AuthLogoutView
from sentry.web.frontend.auth_organization_login import AuthOrganizationLoginView
from sentry.web.frontend.auth_provider_login import AuthProviderLoginView
+from sentry.web.frontend.cli import get_cli, get_cli_download_url
from sentry.web.frontend.disabled_member_view import DisabledMemberView
+from sentry.web.frontend.error_404 import Error404View
+from sentry.web.frontend.error_500 import Error500View
from sentry.web.frontend.error_page_embed import ErrorPageEmbedView
from sentry.web.frontend.group_event_json import GroupEventJsonView
from sentry.web.frontend.group_plugin_action import GroupPluginActionView
@@ -46,7 +51,6 @@
from sentry.web.frontend.react_page import GenericReactPageView, ReactPageView
from sentry.web.frontend.reactivate_account import ReactivateAccountView
from sentry.web.frontend.release_webhook import ReleaseWebhookView
-from sentry.web.frontend.sentryapp_avatar import SentryAppAvatarPhotoView
from sentry.web.frontend.setup_wizard import SetupWizardView
from sentry.web.frontend.shared_group_details import SharedGroupDetailsView
from sentry.web.frontend.sudo import SudoView
@@ -60,7 +64,23 @@
generic_react_page_view = GenericReactPageView.as_view()
react_page_view = ReactPageView.as_view()
-urlpatterns: list[URLResolver | URLPattern] = []
+urlpatterns: list[URLResolver | URLPattern] = [
+ re_path(
+ r"^500/",
+ Error500View.as_view(),
+ name="error-500",
+ ),
+ re_path(
+ r"^404/",
+ Error404View.as_view(),
+ name="error-404",
+ ),
+ re_path(
+ r"^403-csrf-failure/",
+ csrf_failure.view,
+ name="error-403-csrf-failure",
+ ),
+]
if getattr(settings, "DEBUG_VIEWS", settings.DEBUG):
from sentry.web.debug_urls import urlpatterns as debug_urls
@@ -92,6 +112,13 @@
]
urlpatterns += [
+ # warmup, used to initialize any connections / pre-load
+ # the application so that user initiated requests are faster
+ re_path(
+ r"^_warmup/$",
+ WarmupEndpoint.as_view(),
+ name="sentry-warmup",
+ ),
re_path(
r"^api/(?P<project_id>[\w_-]+)/crossdomain\.xml$",
api.crossdomain_xml,
@@ -128,6 +155,13 @@
JavaScriptSdkLoader.as_view(),
name="sentry-js-sdk-loader",
),
+ # docs reference this for acquiring the sentry cli
+ re_path(r"^get-cli/$", get_cli, name="get_cli_script"),
+ re_path(
+        r"^get-cli/(?P<version>[^/]+)/(?P<platform>[^/]+)/?$",
+ get_cli_download_url,
+ name="get_cli_download_url",
+ ),
# Versioned API
re_path(
r"^api/0/",
diff --git a/src/sentry/workflow_engine/migrations/0006_data_conditions.py b/src/sentry/workflow_engine/migrations/0006_data_conditions.py
new file mode 100644
index 0000000000000..aabaac0d0b846
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0006_data_conditions.py
@@ -0,0 +1,209 @@
+# Generated by Django 5.1.1 on 2024-09-26 00:11
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+import sentry.db.models.fields.bounded
+import sentry.db.models.fields.foreignkey
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0767_add_selected_aggregate_to_dashboards_widget_query"),
+ ("workflow_engine", "0005_data_source_detector"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="Action",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ ("required", models.BooleanField(default=False)),
+ ("type", models.TextField()),
+ ("data", models.JSONField(default=dict)),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ migrations.CreateModel(
+ name="DataConditionGroup",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ ("logic_type", models.CharField(default="any", max_length=200)),
+ (
+ "organization",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
+ ),
+ ),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ migrations.CreateModel(
+ name="DataCondition",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ ("condition", models.CharField(max_length=200)),
+ ("comparison", models.JSONField()),
+ ("condition_result", models.JSONField()),
+ ("type", models.CharField(max_length=200)),
+ (
+ "condition_group",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="workflow_engine.dataconditiongroup",
+ ),
+ ),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ migrations.AddField(
+ model_name="detector",
+ name="workflow_condition_group",
+ field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="workflow_engine.dataconditiongroup",
+ unique=True,
+ ),
+ ),
+ migrations.AddField(
+ model_name="workflow",
+ name="when_condition_group",
+ field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="workflow_engine.dataconditiongroup",
+ ),
+ ),
+ migrations.CreateModel(
+ name="DataConditionGroupAction",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ (
+ "action",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="workflow_engine.action"
+ ),
+ ),
+ (
+ "condition_group",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="workflow_engine.dataconditiongroup",
+ ),
+ ),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ migrations.CreateModel(
+ name="DetectorWorkflow",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ (
+ "detector",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="workflow_engine.detector"
+ ),
+ ),
+ (
+ "workflow",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="workflow_engine.workflow"
+ ),
+ ),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ migrations.CreateModel(
+ name="WorkflowDataConditionGroup",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ (
+ "condition_group",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="workflow_engine.dataconditiongroup",
+ unique=True,
+ ),
+ ),
+ (
+ "workflow",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="workflow_engine.workflow"
+ ),
+ ),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/migrations/0007_loosen_workflow_action_relationship.py b/src/sentry/workflow_engine/migrations/0007_loosen_workflow_action_relationship.py
new file mode 100644
index 0000000000000..11aedf9be5c16
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0007_loosen_workflow_action_relationship.py
@@ -0,0 +1,43 @@
+# Generated by Django 5.1.1 on 2024-09-27 17:47
+
+import django.db.models.deletion
+from django.db import migrations
+
+import sentry.db.models.fields.foreignkey
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("workflow_engine", "0006_data_conditions"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="workflowaction",
+ name="workflow",
+ field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ db_constraint=False,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="workflow_engine.workflow",
+ ),
+ ),
+ migrations.SeparateDatabaseAndState(
+ state_operations=[migrations.DeleteModel(name="WorkflowAction")],
+ database_operations=[],
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/migrations/0008_detector_state.py b/src/sentry/workflow_engine/migrations/0008_detector_state.py
new file mode 100644
index 0000000000000..2b4c0a649af18
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0008_detector_state.py
@@ -0,0 +1,63 @@
+# Generated by Django 5.1.1 on 2024-09-28 00:10
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+import sentry.db.models.fields.bounded
+import sentry.db.models.fields.foreignkey
+import sentry.workflow_engine.models.detector_state
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("workflow_engine", "0007_loosen_workflow_action_relationship"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="DetectorState",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ ("detector_group_key", models.CharField(blank=True, max_length=200, null=True)),
+ ("active", models.BooleanField(default=False)),
+ (
+ "state",
+ models.CharField(
+ default=sentry.workflow_engine.models.detector_state.DetectorStatus["OK"],
+ max_length=200,
+ ),
+ ),
+ (
+ "detector",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="workflow_engine.detector"
+ ),
+ ),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/migrations/0009_detector_type.py b/src/sentry/workflow_engine/migrations/0009_detector_type.py
new file mode 100644
index 0000000000000..25495c3435ebf
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0009_detector_type.py
@@ -0,0 +1,37 @@
+# Generated by Django 5.1.1 on 2024-10-02 22:26
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+ # If you're copying this migration don't do this. It's dangerous to disable the checks unless you know what you're
+ # doing.
+ checked = False
+
+ dependencies = [
+ ("workflow_engine", "0008_detector_state"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="detector",
+ name="type",
+ field=models.CharField(max_length=200),
+ preserve_default=False,
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/models/__init__.py b/src/sentry/workflow_engine/models/__init__.py
index ee343230896a5..17dc1b2770049 100644
--- a/src/sentry/workflow_engine/models/__init__.py
+++ b/src/sentry/workflow_engine/models/__init__.py
@@ -1,7 +1,27 @@
-__all__ = ["DataSource", "DataSourceDetector", "Detector", "Workflow", "WorkflowAction"]
+__all__ = [
+ "Action",
+ "DataCondition",
+ "DataConditionGroup",
+ "DataConditionGroupAction",
+ "DataPacket",
+ "DataSource",
+ "DataSourceDetector",
+ "Detector",
+ "DetectorState",
+ "DetectorEvaluationResult",
+ "DetectorWorkflow",
+ "Workflow",
+ "WorkflowDataConditionGroup",
+]
-from .data_source import DataSource
+from .action import Action
+from .data_condition import DataCondition
+from .data_condition_group import DataConditionGroup
+from .data_condition_group_action import DataConditionGroupAction
+from .data_source import DataPacket, DataSource
from .data_source_detector import DataSourceDetector
-from .detector import Detector
+from .detector import Detector, DetectorEvaluationResult
+from .detector_state import DetectorState
+from .detector_workflow import DetectorWorkflow
from .workflow import Workflow
-from .workflow_action import WorkflowAction
+from .workflow_data_condition_group import WorkflowDataConditionGroup
diff --git a/src/sentry/workflow_engine/models/action.py b/src/sentry/workflow_engine/models/action.py
new file mode 100644
index 0000000000000..62d5be7951322
--- /dev/null
+++ b/src/sentry/workflow_engine/models/action.py
@@ -0,0 +1,30 @@
+from django.db import models
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr
+
+
+@region_silo_model
+class Action(DefaultFieldsModel):
+ """
+    Actions are operations that can be taken if the conditions of a DataConditionGroup are satisfied.
+ Examples include: detectors emitting events, sending notifications, creating an issue in the Issue Platform, etc.
+ """
+
+ __relocation_scope__ = RelocationScope.Excluded
+ __repr__ = sane_repr("workflow_id", "type")
+
+ # TODO (@saponifi3d): Don't hardcode these values
+ class Type(models.TextChoices):
+ Notification = "SendNotificationAction"
+ TriggerWorkflow = "TriggerWorkflowAction"
+
+ """
+ Required actions cannot be disabled by the user, and will not be displayed in the UI.
+ These actions will be used internally, to trigger other aspects of the system.
+ For example, creating a new issue in the Issue Platform or a detector emitting an event.
+ """
+ required = models.BooleanField(default=False)
+
+ type = models.TextField(choices=Type.choices)
+ data = models.JSONField(default=dict)
diff --git a/src/sentry/workflow_engine/models/data_condition.py b/src/sentry/workflow_engine/models/data_condition.py
new file mode 100644
index 0000000000000..f7b25c669d9e3
--- /dev/null
+++ b/src/sentry/workflow_engine/models/data_condition.py
@@ -0,0 +1,33 @@
+from django.db import models
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr
+
+from .data_condition_group import DataConditionGroup
+
+
+@region_silo_model
+class DataCondition(DefaultFieldsModel):
+ """
+ A data condition is a way to specify a logic condition, if the condition is met, the condition_result is returned.
+ """
+
+ __relocation_scope__ = RelocationScope.Organization
+ __repr__ = sane_repr("type", "condition")
+
+ # The condition is the logic condition that needs to be met, gt, lt, eq, etc.
+ condition = models.CharField(max_length=200)
+
+ # The comparison is the value that the condition is compared to for the evaluation, this must be a primitive value
+ comparison = models.JSONField()
+
+ # The condition_result is the value that is returned if the condition is met, this must be a primitive value
+ condition_result = models.JSONField()
+
+ # The type of condition, this is used to initialize the condition classes
+ type = models.CharField(max_length=200)
+
+ condition_group = models.ForeignKey(
+ DataConditionGroup,
+ on_delete=models.CASCADE,
+ )
diff --git a/src/sentry/workflow_engine/models/data_condition_group.py b/src/sentry/workflow_engine/models/data_condition_group.py
new file mode 100644
index 0000000000000..3e29aca4b5e69
--- /dev/null
+++ b/src/sentry/workflow_engine/models/data_condition_group.py
@@ -0,0 +1,22 @@
+from django.db import models
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr
+
+
+@region_silo_model
+class DataConditionGroup(DefaultFieldsModel):
+ """
+    A data condition group is a way to specify a group of conditions that must be met for a workflow action to execute
+ """
+
+ __relocation_scope__ = RelocationScope.Organization
+ __repr__ = sane_repr("logic_type")
+
+ class Type(models.TextChoices):
+ ANY = "any"
+ ALL = "all"
+ NONE = "none"
+
+ logic_type = models.CharField(max_length=200, choices=Type.choices, default=Type.ANY)
+ organization = models.ForeignKey("sentry.Organization", on_delete=models.CASCADE)
diff --git a/src/sentry/workflow_engine/models/data_condition_group_action.py b/src/sentry/workflow_engine/models/data_condition_group_action.py
new file mode 100644
index 0000000000000..baa4f3494c491
--- /dev/null
+++ b/src/sentry/workflow_engine/models/data_condition_group_action.py
@@ -0,0 +1,14 @@
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
+
+
+@region_silo_model
+class DataConditionGroupAction(DefaultFieldsModel):
+ """
+ A model that represents the relationship between a data condition group and an action.
+ """
+
+ __relocation_scope__ = RelocationScope.Excluded
+
+ condition_group = FlexibleForeignKey("workflow_engine.DataConditionGroup")
+ action = FlexibleForeignKey("workflow_engine.Action")
diff --git a/src/sentry/workflow_engine/models/data_source.py b/src/sentry/workflow_engine/models/data_source.py
index e449d4c37621f..13c409865ae57 100644
--- a/src/sentry/workflow_engine/models/data_source.py
+++ b/src/sentry/workflow_engine/models/data_source.py
@@ -1,3 +1,6 @@
+import dataclasses
+from typing import Generic, TypeVar
+
from django.db import models
from sentry.backup.scopes import RelocationScope
@@ -9,6 +12,14 @@
)
from sentry.workflow_engine.models.data_source_detector import DataSourceDetector
+T = TypeVar("T")
+
+
+@dataclasses.dataclass
+class DataPacket(Generic[T]):
+ query_id: str
+ packet: T
+
@region_silo_model
class DataSource(DefaultFieldsModel):
@@ -16,6 +27,7 @@ class DataSource(DefaultFieldsModel):
class Type(models.IntegerChoices):
SNUBA_QUERY_SUBSCRIPTION = 1
+ SNUBA_QUERY = 2
organization = FlexibleForeignKey("sentry.Organization")
query_id = BoundedBigIntegerField()
diff --git a/src/sentry/workflow_engine/models/data_source_detector.py b/src/sentry/workflow_engine/models/data_source_detector.py
index b2ab7b7c75c78..7eccb46924038 100644
--- a/src/sentry/workflow_engine/models/data_source_detector.py
+++ b/src/sentry/workflow_engine/models/data_source_detector.py
@@ -6,6 +6,10 @@
@region_silo_model
class DataSourceDetector(DefaultFieldsModel):
+ """
+ Lookup table that maps a DataSource to a Detector. This is used to determine which detectors are available for a given data source.
+ """
+
__relocation_scope__ = RelocationScope.Organization
data_source = FlexibleForeignKey("workflow_engine.DataSource")
diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py
index 482abf6e781c6..dcabca1c66f54 100644
--- a/src/sentry/workflow_engine/models/detector.py
+++ b/src/sentry/workflow_engine/models/detector.py
@@ -1,10 +1,24 @@
+from __future__ import annotations
+
+import abc
+import dataclasses
+import logging
+from typing import TYPE_CHECKING, Any, Generic, TypeVar
+
from django.db import models
from django.db.models import UniqueConstraint
from sentry.backup.scopes import RelocationScope
from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
+from sentry.issues import grouptype
from sentry.models.owner_base import OwnerModel
-from sentry.workflow_engine.models.data_source_detector import DataSourceDetector
+from sentry.types.group import PriorityLevel
+from sentry.workflow_engine.models import DataPacket
+
+if TYPE_CHECKING:
+ from sentry.workflow_engine.models.detector_state import DetectorStatus
+
+logger = logging.getLogger(__name__)
@region_silo_model
@@ -13,7 +27,22 @@ class Detector(DefaultFieldsModel, OwnerModel):
organization = FlexibleForeignKey("sentry.Organization")
name = models.CharField(max_length=200)
- data_sources = models.ManyToManyField("workflow_engine.DataSource", through=DataSourceDetector)
+
+ # The data sources that the detector is watching
+ data_sources = models.ManyToManyField(
+ "workflow_engine.DataSource", through="workflow_engine.DataSourceDetector"
+ )
+
+ # The conditions that must be met for the detector to be considered 'active'
+ # This will emit an event for the workflow to process
+ workflow_condition_group = FlexibleForeignKey(
+ "workflow_engine.DataConditionGroup",
+ blank=True,
+ null=True,
+ unique=True,
+ on_delete=models.SET_NULL,
+ )
+ type = models.CharField(max_length=200)
class Meta(OwnerModel.Meta):
constraints = OwnerModel.Meta.constraints + [
@@ -22,3 +51,63 @@ class Meta(OwnerModel.Meta):
name="workflow_engine_detector_org_name",
)
]
+
+ @property
+ def detector_handler(self) -> DetectorHandler | None:
+ group_type = grouptype.registry.get_by_slug(self.type)
+ if not group_type:
+ logger.error(
+ "No registered grouptype for detector",
+ extra={
+ "group_type": str(group_type),
+ "detector_id": self.id,
+ "detector_type": self.type,
+ },
+ )
+ return None
+
+ if not group_type.detector_handler:
+ logger.error(
+ "Registered grouptype for detector has no detector_handler",
+ extra={
+ "group_type": str(group_type),
+ "detector_id": self.id,
+ "detector_type": self.type,
+ },
+ )
+ return None
+ return group_type.detector_handler(self)
+
+
+@dataclasses.dataclass(frozen=True)
+class DetectorStateData:
+ group_key: str | None
+ active: bool
+ status: DetectorStatus
+ # Stateful detectors always process data packets in order. Once we confirm that a data packet has been fully
+ # processed and all workflows have been done, this value will be used by the stateful detector to prevent
+ # reprocessing
+ dedupe_value: int
+ # Stateful detectors allow various counts to be tracked. We need to update these after we process workflows, so
+ # include the updates in the state
+ counter_updates: dict[str, int]
+
+
+@dataclasses.dataclass(frozen=True)
+class DetectorEvaluationResult:
+ is_active: bool
+ priority: PriorityLevel
+ data: Any
+ state_update_data: DetectorStateData | None = None
+
+
+T = TypeVar("T")
+
+
+class DetectorHandler(abc.ABC, Generic[T]):
+ def __init__(self, detector: Detector):
+ self.detector = detector
+
+ @abc.abstractmethod
+ def evaluate(self, data_packet: DataPacket[T]) -> list[DetectorEvaluationResult]:
+ pass
diff --git a/src/sentry/workflow_engine/models/detector_state.py b/src/sentry/workflow_engine/models/detector_state.py
new file mode 100644
index 0000000000000..dc68964d23dd1
--- /dev/null
+++ b/src/sentry/workflow_engine/models/detector_state.py
@@ -0,0 +1,27 @@
+from enum import StrEnum
+
+from django.db import models
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
+
+
+class DetectorStatus(StrEnum):
+ OK = "ok"
+
+
+@region_silo_model
+class DetectorState(DefaultFieldsModel):
+ __relocation_scope__ = RelocationScope.Organization
+
+ detector = FlexibleForeignKey("workflow_engine.Detector")
+
+ # This key is used when a detector is using group-by
+ # allows us to link to a specific group from a single detector
+ detector_group_key = models.CharField(max_length=200, blank=True, null=True)
+
+ # If the detector is currently active
+ active = models.BooleanField(default=False)
+
+ # The current state of the detector
+ state = models.CharField(max_length=200, default=DetectorStatus.OK)
diff --git a/src/sentry/workflow_engine/models/detector_workflow.py b/src/sentry/workflow_engine/models/detector_workflow.py
new file mode 100644
index 0000000000000..2b22bcba2575d
--- /dev/null
+++ b/src/sentry/workflow_engine/models/detector_workflow.py
@@ -0,0 +1,16 @@
+from django.db import models
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
+
+
+@region_silo_model
+class DetectorWorkflow(DefaultFieldsModel):
+ """
+ A model to represent the relationship between a detector and a workflow.
+ """
+
+ __relocation_scope__ = RelocationScope.Organization
+
+ detector = FlexibleForeignKey("workflow_engine.Detector", on_delete=models.CASCADE)
+ workflow = FlexibleForeignKey("workflow_engine.Workflow", on_delete=models.CASCADE)
diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py
index ca7bf3b1044ca..4cb8fde6721cf 100644
--- a/src/sentry/workflow_engine/models/workflow.py
+++ b/src/sentry/workflow_engine/models/workflow.py
@@ -3,6 +3,8 @@
from sentry.backup.scopes import RelocationScope
from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model, sane_repr
+from .data_condition_group import DataConditionGroup
+
@region_silo_model
class Workflow(DefaultFieldsModel):
@@ -15,6 +17,9 @@ class Workflow(DefaultFieldsModel):
name = models.CharField(max_length=200)
organization = FlexibleForeignKey("sentry.Organization")
+ # Required as the 'when' condition for the workflow, this evaluates states emitted from the detectors
+ when_condition_group = FlexibleForeignKey(DataConditionGroup, blank=True, null=True)
+
__repr__ = sane_repr("name", "organization_id")
class Meta:
diff --git a/src/sentry/workflow_engine/models/workflow_action.py b/src/sentry/workflow_engine/models/workflow_action.py
deleted file mode 100644
index ccd9ac41a9f5a..0000000000000
--- a/src/sentry/workflow_engine/models/workflow_action.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from django.db import models
-
-from sentry.backup.scopes import RelocationScope
-from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model, sane_repr
-
-
-@region_silo_model
-class WorkflowAction(DefaultFieldsModel):
- """
- A workflow action is an action to be taken as part of a workflow.
- These will be executed in order as part of a workflow.
- """
-
- __relocation_scope__ = RelocationScope.Organization
-
- class Type(models.TextChoices):
- NOTIFICATION = "SendNotificationAction"
-
- required = models.BooleanField(default=False)
- workflow = FlexibleForeignKey("workflow_engine.Workflow")
- type = models.TextField(choices=Type.choices)
- data = models.JSONField(default=dict)
-
- __repr__ = sane_repr("workflow_id", "type")
diff --git a/src/sentry/workflow_engine/models/workflow_data_condition_group.py b/src/sentry/workflow_engine/models/workflow_data_condition_group.py
new file mode 100644
index 0000000000000..905a78d7d77e7
--- /dev/null
+++ b/src/sentry/workflow_engine/models/workflow_data_condition_group.py
@@ -0,0 +1,18 @@
+from django.db import models
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
+
+
+@region_silo_model
+class WorkflowDataConditionGroup(DefaultFieldsModel):
+ """
+ A lookup table for the conditions associated with a workflow.
+ """
+
+ __relocation_scope__ = RelocationScope.Organization
+
+ condition_group = FlexibleForeignKey(
+ "workflow_engine.DataConditionGroup", unique=True, on_delete=models.CASCADE
+ )
+ workflow = FlexibleForeignKey("workflow_engine.Workflow", on_delete=models.CASCADE)
diff --git a/src/sentry/workflow_engine/processors/__init__.py b/src/sentry/workflow_engine/processors/__init__.py
new file mode 100644
index 0000000000000..700cd48361de4
--- /dev/null
+++ b/src/sentry/workflow_engine/processors/__init__.py
@@ -0,0 +1,7 @@
+__all__ = [
+ "process_data_sources",
+ "process_detectors",
+]
+
+from .data_source import process_data_sources
+from .detector import process_detectors
diff --git a/src/sentry/workflow_engine/processors/data_source.py b/src/sentry/workflow_engine/processors/data_source.py
new file mode 100644
index 0000000000000..00d580acba44a
--- /dev/null
+++ b/src/sentry/workflow_engine/processors/data_source.py
@@ -0,0 +1,45 @@
+import logging
+
+import sentry_sdk
+from django.db.models import Prefetch
+
+from sentry.utils import metrics
+from sentry.workflow_engine.models import DataPacket, DataSource, Detector
+
+logger = logging.getLogger("sentry.workflow_engine.process_data_source")
+
+
+def process_data_sources(
+ data_packets: list[DataPacket], query_type: DataSource.Type = DataSource.Type.SNUBA_QUERY
+) -> list[tuple[DataPacket, list[Detector]]]:
+ metrics.incr("sentry.workflow_engine.process_data_sources", tags={"query_type": query_type})
+
+ data_packet_ids = {packet.query_id for packet in data_packets}
+
+ # Fetch all data sources and associated detectors for the given data packets
+ with sentry_sdk.start_span(op="sentry.workflow_engine.process_data_sources.fetch_data_sources"):
+ data_sources = DataSource.objects.filter(
+ query_id__in=data_packet_ids, type=query_type
+ ).prefetch_related(Prefetch("detectors"))
+
+ # Build a lookup dict for query_id to detectors
+ query_id_to_detectors = {ds.query_id: list(ds.detectors.all()) for ds in data_sources}
+
+ # Create the result tuples
+ result = []
+ for packet in data_packets:
+ detectors = query_id_to_detectors.get(packet.query_id)
+
+ if detectors:
+ data_packet_tuple = (packet, detectors)
+ result.append(data_packet_tuple)
+ else:
+ logger.warning(
+ "No detectors found", extra={"query_id": packet.query_id, "query_type": query_type}
+ )
+ metrics.incr(
+ "sentry.workflow_engine.process_data_sources.no_detectors",
+ tags={"query_type": query_type},
+ )
+
+ return result
diff --git a/src/sentry/workflow_engine/processors/detector.py b/src/sentry/workflow_engine/processors/detector.py
new file mode 100644
index 0000000000000..24f02191b4bda
--- /dev/null
+++ b/src/sentry/workflow_engine/processors/detector.py
@@ -0,0 +1,38 @@
+import logging
+
+from sentry.workflow_engine.models import DataPacket, Detector, DetectorEvaluationResult
+
+logger = logging.getLogger(__name__)
+
+
+def process_detectors(
+ data_packet: DataPacket, detectors: list[Detector]
+) -> list[tuple[Detector, list[DetectorEvaluationResult]]]:
+ results = []
+
+ for detector in detectors:
+ handler = detector.detector_handler
+
+ if not handler:
+ continue
+
+ detector_results = handler.evaluate(data_packet)
+ detector_group_keys = set()
+
+ for result in detector_results:
+ if result.state_update_data:
+ if result.state_update_data.group_key in detector_group_keys:
+ # This shouldn't happen - log an error and continue on, but we should investigate this.
+ logger.error(
+ "Duplicate detector state group keys found",
+ extra={
+ "detector_id": detector.id,
+ "group_key": result.state_update_data.group_key,
+ },
+ )
+ detector_group_keys.add(result.state_update_data.group_key)
+
+ if detector_results:
+ results.append((detector, detector_results))
+
+ return results
diff --git a/src/sentry/wsgi.py b/src/sentry/wsgi.py
index f693389e3461f..98deebd9e95ca 100644
--- a/src/sentry/wsgi.py
+++ b/src/sentry/wsgi.py
@@ -2,6 +2,8 @@
import os.path
import sys
+from django.urls import reverse
+
# Add the project to the python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))
@@ -21,7 +23,7 @@
# trigger a warmup of the application
application(
{
- "PATH_INFO": "/_health/",
+ "PATH_INFO": reverse("sentry-warmup"),
"REQUEST_METHOD": "GET",
"SERVER_NAME": "127.0.0.1",
"SERVER_PORT": "9001",
diff --git a/static/app/actionCreators/broadcasts.tsx b/static/app/actionCreators/broadcasts.tsx
deleted file mode 100644
index 7f9e11f2554a9..0000000000000
--- a/static/app/actionCreators/broadcasts.tsx
+++ /dev/null
@@ -1,13 +0,0 @@
-import type {Client} from 'sentry/api';
-
-export function getAllBroadcasts(api: Client, orgSlug: string) {
- return api.requestPromise(`/organizations/${orgSlug}/broadcasts/`, {method: 'GET'});
-}
-
-export function markBroadcastsAsSeen(api: Client, idList: string[]) {
- return api.requestPromise('/broadcasts/', {
- method: 'PUT',
- query: {id: idList},
- data: {hasSeen: '1'},
- });
-}
diff --git a/static/app/actionCreators/events.tsx b/static/app/actionCreators/events.tsx
index 15fd1ed64cedd..a4ee3b0e04173 100644
--- a/static/app/actionCreators/events.tsx
+++ b/static/app/actionCreators/events.tsx
@@ -51,7 +51,7 @@ type Options = {
project?: Readonly;
query?: string;
queryBatching?: QueryBatching;
- queryExtras?: Record;
+ queryExtras?: Record;
referrer?: string;
start?: DateString;
team?: Readonly;
diff --git a/static/app/actionCreators/organization.tsx b/static/app/actionCreators/organization.tsx
index 2b9c8e033c64b..46882a81c2e82 100644
--- a/static/app/actionCreators/organization.tsx
+++ b/static/app/actionCreators/organization.tsx
@@ -42,7 +42,7 @@ async function fetchOrg(
}
FeatureFlagOverrides.singleton().loadOrg(org);
- FeatureObserver.singleton().observeFlags({organization: org, bufferSize: 10});
+ FeatureObserver.singleton().observeFlags({organization: org, bufferSize: 100});
OrganizationStore.onUpdate(org, {replace: true});
setActiveOrganization(org);
diff --git a/static/app/actionCreators/plugins.tsx b/static/app/actionCreators/plugins.tsx
index e53d5bd1a1fcd..51e9d16277fb8 100644
--- a/static/app/actionCreators/plugins.tsx
+++ b/static/app/actionCreators/plugins.tsx
@@ -9,7 +9,7 @@ import {t} from 'sentry/locale';
import PluginsStore from 'sentry/stores/pluginsStore';
import type {Plugin} from 'sentry/types/integrations';
-const activeFetch = {};
+const activeFetch: Record | null> = {};
// PluginsStore always exists, so api client should be independent of component lifecycle
const api = new Client();
diff --git a/static/app/actionCreators/projects.tsx b/static/app/actionCreators/projects.tsx
index 654b877e218cf..578dda9f82e0a 100644
--- a/static/app/actionCreators/projects.tsx
+++ b/static/app/actionCreators/projects.tsx
@@ -14,7 +14,7 @@ import LatestContextStore from 'sentry/stores/latestContextStore';
import ProjectsStatsStore from 'sentry/stores/projectsStatsStore';
import ProjectsStore from 'sentry/stores/projectsStore';
import type {Team} from 'sentry/types/organization';
-import type {PlatformKey, Project} from 'sentry/types/project';
+import type {Project} from 'sentry/types/project';
import type {ApiQueryKey} from 'sentry/utils/queryClient';
import {setApiQueryData, useApiQuery, useQueryClient} from 'sentry/utils/queryClient';
import useApi from 'sentry/utils/useApi';
@@ -355,29 +355,6 @@ export function removeProject({
});
}
-/**
- * Load platform documentation specific to the project. The DSN and various
- * other project specific secrets will be included in the documentation.
- *
- * @param api API Client
- * @param orgSlug Organization Slug
- * @param projectSlug Project Slug
- * @param platform Project platform.
- */
-export function loadDocs({
- api,
- orgSlug,
- projectSlug,
- platform,
-}: {
- api: Client;
- orgSlug: string;
- platform: PlatformKey | 'python-tracing' | 'node-tracing' | 'react-native-tracing';
- projectSlug: string;
-}) {
- return api.requestPromise(`/projects/${orgSlug}/${projectSlug}/docs/${platform}/`);
-}
-
/**
* Load the counts of my projects and all projects for the current user
*
diff --git a/static/app/actionCreators/tags.tsx b/static/app/actionCreators/tags.tsx
index f840e855d9145..7a4224ef8560f 100644
--- a/static/app/actionCreators/tags.tsx
+++ b/static/app/actionCreators/tags.tsx
@@ -171,10 +171,12 @@ export function fetchSpanFieldValues({
endpointParams,
projectIds,
search,
+ dataset,
}: {
api: Client;
fieldKey: string;
orgSlug: string;
+ dataset?: 'spans' | 'spansIndexed';
endpointParams?: Query;
projectIds?: string[];
search?: string;
@@ -199,6 +201,10 @@ export function fetchSpanFieldValues({
query.statsPeriod = endpointParams.statsPeriod;
}
}
+ if (dataset === 'spans') {
+ query.dataset = 'spans';
+ query.type = 'string';
+ }
return api.requestPromise(url, {
method: 'GET',
diff --git a/static/app/bootstrap/initializeApp.tsx b/static/app/bootstrap/initializeApp.tsx
index b550c7e388d3d..98a0eeb6b28a1 100644
--- a/static/app/bootstrap/initializeApp.tsx
+++ b/static/app/bootstrap/initializeApp.tsx
@@ -1,7 +1,6 @@
import './legacyTwitterBootstrap';
import './exportGlobals';
-import {routes} from 'sentry/routes';
import type {Config} from 'sentry/types/system';
import {metric} from 'sentry/utils/analytics';
@@ -13,7 +12,7 @@ import {renderOnDomReady} from './renderOnDomReady';
export function initializeApp(config: Config) {
commonInitialization(config);
- initializeSdk(config, {routes});
+ initializeSdk(config);
// Used for operational metrics to determine that the application js
// bundle was loaded by browser.
diff --git a/static/app/bootstrap/initializeSdk.tsx b/static/app/bootstrap/initializeSdk.tsx
index cf7a9e7455581..9ddba27c96741 100644
--- a/static/app/bootstrap/initializeSdk.tsx
+++ b/static/app/bootstrap/initializeSdk.tsx
@@ -1,6 +1,4 @@
-/* eslint-disable simple-import-sort/imports */
-// biome-ignore lint/nursery/noRestrictedImports: ignore warning
-import {browserHistory, createRoutes, match} from 'react-router';
+// eslint-disable-next-line simple-import-sort/imports
import * as Sentry from '@sentry/react';
import {_browserPerformanceTimeOriginMode} from '@sentry/utils';
import type {Event} from '@sentry/types';
@@ -57,31 +55,19 @@ const shouldOverrideBrowserProfiling = window?.__initialData?.user?.isSuperuser;
* having routing instrumentation in order to have a smaller bundle size.
* (e.g. `static/views/integrationPipeline`)
*/
-function getSentryIntegrations(routes?: Function) {
- const reactRouterIntegration = window.__SENTRY_USING_REACT_ROUTER_SIX
- ? Sentry.reactRouterV6BrowserTracingIntegration({
- useEffect: useEffect,
- useLocation: useLocation,
- useNavigationType: useNavigationType,
- createRoutesFromChildren: createRoutesFromChildren,
- matchRoutes: matchRoutes,
- })
- : Sentry.reactRouterV3BrowserTracingIntegration({
- history: browserHistory as any,
- routes: typeof routes === 'function' ? createRoutes(routes()) : [],
- match,
- enableLongAnimationFrame: true,
- _experiments: {
- enableInteractions: false,
- },
- });
-
+function getSentryIntegrations() {
const integrations = [
Sentry.extraErrorDataIntegration({
// 6 is arbitrary, seems like a nice number
depth: 6,
}),
- reactRouterIntegration,
+ Sentry.reactRouterV6BrowserTracingIntegration({
+ useEffect: useEffect,
+ useLocation: useLocation,
+ useNavigationType: useNavigationType,
+ createRoutesFromChildren: createRoutesFromChildren,
+ matchRoutes: matchRoutes,
+ }),
Sentry.browserProfilingIntegration(),
Sentry.thirdPartyErrorFilterIntegration({
filterKeys: ['sentry-spa'],
@@ -92,13 +78,15 @@ function getSentryIntegrations(routes?: Function) {
return integrations;
}
+// TODO(__SENTRY_USING_REACT_ROUTER_SIX): Remove opts once getsentry has had
+// this parameter removed
/**
* Initialize the Sentry SDK
*
* If `routes` is passed, we will instrument react-router. Not all
* entrypoints require this.
*/
-export function initializeSdk(config: Config, {routes}: {routes?: Function} = {}) {
+export function initializeSdk(config: Config, _otps?: any) {
const {apmSampling, sentryConfig, userIdentity} = config;
const tracesSampleRate = apmSampling ?? 0;
const extraTracePropagationTargets = SPA_DSN
@@ -119,7 +107,7 @@ export function initializeSdk(config: Config, {routes}: {routes?: Function} = {}
*/
release: SENTRY_RELEASE_VERSION ?? sentryConfig?.release,
allowUrls: SPA_DSN ? SPA_MODE_ALLOW_URLS : sentryConfig?.allowUrls,
- integrations: getSentryIntegrations(routes),
+ integrations: getSentryIntegrations(),
tracesSampleRate,
profilesSampleRate: shouldOverrideBrowserProfiling ? 1 : 0.1,
tracePropagationTargets: ['localhost', /^\//, ...extraTracePropagationTargets],
diff --git a/static/app/components/IssueStreamHeaderLabel.tsx b/static/app/components/IssueStreamHeaderLabel.tsx
new file mode 100644
index 0000000000000..d4463294a20af
--- /dev/null
+++ b/static/app/components/IssueStreamHeaderLabel.tsx
@@ -0,0 +1,24 @@
+import {css} from '@emotion/react';
+import styled from '@emotion/styled';
+
+import {space} from 'sentry/styles/space';
+
+const IssueStreamHeaderLabel = styled('div')<{breakpoint?: string}>`
+ position: relative;
+ display: inline-block;
+ margin-right: ${space(2)};
+ justify-content: space-between;
+ font-size: 13px;
+ font-weight: ${p => p.theme.fontWeightBold};
+ color: ${p => p.theme.subText};
+
+ ${p =>
+ p.breakpoint &&
+ css`
+ @media (max-width: ${p.breakpoint}) {
+ display: none;
+ }
+ `}
+`;
+
+export default IssueStreamHeaderLabel;
diff --git a/static/app/components/acl/access.spec.tsx b/static/app/components/acl/access.spec.tsx
index 999498fa8cd5a..b2dd8de38911f 100644
--- a/static/app/components/acl/access.spec.tsx
+++ b/static/app/components/acl/access.spec.tsx
@@ -134,7 +134,7 @@ describe('Access', function () {
})
);
- render({childrenMock} , {organization});
+ render({childrenMock} , {organization});
expect(childrenMock).toHaveBeenCalledWith({
hasAccess: true,
@@ -149,9 +149,14 @@ describe('Access', function () {
})
);
- render({childrenMock} , {
- organization,
- });
+ render(
+
+ {childrenMock}
+ ,
+ {
+ organization,
+ }
+ );
expect(childrenMock).toHaveBeenCalledWith({
hasAccess: true,
@@ -166,9 +171,14 @@ describe('Access', function () {
})
);
- render({childrenMock} , {
- organization,
- });
+ render(
+
+ {childrenMock}
+ ,
+ {
+ organization,
+ }
+ );
expect(childrenMock).toHaveBeenCalledWith({
hasAccess: true,
@@ -208,7 +218,7 @@ describe('Access', function () {
);
render(
-
+
The Child
,
{organization}
@@ -225,7 +235,7 @@ describe('Access', function () {
);
render(
-
+
The Child
,
{organization}
diff --git a/static/app/components/acl/access.tsx b/static/app/components/acl/access.tsx
index e451306368752..8f12d61623a1f 100644
--- a/static/app/components/acl/access.tsx
+++ b/static/app/components/acl/access.tsx
@@ -1,11 +1,9 @@
-import {Fragment} from 'react';
-
import type {Scope} from 'sentry/types/core';
import type {Organization, Team} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
import {isRenderFunc} from 'sentry/utils/isRenderFunc';
+import useOrganization from 'sentry/utils/useOrganization';
import {useUser} from 'sentry/utils/useUser';
-import withOrganization from 'sentry/utils/withOrganization';
// Props that function children will get.
type ChildRenderProps = {
@@ -17,20 +15,23 @@ type ChildRenderProps = {
type ChildFunction = (props: ChildRenderProps) => any;
type Props = {
- organization: Organization;
/**
* List of required access levels
*/
- access?: Scope[];
+ access: Scope[];
/**
* Children can be a node or a function as child.
*/
- children?: React.ReactNode | ChildFunction;
-
+ children: React.ReactNode | ChildFunction;
/**
* Requires superuser
*/
isSuperuser?: boolean;
+ /**
+ * Evaluate access against a defined organization. If this is not provided,
+ * the access is evaluated against the currently active organization.
+ */
+ organization?: Organization;
/**
* Optional: To be used when you need to check for access to the Project
@@ -39,7 +40,7 @@ type Props = {
* An "org-member" does not have project:write but if they are "team-admin" for
* of a parent team, they will have appropriate scopes.
*/
- project?: Project | null | undefined;
+ project?: Project;
/**
* Optional: To be used when you need to check for access to the Team
*
@@ -47,7 +48,7 @@ type Props = {
* An "org-member" does not have team:write but if they are "team-admin" for
* the team, they will have appropriate scopes.
*/
- team?: Team | null | undefined;
+ team?: Team;
};
/**
@@ -55,48 +56,53 @@ type Props = {
*/
function Access({
children,
- isSuperuser = false,
- access = [],
+ organization: overrideOrganization,
+ isSuperuser,
+ access,
team,
project,
- organization,
}: Props) {
const user = useUser();
- team = team ?? undefined;
- project = project ?? undefined;
+ const implicitOrganization = useOrganization();
+ const organization = overrideOrganization || implicitOrganization;
- const hasAccess = hasEveryAccess(access, {organization, team, project});
const hasSuperuser = Boolean(user?.isSuperuser);
-
- const renderProps: ChildRenderProps = {
- hasAccess,
- hasSuperuser,
- };
-
- const render = hasAccess && (!isSuperuser || hasSuperuser);
+ const hasAccess = hasEveryAccess(access, {
+ organization,
+ team,
+ project,
+ });
if (isRenderFunc(children)) {
- return children(renderProps);
+ return children({
+ hasAccess,
+ hasSuperuser,
+ });
}
- return {render ? children : null} ;
+ const render = hasAccess && (!isSuperuser || hasSuperuser);
+ return render ? children : null;
}
export function hasEveryAccess(
access: Scope[],
- props: {organization?: Organization; project?: Project; team?: Team}
-) {
- const {organization, team, project} = props;
- const {access: orgAccess} = organization || {access: [] as Organization['access']};
- const {access: teamAccess} = team || {access: [] as Team['access']};
- const {access: projAccess} = project || {access: [] as Project['access']};
+ entities: {
+ organization?: Organization | null;
+ project?: Project | null;
+ team?: Team | null;
+ }
+): boolean {
+ const hasOrganizationAccess = entities.organization
+ ? access.every(acc => entities.organization?.access?.includes(acc))
+ : false;
+ const hasTeamAccess = entities.team
+ ? access.every(acc => entities.team?.access?.includes(acc))
+ : false;
+ const hasProjectAccess = entities.project
+ ? access.every(acc => entities.project?.access?.includes(acc))
+ : false;
- return (
- !access ||
- access.every(acc => orgAccess.includes(acc)) ||
- access.every(acc => teamAccess?.includes(acc)) ||
- access.every(acc => projAccess?.includes(acc))
- );
+ return !access.length || hasOrganizationAccess || hasTeamAccess || hasProjectAccess;
}
-export default withOrganization(Access);
+export default Access;
diff --git a/static/app/components/acl/role.spec.tsx b/static/app/components/acl/role.spec.tsx
deleted file mode 100644
index 81e251035d9ac..0000000000000
--- a/static/app/components/acl/role.spec.tsx
+++ /dev/null
@@ -1,162 +0,0 @@
-import {OrganizationFixture} from 'sentry-fixture/organization';
-import {UserFixture} from 'sentry-fixture/user';
-
-import {act, render, screen} from 'sentry-test/reactTestingLibrary';
-
-import {Role} from 'sentry/components/acl/role';
-import ConfigStore from 'sentry/stores/configStore';
-import OrganizationStore from 'sentry/stores/organizationStore';
-
-describe('Role', function () {
- const organization = OrganizationFixture({
- orgRole: 'admin',
- orgRoleList: [
- {
- id: 'member',
- name: 'Member',
- desc: '...',
- minimumTeamRole: 'contributor',
- isTeamRolesAllowed: true,
- },
- {
- id: 'admin',
- name: 'Admin',
- desc: '...',
- minimumTeamRole: 'admin',
- isTeamRolesAllowed: true,
- },
- {
- id: 'manager',
- name: 'Manager',
- desc: '...',
- minimumTeamRole: 'admin',
- isTeamRolesAllowed: true,
- },
- {
- id: 'owner',
- name: 'Owner',
- desc: '...',
- minimumTeamRole: 'admin',
- isTeamRolesAllowed: true,
- },
- ],
- });
-
- describe('as render prop', function () {
- const childrenMock = jest.fn().mockReturnValue(null);
- beforeEach(function () {
- OrganizationStore.init();
- childrenMock.mockClear();
- });
-
- it('has a sufficient role', function () {
- render({childrenMock} , {
- organization,
- });
-
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: true,
- });
- });
-
- it('has an insufficient role', function () {
- render({childrenMock} , {
- organization,
- });
-
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: false,
- });
- });
-
- it('gives access to a superuser with insufficient role', function () {
- organization.access = ['org:superuser'];
- OrganizationStore.onUpdate(organization, {replace: true});
-
- render({childrenMock} , {
- organization,
- });
-
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: true,
- });
- });
-
- it('does not give access to a made up role', function () {
- render({childrenMock} , {
- organization,
- });
-
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: false,
- });
- });
-
- it('handles no user', function () {
- const user = {...ConfigStore.get('user')};
- ConfigStore.set('user', undefined as any);
- render({childrenMock} , {
- organization,
- });
-
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: false,
- });
- act(() => ConfigStore.set('user', user));
- });
-
- it('updates if user changes', function () {
- ConfigStore.set('user', undefined as any);
- const {rerender} = render({childrenMock} , {
- organization,
- });
-
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: false,
- });
- act(() => ConfigStore.set('user', UserFixture()));
-
- rerender({childrenMock} );
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: true,
- });
- });
-
- it('handles no organization.orgRoleList', function () {
- render(
-
- {childrenMock}
- ,
- {organization}
- );
-
- expect(childrenMock).toHaveBeenCalledWith({
- hasRole: false,
- });
- });
- });
-
- describe('as React node', function () {
- it('has a sufficient role', function () {
- render(
-
- The Child
- ,
- {organization}
- );
-
- expect(screen.getByText('The Child')).toBeInTheDocument();
- });
-
- it('has an insufficient role', function () {
- render(
-
- The Child
- ,
- {organization}
- );
-
- expect(screen.queryByText('The Child')).not.toBeInTheDocument();
- });
- });
-});
diff --git a/static/app/components/acl/role.tsx b/static/app/components/acl/role.tsx
deleted file mode 100644
index 2f01646993739..0000000000000
--- a/static/app/components/acl/role.tsx
+++ /dev/null
@@ -1,77 +0,0 @@
-import {useMemo} from 'react';
-
-import ConfigStore from 'sentry/stores/configStore';
-import type {Organization} from 'sentry/types/organization';
-import type {User} from 'sentry/types/user';
-import {isActiveSuperuser} from 'sentry/utils/isActiveSuperuser';
-import {isRenderFunc} from 'sentry/utils/isRenderFunc';
-import withOrganization from 'sentry/utils/withOrganization';
-
-type RoleRenderProps = {
- hasRole: boolean;
-};
-
-type ChildrenRenderFn = (props: RoleRenderProps) => React.ReactElement | null;
-
-function checkUserRole(user: User, organization: Organization, role: RoleProps['role']) {
- if (!user) {
- return false;
- }
-
- if (isActiveSuperuser()) {
- return true;
- }
-
- if (!Array.isArray(organization.orgRoleList)) {
- return false;
- }
-
- const roleIds = organization.orgRoleList.map(r => r.id);
-
- if (!roleIds.includes(role) || !roleIds.includes(organization.orgRole ?? '')) {
- return false;
- }
-
- const requiredIndex = roleIds.indexOf(role);
- const currentIndex = roleIds.indexOf(organization.orgRole ?? '');
- return currentIndex >= requiredIndex;
-}
-
-interface RoleProps {
- /**
- * If children is a function then will be treated as a render prop and
- * passed RoleRenderProps.
- *
- * The other interface is more simple, only show `children` if user has
- * the minimum required role.
- */
- children: React.ReactElement | ChildrenRenderFn;
- /**
- * Current Organization
- */
- organization: Organization;
- /**
- * Minimum required role
- */
- role: string;
-}
-
-function Role({role, organization, children}: RoleProps): React.ReactElement | null {
- const user = ConfigStore.get('user');
-
- const hasRole = useMemo(
- () => checkUserRole(user, organization, role),
- // It seems that this returns a stable reference, but
- [organization, role, user]
- );
-
- if (isRenderFunc(children)) {
- return children({hasRole});
- }
-
- return hasRole ? children : null;
-}
-
-const withOrganizationRole = withOrganization(Role);
-
-export {withOrganizationRole as Role};
diff --git a/static/app/components/acl/useRole.spec.tsx b/static/app/components/acl/useRole.spec.tsx
new file mode 100644
index 0000000000000..076bb351bd071
--- /dev/null
+++ b/static/app/components/acl/useRole.spec.tsx
@@ -0,0 +1,79 @@
+import {OrganizationFixture} from 'sentry-fixture/organization';
+import {UserFixture} from 'sentry-fixture/user';
+
+import {renderHook} from 'sentry-test/reactTestingLibrary';
+
+import {useRole} from 'sentry/components/acl/useRole';
+import ConfigStore from 'sentry/stores/configStore';
+import OrganizationStore from 'sentry/stores/organizationStore';
+import type {Organization} from 'sentry/types/organization';
+import {OrganizationContext} from 'sentry/views/organizationContext';
+
+function createWrapper(organization: Organization) {
+ return function ({children}: {children: React.ReactNode}) {
+ return (
+      <OrganizationContext.Provider value={organization}>
+ {children}
+      </OrganizationContext.Provider>
+ );
+ };
+}
+
+describe('useRole', () => {
+ const organization = OrganizationFixture({
+ // User is an admin of this test org
+ orgRole: 'admin',
+ // For these tests, attachments will require an admin role
+ attachmentsRole: 'admin',
+ debugFilesRole: 'member',
+ });
+
+ beforeEach(() => {
+ ConfigStore.set('user', UserFixture());
+ // OrganizationStore is still called directly in isActiveSuperuser()
+ OrganizationStore.init();
+ OrganizationStore.onUpdate(organization, {replace: true});
+ });
+
+ it('has a sufficient role', () => {
+ const {result} = renderHook(() => useRole({role: 'attachmentsRole'}), {
+ wrapper: createWrapper(organization),
+ });
+ expect(result.current.hasRole).toBe(true);
+ expect(result.current.roleRequired).toBe('admin');
+ });
+
+ it('has an insufficient role', () => {
+ const org = OrganizationFixture({
+ ...organization,
+ orgRole: 'member',
+ });
+ OrganizationStore.onUpdate(org, {replace: true});
+ const {result} = renderHook(() => useRole({role: 'attachmentsRole'}), {
+ wrapper: createWrapper(org),
+ });
+ expect(result.current.hasRole).toBe(false);
+ });
+
+ it('gives access to a superuser with insufficient role', () => {
+ const org = OrganizationFixture({
+ ...organization,
+ orgRole: 'member',
+ access: ['org:superuser'],
+ });
+ OrganizationStore.onUpdate(org, {replace: true});
+ const {result} = renderHook(() => useRole({role: 'attachmentsRole'}), {
+ wrapper: createWrapper(org),
+ });
+ expect(result.current.hasRole).toBe(true);
+ });
+
+ it('handles no organization.orgRoleList', () => {
+ const org = {...organization, orgRoleList: []};
+ OrganizationStore.onUpdate(org, {replace: true});
+ const {result} = renderHook(() => useRole({role: 'attachmentsRole'}), {
+ wrapper: createWrapper(org),
+ });
+ expect(result.current.hasRole).toBe(false);
+ });
+});
diff --git a/static/app/components/acl/useRole.tsx b/static/app/components/acl/useRole.tsx
new file mode 100644
index 0000000000000..029bd4ed21713
--- /dev/null
+++ b/static/app/components/acl/useRole.tsx
@@ -0,0 +1,55 @@
+import {useMemo} from 'react';
+
+import type {Organization} from 'sentry/types/organization';
+import {isActiveSuperuser} from 'sentry/utils/isActiveSuperuser';
+import useOrganization from 'sentry/utils/useOrganization';
+
+function hasOrganizationRole(organization: Organization, roleRequired: string): boolean {
+ if (!Array.isArray(organization.orgRoleList)) {
+ return false;
+ }
+
+ const roleIds = organization.orgRoleList.map(r => r.id);
+
+ const requiredIndex = roleIds.indexOf(roleRequired);
+ const currentIndex = roleIds.indexOf(organization.orgRole ?? '');
+
+ if (requiredIndex === -1 || currentIndex === -1) {
+ return false;
+ }
+
+ // If the user is a lower role than the required role, they do not have access
+ return currentIndex >= requiredIndex;
+}
+
+interface UseRoleOptions {
+ /**
+ * Minimum required role.
+ * The required role ('member', 'admin') are stored in the organization object.
+ * eg: Organization.debugFilesRole = 'member'
+ */
+ role: // Extract keys to enforce that they are available on the Organization type
+  Extract<keyof Organization, 'attachmentsRole' | 'debugFilesRole'>;
+}
+
+interface UseRoleResult {
+ hasRole: boolean;
+ /**
+ * The required role ('member', 'admin') from the organization object.
+ */
+ roleRequired: string;
+}
+
+export function useRole(options: UseRoleOptions): UseRoleResult {
+ const organization = useOrganization();
+
+ return useMemo((): UseRoleResult => {
+ const roleRequired = organization[options.role];
+ if (isActiveSuperuser()) {
+ return {hasRole: true, roleRequired};
+ }
+
+ const hasRole = hasOrganizationRole(organization, roleRequired);
+ return {hasRole, roleRequired};
+ }, [organization, options.role]);
+}
diff --git a/static/app/components/activity/note/header.tsx b/static/app/components/activity/note/header.tsx
index 7755bb9851bfe..36ab77cf10b34 100644
--- a/static/app/components/activity/note/header.tsx
+++ b/static/app/components/activity/note/header.tsx
@@ -5,19 +5,20 @@ import {openConfirmModal} from 'sentry/components/confirm';
import {DropdownMenu} from 'sentry/components/dropdownMenu';
import {IconEllipsis} from 'sentry/icons';
import {t} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import {space} from 'sentry/styles/space';
import type {User} from 'sentry/types/user';
+import {useUser} from 'sentry/utils/useUser';
type Props = {
authorName: string;
onDelete: () => void;
onEdit: () => void;
+ // Naming is not great here, but this seems to be the author, aka user who wrote the note.
user?: User;
};
function NoteHeader({authorName, user, onEdit, onDelete}: Props) {
- const activeUser = ConfigStore.get('user');
+ const activeUser = useUser();
const canEdit = activeUser && (activeUser.isSuperuser || user?.id === activeUser.id);
return (
diff --git a/static/app/components/activity/note/inputWithStorage.tsx b/static/app/components/activity/note/inputWithStorage.tsx
index 167a2961ebc5c..b828e14b03ec8 100644
--- a/static/app/components/activity/note/inputWithStorage.tsx
+++ b/static/app/components/activity/note/inputWithStorage.tsx
@@ -6,6 +6,8 @@ import {NoteInput} from 'sentry/components/activity/note/input';
import type {MentionChangeEvent} from 'sentry/components/activity/note/types';
import type {NoteType} from 'sentry/types/alerts';
import localStorage from 'sentry/utils/localStorage';
+import {StreamlinedNoteInput} from 'sentry/views/issueDetails/streamline/note';
+import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils';
 type InputProps = React.ComponentProps<typeof NoteInput>;
@@ -14,6 +16,7 @@ type Props = {
storageKey: string;
onLoad?: (data: string) => string;
onSave?: (data: string) => string;
+ source?: string;
text?: string;
} & InputProps;
@@ -54,8 +57,10 @@ function NoteInputWithStorage({
onLoad,
onSave,
text,
+ source,
...props
}: Props) {
+ const hasStreamlinedUi = useHasStreamlinedUI();
const value = useMemo(() => {
if (text) {
return text;
@@ -131,6 +136,19 @@ function NoteInputWithStorage({
);
// Make sure `this.props` does not override `onChange` and `onCreate`
+ if (hasStreamlinedUi && source === 'issue-details') {
+ return (
+
+ );
+ }
+
return (
);
diff --git a/static/app/components/activity/note/mentionStyle.tsx b/static/app/components/activity/note/mentionStyle.tsx
index 519c1a21fc312..ca9e10b928e92 100644
--- a/static/app/components/activity/note/mentionStyle.tsx
+++ b/static/app/components/activity/note/mentionStyle.tsx
@@ -5,13 +5,14 @@ import {space} from 'sentry/styles/space';
type Options = {
theme: Theme;
minHeight?: number;
+ streamlined?: boolean;
};
/**
* Note this is an object for `react-mentions` component and
* not a styled component/emotion style
*/
-export function mentionStyle({theme, minHeight}: Options) {
+export function mentionStyle({theme, minHeight, streamlined}: Options) {
const inputProps = {
fontSize: theme.fontSizeMedium,
padding: `${space(1.5)} ${space(2)}`,
@@ -21,6 +22,16 @@ export function mentionStyle({theme, minHeight}: Options) {
overflow: 'auto',
};
+ const streamlinedInputProps = {
+ fontSize: theme.fontSizeMedium,
+ padding: `${space(1)} ${space(1.5)}`,
+ outline: 0,
+ border: `1px solid ${theme.border}`,
+ borderRadius: theme.borderRadius,
+ minHeight,
+ overflow: 'auto',
+ };
+
return {
control: {
backgroundColor: `${theme.background}`,
@@ -56,8 +67,8 @@ export function mentionStyle({theme, minHeight}: Options) {
},
// Use the same props for the highliter to keep the phantom text aligned
- highlighter: inputProps,
- input: inputProps,
+ highlighter: streamlined ? streamlinedInputProps : inputProps,
+ input: streamlined ? streamlinedInputProps : inputProps,
},
suggestions: {
diff --git a/static/app/components/assigneeSelectorDropdown.spec.tsx b/static/app/components/assigneeSelectorDropdown.spec.tsx
index b9988e94b8e1e..675aef098928c 100644
--- a/static/app/components/assigneeSelectorDropdown.spec.tsx
+++ b/static/app/components/assigneeSelectorDropdown.spec.tsx
@@ -1,5 +1,4 @@
import {GroupFixture} from 'sentry-fixture/group';
-import {MemberFixture} from 'sentry-fixture/member';
import {ProjectFixture} from 'sentry-fixture/project';
import {TeamFixture} from 'sentry-fixture/team';
import {UserFixture} from 'sentry-fixture/user';
@@ -17,18 +16,25 @@ import MemberListStore from 'sentry/stores/memberListStore';
import ProjectsStore from 'sentry/stores/projectsStore';
import TeamStore from 'sentry/stores/teamStore';
import type {Group} from 'sentry/types/group';
+import type {Team} from 'sentry/types/organization';
+import type {Project} from 'sentry/types/project';
+import type {User} from 'sentry/types/user';
jest.mock('sentry/actionCreators/modal', () => ({
openInviteMembersModal: jest.fn(),
}));
describe('AssigneeSelectorDropdown', () => {
- let USER_1, USER_2, USER_3, USER_4;
- let TEAM_1, TEAM_2;
- let PROJECT_1;
- let GROUP_1;
- let GROUP_2;
- let GROUP_3;
+ let USER_1: User;
+ let USER_2: User;
+ let USER_3: User;
+ let USER_4: User;
+ let TEAM_1: Team;
+ let TEAM_2: Team;
+ let PROJECT_1: Project;
+ let GROUP_1: Group;
+ let GROUP_2: Group;
+ let GROUP_3: Group;
beforeEach(() => {
USER_1 = UserFixture({
@@ -46,7 +52,7 @@ describe('AssigneeSelectorDropdown', () => {
name: 'Epic Fail',
email: 'epicf@example.com',
});
- USER_4 = MemberFixture({
+ USER_4 = UserFixture({
id: '4',
name: 'Git Hub',
email: 'github@example.com',
diff --git a/static/app/components/assigneeSelectorDropdown.tsx b/static/app/components/assigneeSelectorDropdown.tsx
index de6f809553d84..476271fb237d5 100644
--- a/static/app/components/assigneeSelectorDropdown.tsx
+++ b/static/app/components/assigneeSelectorDropdown.tsx
@@ -19,7 +19,6 @@ import LoadingIndicator from 'sentry/components/loadingIndicator';
import {Tooltip} from 'sentry/components/tooltip';
import {IconAdd, IconUser} from 'sentry/icons';
import {t, tct, tn} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import MemberListStore from 'sentry/stores/memberListStore';
import ProjectsStore from 'sentry/stores/projectsStore';
import {useLegacyStore} from 'sentry/stores/useLegacyStore';
@@ -29,6 +28,7 @@ import type {Group, SuggestedOwnerReason} from 'sentry/types/group';
import type {Team} from 'sentry/types/organization';
import type {User} from 'sentry/types/user';
import {buildTeamId} from 'sentry/utils';
+import {useUser} from 'sentry/utils/useUser';
 const suggestedReasonTable: Record<SuggestedOwnerReason, string> = {
suspectCommit: t('Suspect Commit'),
@@ -215,7 +215,7 @@ export default function AssigneeSelectorDropdown({
trigger,
}: AssigneeSelectorDropdownProps) {
const memberLists = useLegacyStore(MemberListStore);
- const sessionUser = ConfigStore.get('user');
+ const sessionUser = useUser();
const currentMemberList = memberList ?? memberLists?.members ?? [];
diff --git a/static/app/components/avatar/baseAvatar.tsx b/static/app/components/avatar/baseAvatar.tsx
index b564ded6bed73..0b73327b1a4fd 100644
--- a/static/app/components/avatar/baseAvatar.tsx
+++ b/static/app/components/avatar/baseAvatar.tsx
@@ -13,9 +13,8 @@ import Gravatar from './gravatar';
import type {ImageStyleProps} from './styles';
import {imageStyle} from './styles';
-type AllowedSize = (typeof ALLOWED_SIZES)[number];
+type AllowedSize = 20 | 32 | 36 | 48 | 52 | 64 | 80 | 96 | 120;
-const ALLOWED_SIZES = [20, 32, 36, 48, 52, 64, 80, 96, 120] as const;
const DEFAULT_REMOTE_SIZE = 120 satisfies AllowedSize;
interface BaseAvatarProps extends React.HTMLAttributes {
diff --git a/static/app/components/avatar/seenByList.tsx b/static/app/components/avatar/seenByList.tsx
index 75d534bd3a651..9d7e456f1653f 100644
--- a/static/app/components/avatar/seenByList.tsx
+++ b/static/app/components/avatar/seenByList.tsx
@@ -6,9 +6,9 @@ import AvatarList from 'sentry/components/avatar/avatarList';
import {Tooltip} from 'sentry/components/tooltip';
import {IconShow} from 'sentry/icons';
import {t} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import type {AvatarUser, User} from 'sentry/types/user';
import {userDisplayName} from 'sentry/utils/formatters';
+import {useUser} from 'sentry/utils/useUser';
type Props = {
// Avatar size
@@ -35,7 +35,7 @@ function SeenByList({
iconPosition = 'left',
className,
}: Props) {
- const activeUser = ConfigStore.get('user');
+ const activeUser = useUser();
const displayUsers = seenBy.filter(user => activeUser.id !== user.id);
if (displayUsers.length === 0) {
diff --git a/static/app/components/badge/badge.tsx b/static/app/components/badge/badge.tsx
index 66d5e4a933be2..7c8ac2b082a2f 100644
--- a/static/app/components/badge/badge.tsx
+++ b/static/app/components/badge/badge.tsx
@@ -3,14 +3,14 @@ import styled from '@emotion/styled';
import {space} from 'sentry/styles/space';
-interface Props extends React.HTMLAttributes<HTMLSpanElement> {
+export interface BadgeProps extends React.HTMLAttributes<HTMLSpanElement> {
text?: string | number | null;
type?: keyof Theme['badge'];
}
-const Badge = styled(({children, text, ...props}: Props) => (
+const Badge = styled(({children, text, ...props}: BadgeProps) => (
   <span {...props}>{children ?? text}</span>
-))<Props>`
+))<BadgeProps>`
display: inline-block;
height: 20px;
min-width: 20px;
diff --git a/static/app/components/badge/groupPriority.tsx b/static/app/components/badge/groupPriority.tsx
index a4eada534f3f8..1eeb3ceba2dd3 100644
--- a/static/app/components/badge/groupPriority.tsx
+++ b/static/app/components/badge/groupPriority.tsx
@@ -1,6 +1,7 @@
import {Fragment, useMemo} from 'react';
import type {Theme} from '@emotion/react';
import styled from '@emotion/styled';
+import {VisuallyHidden} from '@react-aria/visually-hidden';
import bannerStar from 'sentry-images/spot/banner-star.svg';
@@ -15,6 +16,7 @@ import HookOrDefault from 'sentry/components/hookOrDefault';
import Placeholder from 'sentry/components/placeholder';
import {Tooltip} from 'sentry/components/tooltip';
import {IconClose} from 'sentry/icons';
+import {IconCellSignal} from 'sentry/icons/iconCellSignal';
import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {Activity} from 'sentry/types/group';
@@ -34,6 +36,8 @@ type GroupPriorityDropdownProps = {
type GroupPriorityBadgeProps = {
priority: PriorityLevel;
children?: React.ReactNode;
+ showLabel?: boolean;
+ variant?: 'default' | 'signal';
};
 const PRIORITY_KEY_TO_LABEL: Record<PriorityLevel, string> = {
@@ -85,21 +89,40 @@ function useLastEditedBy({
export function makeGroupPriorityDropdownOptions({
onChange,
+ hasIssueStreamTableLayout,
}: {
+ hasIssueStreamTableLayout: boolean;
onChange: (value: PriorityLevel) => void;
}) {
return PRIORITY_OPTIONS.map(priority => ({
textValue: PRIORITY_KEY_TO_LABEL[priority],
key: priority,
- label: ,
+ label: (
+
+ ),
onAction: () => onChange(priority),
}));
}
-export function GroupPriorityBadge({priority, children}: GroupPriorityBadgeProps) {
+export function GroupPriorityBadge({
+ priority,
+ showLabel = true,
+ variant = 'default',
+ children,
+}: GroupPriorityBadgeProps) {
+ const bars =
+ priority === PriorityLevel.HIGH ? 3 : priority === PriorityLevel.MEDIUM ? 2 : 1;
+ const label = PRIORITY_KEY_TO_LABEL[priority] ?? t('Unknown');
+
return (
-
- {PRIORITY_KEY_TO_LABEL[priority] ?? t('Unknown')}
+ }
+ >
+ {showLabel ? label : {label} }
{children}
);
@@ -187,9 +210,14 @@ export function GroupPriorityDropdown({
onChange,
lastEditedBy,
}: GroupPriorityDropdownProps) {
+ const organization = useOrganization();
+ const hasIssueStreamTableLayout = organization.features.includes(
+ 'issue-stream-table-layout'
+ );
+
const options: MenuItemProps[] = useMemo(
- () => makeGroupPriorityDropdownOptions({onChange}),
- [onChange]
+ () => makeGroupPriorityDropdownOptions({onChange, hasIssueStreamTableLayout}),
+ [onChange, hasIssueStreamTableLayout]
);
return (
@@ -207,7 +235,11 @@ export function GroupPriorityDropdown({
aria-label={t('Modify issue priority')}
size="zero"
>
-
+
diff --git a/static/app/components/charts/baseChart.tsx b/static/app/components/charts/baseChart.tsx
index 16f8732ed1c86..52af47d0e8b43 100644
--- a/static/app/components/charts/baseChart.tsx
+++ b/static/app/components/charts/baseChart.tsx
@@ -704,6 +704,11 @@ const getTooltipStyles = (p: {theme: Theme}) => css`
justify-content: space-between;
align-items: baseline;
}
+ .tooltip-code-no-margin {
+ padding-left: 0;
+ margin-left: 0;
+ color: ${p.theme.subText};
+ }
.tooltip-footer {
border-top: solid 1px ${p.theme.innerBorder};
text-align: center;
diff --git a/static/app/components/charts/chartZoom.tsx b/static/app/components/charts/chartZoom.tsx
index 681539c298d29..41a3442289d39 100644
--- a/static/app/components/charts/chartZoom.tsx
+++ b/static/app/components/charts/chartZoom.tsx
@@ -22,6 +22,7 @@ import type {
} from 'sentry/types/echarts';
import type {InjectedRouter} from 'sentry/types/legacyReactRouter';
import {getUtcDateString, getUtcToLocalDateObject} from 'sentry/utils/dates';
+import withSentryRouter from 'sentry/utils/withSentryRouter';
const getDate = date =>
date ? moment.utc(date).format(moment.HTML5_FMT.DATETIME_LOCAL_SECONDS) : null;
@@ -32,16 +33,15 @@ type Period = {
start: DateString;
};
-const ZoomPropKeys = [
- 'period',
- 'xAxis',
- 'onChartReady',
- 'onDataZoom',
- 'onRestore',
- 'onFinished',
-] as const;
+type ZoomPropKeys =
+ | 'period'
+ | 'xAxis'
+ | 'onChartReady'
+ | 'onDataZoom'
+ | 'onRestore'
+ | 'onFinished';
-export interface ZoomRenderProps extends Pick<Props, (typeof ZoomPropKeys)[number]> {
+export interface ZoomRenderProps extends Pick<Props, ZoomPropKeys> {
dataZoom?: DataZoomComponentOption[];
end?: Date;
isGroupedByDate?: boolean;
@@ -398,4 +398,4 @@ class ChartZoom extends Component {
}
}
-export default ChartZoom;
+export default withSentryRouter(ChartZoom);
diff --git a/static/app/components/charts/eventsChart.tsx b/static/app/components/charts/eventsChart.tsx
index f7fdbdb5a8f08..07fd7b641dfb9 100644
--- a/static/app/components/charts/eventsChart.tsx
+++ b/static/app/components/charts/eventsChart.tsx
@@ -46,7 +46,7 @@ import {
isEquation,
} from 'sentry/utils/discover/fields';
import type {DiscoverDatasets} from 'sentry/utils/discover/types';
-import {decodeList} from 'sentry/utils/queryString';
+import {decodeList, decodeScalar} from 'sentry/utils/queryString';
import EventsRequest from './eventsRequest';
@@ -78,6 +78,7 @@ type ChartProps = {
* a list of series names that are also disableable.
*/
disableableSeries?: string[];
+ forceChartType?: string;
fromDiscover?: boolean;
height?: number;
interval?: string;
@@ -137,7 +138,7 @@ class Chart extends Component {
}
getChartComponent(): ChartComponent {
- const {showDaily, timeseriesData, yAxis, chartComponent} = this.props;
+ const {showDaily, timeseriesData, yAxis, chartComponent, forceChartType} = this.props;
if (defined(chartComponent)) {
return chartComponent;
@@ -148,7 +149,7 @@ class Chart extends Component {
}
if (timeseriesData.length > 1) {
- switch (aggregateMultiPlotType(yAxis)) {
+ switch (forceChartType || aggregateMultiPlotType(yAxis)) {
case 'line':
return LineChart;
case 'area':
@@ -541,6 +542,7 @@ class EventsChart extends Component {
// Include previous only on relative dates (defaults to relative if no start and end)
const includePrevious = !disablePrevious && !start && !end;
+ const forceChartType = decodeScalar(router.location.query.forceChartType);
const yAxisArray = decodeList(yAxis);
const yAxisSeriesNames = yAxisArray.map(name => {
let yAxisLabel = name && isEquation(name) ? getEquation(name) : name;
@@ -590,6 +592,7 @@ class EventsChart extends Component {
{isValidElement(chartHeader) && chartHeader}
{
return (
;
+ queryExtras?: Record;
/**
* A unique name for what's triggering this request, see organization_events_stats for an allowlist
*/
diff --git a/static/app/components/charts/releaseSeries.spec.tsx b/static/app/components/charts/releaseSeries.spec.tsx
index 5d047e11f4755..22151a9547547 100644
--- a/static/app/components/charts/releaseSeries.spec.tsx
+++ b/static/app/components/charts/releaseSeries.spec.tsx
@@ -1,7 +1,8 @@
+import {Fragment} from 'react';
import {OrganizationFixture} from 'sentry-fixture/organization';
import {RouterFixture} from 'sentry-fixture/routerFixture';
-import {render, waitFor} from 'sentry-test/reactTestingLibrary';
+import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary';
import type {ReleaseSeriesProps} from 'sentry/components/charts/releaseSeries';
import ReleaseSeries from 'sentry/components/charts/releaseSeries';
@@ -14,6 +15,8 @@ describe('ReleaseSeries', function () {
let releasesMock;
beforeEach(function () {
+ jest.resetAllMocks();
+
releases = [
{
version: 'sentry-android-shop@1.2.0',
@@ -218,6 +221,28 @@ describe('ReleaseSeries', function () {
await waitFor(() => expect(releasesMock).toHaveBeenCalledTimes(2));
});
+ it('shares release fetches between components with memoize enabled', async function () {
+ render(
+
+
+ {({releaseSeries}) => {
+ return releaseSeries.length > 0 ? Series 1 : null;
+ }}
+
+
+ {({releaseSeries}) => {
+ return releaseSeries.length > 0 ? Series 2 : null;
+ }}
+
+
+ );
+
+ await waitFor(() => expect(screen.getByText('Series 1')).toBeInTheDocument());
+ await waitFor(() => expect(screen.getByText('Series 2')).toBeInTheDocument());
+
+ await waitFor(() => expect(releasesMock).toHaveBeenCalledTimes(1));
+ });
+
it('generates an eCharts `markLine` series from releases', async function () {
render({renderFunc} );
diff --git a/static/app/components/charts/releaseSeries.tsx b/static/app/components/charts/releaseSeries.tsx
index 6f114b5e02d96..b126c57ee68cc 100644
--- a/static/app/components/charts/releaseSeries.tsx
+++ b/static/app/components/charts/releaseSeries.tsx
@@ -46,7 +46,7 @@ function getOrganizationReleases(
organization: Organization,
conditions: ReleaseConditions
) {
- const query = {};
+ const query: Record = {};
Object.keys(conditions).forEach(key => {
let value = conditions[key];
if (value && (key === 'start' || key === 'end')) {
@@ -64,6 +64,14 @@ function getOrganizationReleases(
}) as Promise<[ReleaseMetaBasic[], any, ResponseMeta]>;
}
+const getOrganizationReleasesMemoized = memoize(
+ getOrganizationReleases,
+ (_, __, conditions) =>
+ Object.values(conditions)
+ .map(val => JSON.stringify(val))
+ .join('-')
+);
+
export interface ReleaseSeriesProps extends WithRouterProps {
api: Client;
children: (s: State) => React.ReactNode;
@@ -130,15 +138,6 @@ class ReleaseSeries extends Component {
_isMounted: boolean = false;
- getOrganizationReleasesMemoized = memoize(
- (api: Client, organization: Organization, conditions: ReleaseConditions) =>
- getOrganizationReleases(api, organization, conditions),
- (_, __, conditions) =>
- Object.values(conditions)
- .map(val => JSON.stringify(val))
- .join('-')
- );
-
async fetchData() {
const {
api,
@@ -164,7 +163,7 @@ class ReleaseSeries extends Component {
while (hasMore) {
try {
const getReleases = memoized
- ? this.getOrganizationReleasesMemoized
+ ? getOrganizationReleasesMemoized
: getOrganizationReleases;
const [newReleases, , resp] = await getReleases(api, organization, conditions);
releases.push(...newReleases);
@@ -293,7 +292,6 @@ class ReleaseSeries extends Component {
'',
- '',
'
',
].join('');
},
diff --git a/static/app/components/charts/series/barSeries.tsx b/static/app/components/charts/series/barSeries.tsx
index 99d9ef266e676..6120073c92119 100644
--- a/static/app/components/charts/series/barSeries.tsx
+++ b/static/app/components/charts/series/barSeries.tsx
@@ -1,11 +1,15 @@
import 'echarts/lib/chart/bar';
-import type {BarSeriesOption} from 'echarts';
+import type {BarSeriesOption, LineSeriesOption} from 'echarts';
-function barSeries(props: BarSeriesOption): BarSeriesOption {
+/**
+ * The return type can be BarSeriesOption or LineSeriesOption so that we can add
+ * custom lines on top of the event bar chart in `eventGraph.tsx`.
+ */
+function barSeries(props: BarSeriesOption): BarSeriesOption | LineSeriesOption {
return {
...props,
- type: 'bar',
+ type: props.type ?? 'bar',
};
}
diff --git a/static/app/components/charts/useChartZoom.tsx b/static/app/components/charts/useChartZoom.tsx
index 34c69a483474a..99a3d5deeb5e9 100644
--- a/static/app/components/charts/useChartZoom.tsx
+++ b/static/app/components/charts/useChartZoom.tsx
@@ -1,11 +1,10 @@
-import {useCallback, useMemo, useState} from 'react';
+import {useCallback, useEffect, useMemo, useRef} from 'react';
import type {
DataZoomComponentOption,
+ ECharts,
InsideDataZoomComponentOption,
ToolboxComponentOption,
- XAXisComponentOption,
} from 'echarts';
-import moment from 'moment-timezone';
import * as qs from 'query-string';
import {updateDateTime} from 'sentry/actionCreators/pageFilters';
@@ -17,112 +16,138 @@ import type {
EChartChartReadyHandler,
EChartDataZoomHandler,
EChartFinishedHandler,
- EChartRestoreHandler,
} from 'sentry/types/echarts';
-import type {InjectedRouter} from 'sentry/types/legacyReactRouter';
-import {getUtcDateString, getUtcToLocalDateObject} from 'sentry/utils/dates';
+import {getUtcDateString} from 'sentry/utils/dates';
+import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
+import useRouter from 'sentry/utils/useRouter';
// TODO: replace usages of ChartZoom with useChartZoom
-const getDate = date =>
- date ? moment.utc(date).format(moment.HTML5_FMT.DATETIME_LOCAL_SECONDS) : null;
+type DateTimeUpdate = Parameters<typeof updateDateTime>[0];
-type Period = {
- end: DateString;
- period: string | null;
- start: DateString;
-};
-
-const ZoomPropKeys = [
- 'period',
- 'xAxis',
- 'onChartReady',
- 'onDataZoom',
- 'onRestore',
- 'onFinished',
-] as const;
-
-export interface ZoomRenderProps extends Pick<Props, (typeof ZoomPropKeys)[number]> {
- dataZoom?: DataZoomComponentOption[];
- end?: Date;
- isGroupedByDate?: boolean;
- showTimeInTooltip?: boolean;
- start?: Date;
- toolBox?: ToolboxComponentOption;
- utc?: boolean;
+/**
+ * Our api query params expects a specific date format
+ */
+const getQueryTime = (date: DateString | undefined) =>
+ date ? getUtcDateString(date) : null;
+
+interface ZoomRenderProps {
+ dataZoom: DataZoomComponentOption[];
+ isGroupedByDate: boolean;
+ onChartReady: EChartChartReadyHandler;
+ onDataZoom: EChartDataZoomHandler;
+ onFinished: EChartFinishedHandler;
+ toolBox: ToolboxComponentOption;
}
interface Props {
children: (props: ZoomRenderProps) => React.ReactNode;
chartZoomOptions?: DataZoomComponentOption;
+ /**
+ * Disables saving changes to the current period
+ */
disabled?: boolean;
- end?: DateString;
- onChartReady?: EChartChartReadyHandler;
- onDataZoom?: EChartDataZoomHandler;
- onFinished?: EChartFinishedHandler;
- onRestore?: EChartRestoreHandler;
- onZoom?: (period: Period) => void;
- period?: string | null;
- router?: InjectedRouter;
+ onZoom?: (period: DateTimeUpdate) => void;
+ /**
+ * Use either `saveOnZoom` or `usePageDate` not both
+ * Will persist zoom state to page filters
+ */
saveOnZoom?: boolean;
showSlider?: boolean;
- start?: DateString;
+ /**
+ * Use either `saveOnZoom` or `usePageDate` not both
+ * Persists zoom state to query params without updating page filters.
+ * Sets the pageStart and pageEnd query params
+ */
usePageDate?: boolean;
- utc?: boolean | null;
- xAxis?: XAXisComponentOption;
xAxisIndex?: number | number[];
}
+/**
+ * Adds listeners to the document to allow for cancelling the zoom action
+ */
+function useChartZoomCancel() {
+ const chartInstance = useRef(null);
+ const handleKeyDown = useCallback((evt: KeyboardEvent) => {
+ if (!chartInstance.current) {
+ return;
+ }
+
+ if (evt.key === 'Escape') {
+ evt.stopPropagation();
+ // Mark the component as currently cancelling a zoom selection. This allows
+ // us to prevent "restore" handlers from running
+ // "restore" removes the current chart zoom selection
+ chartInstance.current.dispatchAction({
+ type: 'restore',
+ });
+ }
+ }, []);
+
+ const handleMouseUp = useCallback(() => {
+ document.body.removeEventListener('mouseup', handleMouseUp);
+ }, []);
+
+ const handleMouseDown = useCallback(() => {
+ // Register `mouseup` and `keydown` listeners on mouse down
+ // This ensures that there is only one live listener at a time
+ // regardless of how many charts are rendered. NOTE: It's
+ // important to set `useCapture: true` in the `"keydown"` handler
+ // otherwise the Escape will close whatever modal or panel the
+ // chart is in. Those elements register their handlers _earlier_.
+ document.body.addEventListener('mouseup', handleMouseUp);
+ document.body.addEventListener('keydown', handleKeyDown, true);
+ }, [handleKeyDown, handleMouseUp]);
+
+ const handleChartReady = useCallback(
+    (chart: ECharts) => {
+ if (chartInstance.current) {
+ // remove listeners from previous chart if called multiple times
+ chartInstance.current.getDom()?.removeEventListener('mousedown', handleMouseDown);
+ }
+
+ chartInstance.current = chart;
+ const chartDom = chart.getDom();
+ chartDom.addEventListener('mousedown', handleMouseDown);
+ },
+ [handleMouseDown]
+ );
+
+ useEffect(() => {
+ return () => {
+ // Cleanup listeners on unmount
+ document.body.removeEventListener('mouseup', handleMouseUp);
+ document.body.removeEventListener('keydown', handleKeyDown);
+ chartInstance.current?.getDom()?.removeEventListener('mousedown', handleMouseDown);
+ };
+ }, [handleMouseDown, handleMouseUp, handleKeyDown]);
+
+ return {handleChartReady};
+}
+
/**
* This hook provides an alternative to using the `ChartZoom` component. It returns
* the props that would be passed to the `BaseChart` as zoomRenderProps.
*/
export function useChartZoom({
- period,
- start,
- end,
- utc,
- router,
onZoom,
usePageDate,
saveOnZoom,
- onChartReady,
- onRestore,
- onDataZoom,
- onFinished,
xAxisIndex,
showSlider,
chartZoomOptions,
- xAxis,
- disabled,
-}: Omit<Props, 'children'> = {}) {
- const [currentPeriod, setCurrentPeriod] = useState({
- period: period!,
- start: getDate(start),
- end: getDate(end),
- });
-  const [history, setHistory] = useState<Period[]>([]);
-
- const [zooming, setZooming] = useState<(() => void) | null>(null);
+}: Omit<Props, 'children'>): ZoomRenderProps {
+ const {handleChartReady} = useChartZoomCancel();
+ const location = useLocation();
+ const navigate = useNavigate();
+ const router = useRouter();
/**
- * Save current period state from period in props to be used
- * in handling chart's zoom history state
+ * Used to store the date update function so that we can call it after the chart
+ * animation is complete
*/
- const saveCurrentPeriod = useCallback(
- (newPeriod: Period) => {
- if (disabled) {
- return;
- }
-
- setCurrentPeriod({
- period: newPeriod.period,
- start: getDate(newPeriod.start),
- end: getDate(newPeriod.end),
- });
- },
- [disabled]
- );
+ const zooming = useRef<(() => void) | null>(null);
/**
* Sets the new period due to a zoom related action
@@ -134,14 +159,9 @@ export function useChartZoom({
* Saves a callback function to be called after chart animation is completed
*/
const setPeriod = useCallback(
- (newPeriod, saveHistory = false) => {
- const startFormatted = getDate(newPeriod.start);
- const endFormatted = getDate(newPeriod.end);
-
- // Save period so that we can revert back to it when using echarts "back" navigation
- if (saveHistory) {
- setHistory(curr => [...curr, currentPeriod!]);
- }
+ (newPeriod: DateTimeUpdate) => {
+ const startFormatted = getQueryTime(newPeriod.start);
+ const endFormatted = getQueryTime(newPeriod.end);
// Callback to let parent component know zoom has changed
// This is required for some more perceived responsiveness since
@@ -151,23 +171,23 @@ export function useChartZoom({
// URL parameters are changed
onZoom?.({
period: newPeriod.period,
- start: startFormatted,
- end: endFormatted,
+ start: getQueryTime(newPeriod.start),
+ end: getQueryTime(newPeriod.end),
});
- setZooming(() => {
- if (usePageDate && router) {
+ zooming.current = () => {
+ if (usePageDate) {
const newQuery = {
- ...router.location.query,
- pageStart: newPeriod.start ? getUtcDateString(newPeriod.start) : undefined,
- pageEnd: newPeriod.end ? getUtcDateString(newPeriod.end) : undefined,
+ ...location.query,
+ pageStart: startFormatted,
+ pageEnd: endFormatted,
pageStatsPeriod: newPeriod.period ?? undefined,
};
// Only push new location if query params has changed because this will cause a heavy re-render
- if (qs.stringify(newQuery) !== qs.stringify(router.location.query)) {
- router.push({
- pathname: router.location.pathname,
+ if (qs.stringify(newQuery) !== qs.stringify(location.query)) {
+ navigate({
+ pathname: location.pathname,
query: newQuery,
});
}
@@ -175,69 +195,37 @@ export function useChartZoom({
updateDateTime(
{
period: newPeriod.period,
- start: startFormatted
- ? getUtcToLocalDateObject(startFormatted)
- : startFormatted,
- end: endFormatted ? getUtcToLocalDateObject(endFormatted) : endFormatted,
+ start: startFormatted,
+ end: endFormatted,
},
router,
{save: saveOnZoom}
);
}
-
- saveCurrentPeriod(newPeriod);
- });
+ };
},
- [currentPeriod, onZoom, router, saveCurrentPeriod, saveOnZoom, usePageDate]
+ [onZoom, navigate, location, router, saveOnZoom, usePageDate]
);
- /**
- * Enable zoom immediately instead of having to toggle to zoom
- */
- const handleChartReady = chart => {
- onChartReady?.(chart);
- };
-
- /**
- * Restores the chart to initial viewport/zoom level
- *
- * Updates URL state to reflect initial params
- */
- const handleZoomRestore = (evt, chart) => {
- if (!history.length) {
- return;
- }
-
- setPeriod(history[0]);
- setHistory([]);
-
- onRestore?.(evt, chart);
- };
-
- const handleDataZoom = (evt, chart) => {
- const model = chart.getModel();
- const {startValue, endValue} = model._payload.batch[0];
-
- // if `rangeStart` and `rangeEnd` are null, then we are going back
- if (startValue === null && endValue === null) {
- const previousPeriod = history.pop();
- setHistory(history);
-
- if (!previousPeriod) {
- return;
+ const handleDataZoom = useCallback(
+ evt => {
+      // @ts-expect-error evt.startValue and evt.endValue exist at runtime but are not declared on the echarts event type
+ const {startValue, endValue} = evt.batch[0] as {
+ endValue: number | null;
+ startValue: number | null;
+ };
+
+      // `startValue`/`endValue` are null when navigating back; only update the period when both are set (zoom in)
+ if (startValue && endValue) {
+ setPeriod({
+ period: null,
+ start: startValue ? getUtcDateString(startValue) : null,
+ end: endValue ? getUtcDateString(endValue) : null,
+ });
}
-
- setPeriod(previousPeriod);
- } else {
- setPeriod(
- // Add a day so we go until the end of the day (e.g. next day at midnight)
- {period: null, start: moment.utc(startValue), end: moment.utc(endValue)},
- true
- );
- }
-
- onDataZoom?.(evt, chart);
- };
+ },
+ [setPeriod]
+ );
/**
* Chart event when *any* rendering+animation finishes
@@ -246,14 +234,15 @@ export function useChartZoom({
* we can let the native zoom animation on the chart complete
* before we update URL state and re-render
*/
- const handleChartFinished = (_props, chart) => {
- if (typeof zooming === 'function') {
- zooming();
- setZooming(null);
+ const handleChartFinished = useCallback((_props, chart) => {
+ if (typeof zooming.current === 'function') {
+ zooming.current();
+ zooming.current = null;
}
// This attempts to activate the area zoom toolbox feature
- const zoom = chart._componentsViews?.find(c => c._features?.dataZoom);
+ // @ts-expect-error _componentsViews is private
+ const zoom = chart._componentsViews?.find((c: any) => c._features?.dataZoom);
if (zoom && !zoom._features.dataZoom._isZoomActive) {
// Calling dispatchAction will re-trigger handleChartFinished
chart.dispatchAction({
@@ -262,31 +251,19 @@ export function useChartZoom({
dataZoomSelectActive: true,
});
}
+ }, []);
- if (typeof onFinished === 'function') {
- onFinished(_props, chart);
- }
- };
-
- const startProp = start ? getUtcToLocalDateObject(start) : undefined;
- const endProp = end ? getUtcToLocalDateObject(end) : undefined;
-
- const dataZoomProp = useMemo(() => {
+ const dataZoomProp = useMemo(() => {
+ const zoomInside = DataZoomInside({
+ xAxisIndex,
+ ...(chartZoomOptions as InsideDataZoomComponentOption),
+ });
return showSlider
- ? [
- ...DataZoomSlider({xAxisIndex, ...chartZoomOptions}),
- ...DataZoomInside({
- xAxisIndex,
- ...(chartZoomOptions as InsideDataZoomComponentOption),
- }),
- ]
- : DataZoomInside({
- xAxisIndex,
- ...(chartZoomOptions as InsideDataZoomComponentOption),
- });
+ ? [...DataZoomSlider({xAxisIndex, ...chartZoomOptions}), ...zoomInside]
+ : zoomInside;
}, [chartZoomOptions, showSlider, xAxisIndex]);
- const toolBox = useMemo(
+ const toolBox = useMemo(
() =>
ToolBox(
{},
@@ -307,20 +284,14 @@ export function useChartZoom({
[]
);
- const renderProps = {
+ const renderProps: ZoomRenderProps = {
// Zooming only works when grouped by date
isGroupedByDate: true,
- utc: utc ?? undefined,
- start: startProp,
- end: endProp,
- xAxis,
dataZoom: dataZoomProp,
- showTimeInTooltip: true,
toolBox,
- onChartReady: handleChartReady,
onDataZoom: handleDataZoom,
onFinished: handleChartFinished,
- onRestore: handleZoomRestore,
+ onChartReady: handleChartReady,
};
return renderProps;
diff --git a/static/app/components/charts/utils.tsx b/static/app/components/charts/utils.tsx
index 73028c4a1f107..1cd65eee02086 100644
--- a/static/app/components/charts/utils.tsx
+++ b/static/app/components/charts/utils.tsx
@@ -8,7 +8,11 @@ import moment from 'moment-timezone';
import {DEFAULT_STATS_PERIOD} from 'sentry/constants';
import type {PageFilters} from 'sentry/types/core';
import type {ReactEchartsRef, Series} from 'sentry/types/echarts';
-import type {EventsStats, MultiSeriesEventsStats} from 'sentry/types/organization';
+import type {
+ EventsStats,
+ GroupedMultiSeriesEventsStats,
+ MultiSeriesEventsStats,
+} from 'sentry/types/organization';
import {defined, escape} from 'sentry/utils';
import {getFormattedDate} from 'sentry/utils/dates';
import type {TableDataWithTitle} from 'sentry/utils/discover/discoverQuery';
@@ -216,7 +220,7 @@ export function getSeriesSelection(
}
function isSingleSeriesStats(
- data: MultiSeriesEventsStats | EventsStats
+ data: MultiSeriesEventsStats | EventsStats | GroupedMultiSeriesEventsStats
): data is EventsStats {
return (
(defined(data.data) || defined(data.totals)) &&
@@ -226,7 +230,12 @@ function isSingleSeriesStats(
}
export function isMultiSeriesStats(
- data: MultiSeriesEventsStats | EventsStats | null | undefined,
+ data:
+ | MultiSeriesEventsStats
+ | EventsStats
+ | GroupedMultiSeriesEventsStats
+ | null
+ | undefined,
isTopN?: boolean
): data is MultiSeriesEventsStats {
return (
diff --git a/static/app/components/codeSnippet.tsx b/static/app/components/codeSnippet.tsx
index c30c62ef8cdeb..0100efaeb7a05 100644
--- a/static/app/components/codeSnippet.tsx
+++ b/static/app/components/codeSnippet.tsx
@@ -225,8 +225,8 @@ const Header = styled('div')<{isSolid: boolean}>`
${p =>
p.isSolid
? `
- margin: 0 ${space(0.5)};
- border-bottom: solid 1px var(--prism-highlight-accent);
+ padding: 0 ${space(0.5)};
+ border-bottom: solid 1px ${p.theme.innerBorder};
`
: `
justify-content: flex-end;
diff --git a/static/app/components/commitRow.tsx b/static/app/components/commitRow.tsx
index b2380235e869d..f71c727f35d9e 100644
--- a/static/app/components/commitRow.tsx
+++ b/static/app/components/commitRow.tsx
@@ -17,12 +17,12 @@ import Version from 'sentry/components/version';
import VersionHoverCard from 'sentry/components/versionHoverCard';
import {IconQuestion, IconWarning} from 'sentry/icons';
import {t, tct} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import {space} from 'sentry/styles/space';
import type {Commit} from 'sentry/types/integrations';
import type {AvatarProject} from 'sentry/types/project';
import {trackAnalytics} from 'sentry/utils/analytics';
import useOrganization from 'sentry/utils/useOrganization';
+import {useUser} from 'sentry/utils/useUser';
import {Divider} from 'sentry/views/issueDetails/divider';
import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils';
@@ -49,6 +49,7 @@ function CommitRow({
onCommitClick,
project,
}: CommitRowProps) {
+ const user = useUser();
const hasStreamlinedUI = useHasStreamlinedUI();
const organization = useOrganization();
const handleInviteClick = useCallback(() => {
@@ -74,7 +75,6 @@ function CommitRow({
});
}, [commit.author, organization]);
- const user = ConfigStore.get('user');
const isUser = user?.id === commit.author?.id;
const firstRelease = commit.releases?.[0];
diff --git a/static/app/components/compactSelect/control.tsx b/static/app/components/compactSelect/control.tsx
index 8b63cb8128481..141d148d7a4f7 100644
--- a/static/app/components/compactSelect/control.tsx
+++ b/static/app/components/compactSelect/control.tsx
@@ -121,7 +121,7 @@ export interface ControlProps
/**
* Message to be displayed when all options have been filtered out (via search).
*/
- emptyMessage?: string;
+ emptyMessage?: React.ReactNode;
/**
* Whether to render a grid list rather than a list box.
*
@@ -215,6 +215,7 @@ export interface ControlProps
*/
export function Control({
// Control props
+ autoFocus,
trigger,
triggerLabel: triggerLabelProp,
triggerProps,
@@ -297,6 +298,11 @@ export function Control({
?.focus();
}
+ // Prevent form submissions on Enter key press in search box
+ if (e.key === 'Enter') {
+ e.preventDefault();
+ }
+
// Continue propagation, otherwise the overlay won't close on Esc key press
e.continuePropagation();
},
@@ -395,6 +401,20 @@ export function Control({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [menuBody, hideOptions]);
+ const wasRefAvailable = useRef(false);
+ useEffect(() => {
+ // Trigger ref is set by a setState in useOverlay, so we need to wait for it to be available
+ // We also need to make sure we only focus once
+ if (!triggerRef.current || wasRefAvailable.current) {
+ return;
+ }
+ wasRefAvailable.current = true;
+
+ if (autoFocus && !disabled) {
+ triggerRef.current.focus();
+ }
+ }, [autoFocus, disabled, triggerRef]);
+
/**
* The menu's full width, before any option has been filtered out. Used to maintain a
* constant width while the user types into the search box.
diff --git a/static/app/components/dataExport.tsx b/static/app/components/dataExport.tsx
index 6ab45276d578a..5f30338ebd5b2 100644
--- a/static/app/components/dataExport.tsx
+++ b/static/app/components/dataExport.tsx
@@ -7,6 +7,8 @@ import Feature from 'sentry/components/acl/feature';
import {Button} from 'sentry/components/button';
import {t} from 'sentry/locale';
import type {Organization} from 'sentry/types/organization';
+import useApi from 'sentry/utils/useApi';
+import useOrganization from 'sentry/utils/useOrganization';
import withApi from 'sentry/utils/withApi';
import withOrganization from 'sentry/utils/withOrganization';
@@ -30,39 +32,20 @@ interface DataExportProps {
icon?: React.ReactNode;
}
-function DataExport({
- api,
- children,
- disabled,
- organization,
+export function useDataExport({
payload,
- icon,
-}: DataExportProps): React.ReactElement {
- const unmountedRef = useRef(false);
- const [inProgress, setInProgress] = useState(false);
-
- // We clear the indicator if export props change so that the user
- // can fire another export without having to wait for the previous one to finish.
- useEffect(() => {
- if (inProgress) {
- setInProgress(false);
- }
- // We are skipping the inProgress dependency because it would have fired on each handleDataExport
- // call and would have immediately turned off the value giving users no feedback on their click action.
- // An alternative way to handle this would have probably been to key the component by payload/queryType,
- // but that seems like it can be a complex object so tracking changes could result in very brittle behavior.
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [payload.queryType, payload.queryInfo]);
-
- // Tracking unmounting of the component to prevent setState call on unmounted component
- useEffect(() => {
- return () => {
- unmountedRef.current = true;
- };
- }, []);
+ inProgressCallback,
+ unmountedRef,
+}: {
+ payload: DataExportPayload;
+ inProgressCallback?: (inProgress: boolean) => void;
+ unmountedRef?: React.RefObject;
+}) {
+ const organization = useOrganization();
+ const api = useApi();
- const handleDataExport = useCallback(() => {
- setInProgress(true);
+ return useCallback(() => {
+ inProgressCallback?.(true);
// This is a fire and forget request.
api
@@ -76,7 +59,7 @@ function DataExport({
})
.then(([_data, _, response]) => {
// If component has unmounted, don't do anything
- if (unmountedRef.current) {
+ if (unmountedRef?.current) {
return;
}
@@ -90,7 +73,7 @@ function DataExport({
})
.catch(err => {
// If component has unmounted, don't do anything
- if (unmountedRef.current) {
+ if (unmountedRef?.current) {
return;
}
const message =
@@ -100,9 +83,51 @@ function DataExport({
);
addErrorMessage(message);
- setInProgress(false);
+ inProgressCallback?.(false);
});
- }, [payload.queryInfo, payload.queryType, organization.slug, api]);
+ }, [
+ payload.queryInfo,
+ payload.queryType,
+ organization.slug,
+ api,
+ inProgressCallback,
+ unmountedRef,
+ ]);
+}
+
+function DataExport({
+ children,
+ disabled,
+ payload,
+ icon,
+}: DataExportProps): React.ReactElement {
+ const unmountedRef = useRef(false);
+ const [inProgress, setInProgress] = useState(false);
+ const handleDataExport = useDataExport({
+ payload,
+ unmountedRef,
+ inProgressCallback: setInProgress,
+ });
+
+ // We clear the indicator if export props change so that the user
+ // can fire another export without having to wait for the previous one to finish.
+ useEffect(() => {
+ if (inProgress) {
+ setInProgress(false);
+ }
+ // We are skipping the inProgress dependency because it would have fired on each handleDataExport
+ // call and would have immediately turned off the value giving users no feedback on their click action.
+ // An alternative way to handle this would have probably been to key the component by payload/queryType,
+ // but that seems like it can be a complex object so tracking changes could result in very brittle behavior.
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [payload.queryType, payload.queryInfo]);
+
+ // Tracking unmounting of the component to prevent setState call on unmounted component
+ useEffect(() => {
+ return () => {
+ unmountedRef.current = true;
+ };
+ }, []);
return (
diff --git a/static/app/components/dateTime.tsx b/static/app/components/dateTime.tsx
index 806ea26c90371..64f08aba4cf0e 100644
--- a/static/app/components/dateTime.tsx
+++ b/static/app/components/dateTime.tsx
@@ -1,7 +1,7 @@
import moment from 'moment-timezone';
-import ConfigStore from 'sentry/stores/configStore';
import {getFormat} from 'sentry/utils/dates';
+import {useUser} from 'sentry/utils/useUser';
export interface DateTimeProps extends React.HTMLAttributes {
/**
@@ -59,7 +59,7 @@ export function DateTime({
forcedTimezone,
...props
}: DateTimeProps) {
- const user = ConfigStore.get('user');
+ const user = useUser();
const options = user?.options;
const formatString =
diff --git a/static/app/components/deprecatedAssigneeSelector.spec.tsx b/static/app/components/deprecatedAssigneeSelector.spec.tsx
index e955edaddbd4e..c11f5d77679d8 100644
--- a/static/app/components/deprecatedAssigneeSelector.spec.tsx
+++ b/static/app/components/deprecatedAssigneeSelector.spec.tsx
@@ -1,5 +1,4 @@
import {GroupFixture} from 'sentry-fixture/group';
-import {MemberFixture} from 'sentry-fixture/member';
import {ProjectFixture} from 'sentry-fixture/project';
import {TeamFixture} from 'sentry-fixture/team';
import {UserFixture} from 'sentry-fixture/user';
@@ -15,19 +14,26 @@ import IndicatorStore from 'sentry/stores/indicatorStore';
import MemberListStore from 'sentry/stores/memberListStore';
import ProjectsStore from 'sentry/stores/projectsStore';
import TeamStore from 'sentry/stores/teamStore';
+import type {Group} from 'sentry/types/group';
+import type {Team} from 'sentry/types/organization';
+import type {Project} from 'sentry/types/project';
+import type {User} from 'sentry/types/user';
jest.mock('sentry/actionCreators/modal', () => ({
openInviteMembersModal: jest.fn(),
}));
describe('DeprecatedAssigneeSelector', () => {
- let assignMock;
- let assignGroup2Mock;
- let USER_1, USER_2, USER_3, USER_4;
- let TEAM_1;
- let PROJECT_1;
- let GROUP_1;
- let GROUP_2;
+ let assignMock: jest.Mock;
+ let assignGroup2Mock: jest.Mock;
+ let USER_1: User;
+ let USER_2: User;
+ let USER_3: User;
+ let USER_4: User;
+ let TEAM_1: Team;
+ let PROJECT_1: Project;
+ let GROUP_1: Group;
+ let GROUP_2: Group;
beforeEach(() => {
USER_1 = UserFixture({
@@ -45,7 +51,7 @@ describe('DeprecatedAssigneeSelector', () => {
name: 'J J',
email: 'jj@example.com',
});
- USER_4 = MemberFixture({
+ USER_4 = UserFixture({
id: '4',
name: 'Jane Doe',
email: 'janedoe@example.com',
diff --git a/static/app/components/discover/quickContextCommitRow.tsx b/static/app/components/discover/quickContextCommitRow.tsx
index 65779b6a84e2e..4584885a08ab9 100644
--- a/static/app/components/discover/quickContextCommitRow.tsx
+++ b/static/app/components/discover/quickContextCommitRow.tsx
@@ -9,11 +9,11 @@ import PanelItem from 'sentry/components/panels/panelItem';
import TextOverflow from 'sentry/components/textOverflow';
import {Tooltip} from 'sentry/components/tooltip';
import {t, tct} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import {space} from 'sentry/styles/space';
+import {useUser} from 'sentry/utils/useUser';
function QuickContextCommitRow({commit}: CommitRowProps) {
- const user = ConfigStore.get('user');
+ const user = useUser();
const isUser = user?.id === commit.author?.id;
const hasPullRequestURL = commit.pullRequest?.externalUrl;
const commitMessage = formatCommitMessage(commit.message);
diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx
index 9675cc3e8c031..9fb4551b4cc71 100644
--- a/static/app/components/draggableTabs/draggableTabList.tsx
+++ b/static/app/components/draggableTabs/draggableTabList.tsx
@@ -29,9 +29,9 @@ import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {defined} from 'sentry/utils';
import {trackAnalytics} from 'sentry/utils/analytics';
-import {browserHistory} from 'sentry/utils/browserHistory';
import {useDimensions} from 'sentry/utils/useDimensions';
import {useDimensionsMultiple} from 'sentry/utils/useDimensionsMultiple';
+import {useNavigate} from 'sentry/utils/useNavigate';
import useOrganization from 'sentry/utils/useOrganization';
import type {DraggableTabListItemProps} from './item';
@@ -125,6 +125,7 @@ function Tabs({
state,
className,
onReorder,
+ onReorderComplete,
tabVariant,
setTabRefs,
tabs,
@@ -132,6 +133,7 @@ function Tabs({
hoveringKey,
setHoveringKey,
tempTabActive,
+ editingTabKey,
}: {
ariaProps: AriaTabListOptions;
hoveringKey: Key | 'addView' | null;
@@ -145,7 +147,9 @@ function Tabs({
tempTabActive: boolean;
className?: string;
disabled?: boolean;
+ editingTabKey?: string;
onChange?: (key: string | number) => void;
+ onReorderComplete?: () => void;
tabVariant?: BaseTabProps['variant'];
value?: string | number;
}) {
@@ -228,10 +232,14 @@ function Tabs({
dragConstraints={dragConstraints} // dragConstraints are the bounds that the tab can be dragged within
dragElastic={0} // Prevents the tab from being dragged outside of the dragConstraints (w/o this you can drag it outside but it'll spring back)
dragTransition={{bounceStiffness: 400, bounceDamping: 40}} // Recovers spring behavior thats lost when using dragElastic=0
- transition={{delay: -0.1}} // Skips the first few frames of the animation that make the tab appear to shrink before growing
+ transition={{duration: 0.1}}
layout
+ drag={item.key !== editingTabKey} // Disable dragging if the tab is being edited
onDrag={() => setIsDragging(true)}
- onDragEnd={() => setIsDragging(false)}
+ onDragEnd={() => {
+ setIsDragging(false);
+ onReorderComplete?.();
+ }}
onHoverStart={() => setHoveringKey(item.key)}
onHoverEnd={() => setHoveringKey(null)}
initial={false}
@@ -245,7 +253,12 @@ function Tabs({
variant={tabVariant}
/>
-
+
))}
@@ -258,10 +271,12 @@ function BaseDraggableTabList({
className,
outerWrapStyles,
onReorder,
+ onReorderComplete,
onAddView,
tabVariant = 'filled',
...props
}: BaseDraggableTabListProps) {
+ const navigate = useNavigate();
const [hoveringKey, setHoveringKey] = useState(null);
const {rootProps, setTabListState} = useContext(TabsContext);
const organization = useOrganization();
@@ -292,7 +307,7 @@ function BaseDraggableTabList({
organization,
});
- browserHistory.push(linkTo);
+ navigate(linkTo);
},
isDisabled: disabled,
keyboardActivation,
@@ -324,6 +339,7 @@ function BaseDraggableTabList({
state={state}
className={className}
onReorder={onReorder}
+ onReorderComplete={onReorderComplete}
tabVariant={tabVariant}
setTabRefs={setTabElements}
tabs={persistentTabs}
@@ -331,6 +347,7 @@ function BaseDraggableTabList({
hoveringKey={hoveringKey}
setHoveringKey={setHoveringKey}
tempTabActive={!!tempTab}
+ editingTabKey={props.editingTabKey}
/>
{
onReorder: (newOrder: Node[]) => void;
className?: string;
+ editingTabKey?: string;
hideBorder?: boolean;
onAddView?: React.MouseEventHandler;
+ onReorderComplete?: () => void;
outerWrapStyles?: React.CSSProperties;
showTempTab?: boolean;
tabVariant?: BaseTabProps['variant'];
diff --git a/static/app/components/eventOrGroupExtraDetails.tsx b/static/app/components/eventOrGroupExtraDetails.tsx
index 8a078df2c06ad..c4745583a7ac5 100644
--- a/static/app/components/eventOrGroupExtraDetails.tsx
+++ b/static/app/components/eventOrGroupExtraDetails.tsx
@@ -1,8 +1,11 @@
+import {Fragment} from 'react';
+import {css} from '@emotion/react';
import styled from '@emotion/styled';
+import ErrorLevel from 'sentry/components/events/errorLevel';
import EventAnnotation from 'sentry/components/events/eventAnnotation';
import GlobalSelectionLink from 'sentry/components/globalSelectionLink';
-import InboxShortId from 'sentry/components/group/inboxBadges/shortId';
+import ShortId from 'sentry/components/group/inboxBadges/shortId';
import TimesTag from 'sentry/components/group/inboxBadges/timesTag';
import UnhandledTag from 'sentry/components/group/inboxBadges/unhandledTag';
import IssueReplayCount from 'sentry/components/group/issueReplayCount';
@@ -16,6 +19,9 @@ import {space} from 'sentry/styles/space';
import type {Event} from 'sentry/types/event';
import type {Group} from 'sentry/types/group';
import type {Organization} from 'sentry/types/organization';
+import {defined} from 'sentry/utils';
+import {eventTypeHasLogLevel, getTitle} from 'sentry/utils/events';
+import useReplayCountForIssues from 'sentry/utils/replayCount/useReplayCountForIssues';
import {projectCanLinkToReplay} from 'sentry/utils/replays/projectSupportsReplay';
import withOrganization from 'sentry/utils/withOrganization';
@@ -23,9 +29,36 @@ type Props = {
data: Event | Group;
organization: Organization;
showAssignee?: boolean;
+ showLifetime?: boolean;
};
-function EventOrGroupExtraDetails({data, showAssignee, organization}: Props) {
+function Lifetime({
+ firstSeen,
+ lastSeen,
+ lifetime,
+}: {
+ firstSeen: string;
+ lastSeen: string;
+ lifetime?: Group['lifetime'];
+}) {
+ if (!lifetime && !firstSeen && !lastSeen) {
+ return ;
+ }
+
+ return (
+
+ );
+}
+
+function EventOrGroupExtraDetails({
+ data,
+ showAssignee,
+ organization,
+ showLifetime = true,
+}: Props) {
const {
id,
lastSeen,
@@ -42,76 +75,94 @@ function EventOrGroupExtraDetails({data, showAssignee, organization}: Props) {
} = data as Group;
const issuesPath = `/organizations/${organization.slug}/issues/`;
+ const {getReplayCountForIssue} = useReplayCountForIssues();
const showReplayCount =
organization.features.includes('session-replay') &&
- projectCanLinkToReplay(organization, project);
+ projectCanLinkToReplay(organization, project) &&
+ data.issueCategory &&
+ !!getReplayCountForIssue(data.id, data.issueCategory);
- return (
-
- {shortId && (
-
- )
- }
- />
- )}
- {isUnhandled && }
- {!lifetime && !firstSeen && !lastSeen ? (
-
- ) : (
- : null,
+ shortId ? (
+
+ }
+ />
+ ) : null,
+ isUnhandled ? : null,
+ showLifetime ? (
+
+ ) : null,
+ hasNewLayout && subtitle ? {subtitle} : null,
+ numComments > 0 ? (
+
+
- )}
- {/* Always display comment count on inbox */}
- {numComments > 0 && (
-
-
- {numComments}
-
- )}
- {showReplayCount && }
- {logger && (
-
-
- {logger}
-
-
- )}
- {annotations?.map((annotation, key) => (
-
- {annotation.displayName}
-
- ))}
-
- {showAssignee && assignedTo && (
- {tct('Assigned to [name]', {name: assignedTo.name})}
- )}
+ {numComments}
+
+ ) : null,
+ showReplayCount ? : null,
+ logger ? (
+
+
+ {logger}
+
+
+ ) : null,
+ ...(annotations?.map((annotation, key) => (
+
+ {annotation.displayName}
+
+ )) ?? []),
+ showAssignee && assignedTo ? (
+ {tct('Assigned to [name]', {name: assignedTo.name})}
+ ) : null,
+ ].filter(defined);
+
+ return (
+
+ {items.map((item, i) => {
+ if (!item) {
+ return null;
+ }
+
+ if (!hasNewLayout) {
+ return {item} ;
+ }
+
+ return (
+
+ {item}
+ {i < items.length - 1 ? : null}
+
+ );
+ })}
);
}
-const GroupExtra = styled('div')`
+const GroupExtra = styled('div')<{hasNewLayout: boolean}>`
display: inline-grid;
grid-auto-flow: column dense;
- gap: ${space(1.5)};
+ gap: ${p => (p.hasNewLayout ? space(0.75) : space(1.5))};
justify-content: start;
align-items: center;
color: ${p => p.theme.textColor};
@@ -121,15 +172,27 @@ const GroupExtra = styled('div')`
white-space: nowrap;
line-height: 1.2;
- a {
- color: inherit;
- }
+ ${p =>
+ p.hasNewLayout &&
+ css`
+ color: ${p.theme.subText};
+ & > a {
+ color: ${p.theme.subText};
+ }
+ `}
@media (min-width: ${p => p.theme.breakpoints.xlarge}) {
line-height: 1;
}
`;
+const Separator = styled('div')`
+ height: 10px;
+ width: 1px;
+ background-color: ${p => p.theme.innerBorder};
+ border-radius: 1px;
+`;
+
const ShadowlessProjectBadge = styled(ProjectBadge)`
* > img {
box-shadow: none;
@@ -144,17 +207,35 @@ const CommentsLink = styled(Link)`
color: ${p => p.theme.textColor};
`;
-const AnnotationNoMargin = styled(EventAnnotation)`
+const AnnotationNoMargin = styled(EventAnnotation)<{hasNewLayout: boolean}>`
margin-left: 0;
padding-left: 0;
border-left: none;
- & > a {
- color: ${p => p.theme.textColor};
- }
+
+ ${p =>
+ !p.hasNewLayout &&
+ css`
+ & > a {
+ color: ${p.theme.textColor};
+ }
+ `}
+
+ ${p =>
+ p.hasNewLayout &&
+ css`
+ & > a:hover {
+ color: ${p.theme.linkHoverColor};
+ }
+ `}
`;
const LoggerAnnotation = styled(AnnotationNoMargin)`
color: ${p => p.theme.textColor};
`;
+const Location = styled('div')`
+ font-size: ${p => p.theme.fontSizeSmall};
+ color: ${p => p.theme.subText};
+`;
+
export default withOrganization(EventOrGroupExtraDetails);
diff --git a/static/app/components/eventOrGroupHeader.tsx b/static/app/components/eventOrGroupHeader.tsx
index 8e5a88f9aa417..bf2046d4ad527 100644
--- a/static/app/components/eventOrGroupHeader.tsx
+++ b/static/app/components/eventOrGroupHeader.tsx
@@ -15,6 +15,7 @@ import type {Organization} from 'sentry/types/organization';
import {getLocation, getMessage, isTombstone} from 'sentry/utils/events';
import {useLocation} from 'sentry/utils/useLocation';
import withOrganization from 'sentry/utils/withOrganization';
+import {createIssueLink} from 'sentry/views/issueList/utils';
import EventTitleError from './eventTitleError';
@@ -46,6 +47,8 @@ function EventOrGroupHeader({
}: EventOrGroupHeaderProps) {
const location = useLocation();
+ const hasNewLayout = organization.features.includes('issue-stream-table-layout');
+
function getTitleChildren() {
const {isBookmarked, hasSeen} = data as Group;
return (
@@ -69,8 +72,7 @@ function EventOrGroupHeader({
}
function getTitle() {
- const {id, status} = data as Group;
- const {eventID: latestEventId, groupID} = data as Event;
+ const {status} = data as Group;
const commonEleProps = {
'data-test-id': status === 'resolved' ? 'resolved-issue' : null,
@@ -82,32 +84,18 @@ function EventOrGroupHeader({
);
}
- // If we have passed in a custom event ID, use it; otherwise use default
- const finalEventId = eventId ?? latestEventId;
-
return (
{getTitleChildren()}
@@ -120,13 +108,16 @@ function EventOrGroupHeader({
return (
{getTitle()}
- {eventLocation && {eventLocation} }
-
+ {eventLocation && !hasNewLayout ? {eventLocation} : null}
+ {!hasNewLayout ? (
+
+ ) : null}
);
}
@@ -140,6 +131,7 @@ const truncateStyles = css`
const Title = styled('div')`
margin-bottom: ${space(0.25)};
+ font-size: ${p => p.theme.fontSizeLarge};
& em {
font-size: ${p => p.theme.fontSizeMedium};
font-style: normal;
@@ -174,6 +166,7 @@ function Location(props) {
const StyledEventMessage = styled(EventMessage)`
margin: 0 0 5px;
gap: ${space(0.5)};
+ font-size: inherit;
`;
const IconWrapper = styled('span')`
diff --git a/static/app/components/eventOrGroupTitle.tsx b/static/app/components/eventOrGroupTitle.tsx
index 22bb487b7098b..d3a0cc4ea74b0 100644
--- a/static/app/components/eventOrGroupTitle.tsx
+++ b/static/app/components/eventOrGroupTitle.tsx
@@ -3,7 +3,8 @@ import styled from '@emotion/styled';
import type {Event} from 'sentry/types/event';
import type {BaseGroup, GroupTombstoneHelper} from 'sentry/types/group';
-import {getTitle, isTombstone} from 'sentry/utils/events';
+import {getMessage, getTitle, isTombstone} from 'sentry/utils/events';
+import useOrganization from 'sentry/utils/useOrganization';
import GroupPreviewTooltip from './groupPreviewTooltip';
@@ -20,13 +21,19 @@ function EventOrGroupTitle({
className,
query,
}: EventOrGroupTitleProps) {
+ const organization = useOrganization({allowNull: true});
const {id, groupID} = data as Event;
const {title, subtitle} = getTitle(data);
const titleLabel = title ?? '';
+ const hasNewLayout =
+ organization?.features.includes('issue-stream-table-layout') ?? false;
+
+ const secondaryTitle = hasNewLayout ? getMessage(data) : subtitle;
+
return (
-
+
{!isTombstone(data) && withStackTracePreview ? (
- {titleLabel}
+ {hasNewLayout ? (
+ {titleLabel}
+ ) : (
+ titleLabel
+ )}
) : (
titleLabel
)}
- {subtitle && (
+ {secondaryTitle && (
- {subtitle}
+ {hasNewLayout ? (
+ {secondaryTitle}
+ ) : (
+ {secondaryTitle}
+ )}
)}
@@ -69,9 +84,23 @@ const Subtitle = styled('em')`
height: 100%;
`;
-const Wrapper = styled('span')`
- font-size: ${p => p.theme.fontSizeLarge};
+const Message = styled('span')`
+ ${p => p.theme.overflowEllipsis};
+ display: inline-block;
+ height: 100%;
+ color: ${p => p.theme.textColor};
+ font-weight: ${p => p.theme.fontWeightNormal};
+`;
+
+const Title = styled('span')`
+ ${p => p.theme.overflowEllipsis};
+ display: inline-block;
+ color: ${p => p.theme.textColor};
+`;
+
+const Wrapper = styled('span')<{hasNewLayout: boolean}>`
display: inline-grid;
grid-template-columns: auto max-content 1fr max-content;
- align-items: baseline;
+
+ align-items: ${p => (p.hasNewLayout ? 'normal' : 'baseline')};
`;
diff --git a/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx b/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx
index 7118c8498d49f..96b7bc66486db 100644
--- a/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx
+++ b/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx
@@ -1,7 +1,7 @@
import {useCallback} from 'react';
-import ConfigStore from 'sentry/stores/configStore';
import {useLocalStorageState} from 'sentry/utils/useLocalStorageState';
+import {useUser} from 'sentry/utils/useUser';
type LocalState = {
individualConsent: boolean;
@@ -11,7 +11,7 @@ export function useOpenAISuggestionLocalStorage(): [
LocalState,
(newState: Partial) => void,
] {
- const user = ConfigStore.get('user');
+ const user = useUser();
const [localStorageState, setLocalStorageState] = useLocalStorageState(
`open-ai-suggestion:${user.id}`,
diff --git a/static/app/components/events/autofix/autofixBanner.tsx b/static/app/components/events/autofix/autofixBanner.tsx
index efb45193fbc74..3d1199e0b80de 100644
--- a/static/app/components/events/autofix/autofixBanner.tsx
+++ b/static/app/components/events/autofix/autofixBanner.tsx
@@ -41,7 +41,10 @@ function SuccessfulSetup({
// we don't reopen it immediately, and instead let the button handle this itself.
shouldCloseOnInteractOutside: element => {
const viewAllButton = openButtonRef.current;
- if (viewAllButton?.contains(element)) {
+ if (
+ viewAllButton?.contains(element) ||
+ document.getElementById('sentry-feedback')?.contains(element)
+ ) {
return false;
}
return true;
diff --git a/static/app/components/events/autofix/autofixChanges.spec.tsx b/static/app/components/events/autofix/autofixChanges.spec.tsx
index 26deda563c156..53c8689bc81a6 100644
--- a/static/app/components/events/autofix/autofixChanges.spec.tsx
+++ b/static/app/components/events/autofix/autofixChanges.spec.tsx
@@ -18,6 +18,7 @@ import {
describe('AutofixChanges', function () {
const defaultProps = {
groupId: '1',
+ runId: '1',
onRetry: jest.fn(),
step: AutofixStepFixture({
type: AutofixStepType.CHANGES,
diff --git a/static/app/components/events/autofix/autofixChanges.tsx b/static/app/components/events/autofix/autofixChanges.tsx
index 1bce114ece127..e804438296065 100644
--- a/static/app/components/events/autofix/autofixChanges.tsx
+++ b/static/app/components/events/autofix/autofixChanges.tsx
@@ -29,6 +29,7 @@ import useApi from 'sentry/utils/useApi';
type AutofixChangesProps = {
groupId: string;
onRetry: () => void;
+ runId: string;
step: AutofixChangesStep;
};
@@ -168,9 +169,11 @@ function PullRequestLinkOrCreateButton({
function AutofixRepoChange({
change,
groupId,
+ runId,
}: {
change: AutofixCodebaseChange;
groupId: string;
+ runId: string;
}) {
return (
@@ -181,7 +184,12 @@ function AutofixRepoChange({
-
+
);
}
@@ -193,7 +201,7 @@ const cardAnimationProps: AnimationProps = {
transition: testableTransition({duration: 0.3}),
};
-export function AutofixChanges({step, onRetry, groupId}: AutofixChangesProps) {
+export function AutofixChanges({step, onRetry, groupId, runId}: AutofixChangesProps) {
const data = useAutofixData({groupId});
if (step.status === 'ERROR' || data?.status === 'ERROR') {
@@ -242,7 +250,7 @@ export function AutofixChanges({step, onRetry, groupId}: AutofixChangesProps) {
{step.changes.map((change, i) => (
{i > 0 && }
-
+
))}
@@ -267,7 +275,7 @@ const ChangesContainer = styled('div')`
border: 1px solid ${p => p.theme.innerBorder};
border-radius: ${p => p.theme.borderRadius};
overflow: hidden;
- box-shadow: ${p => p.theme.dropShadowHeavy};
+ box-shadow: ${p => p.theme.dropShadowMedium};
padding-left: ${space(2)};
padding-right: ${space(2)};
padding-top: ${space(1)};
diff --git a/static/app/components/events/autofix/autofixDiff.spec.tsx b/static/app/components/events/autofix/autofixDiff.spec.tsx
index 080525059d5d7..2d26c5da597d7 100644
--- a/static/app/components/events/autofix/autofixDiff.spec.tsx
+++ b/static/app/components/events/autofix/autofixDiff.spec.tsx
@@ -1,15 +1,31 @@
import {AutofixDiffFilePatch} from 'sentry-fixture/autofixDiffFilePatch';
-import {render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary';
+import {
+ render,
+ screen,
+ userEvent,
+ waitFor,
+ within,
+} from 'sentry-test/reactTestingLibrary';
import {textWithMarkupMatcher} from 'sentry-test/utils';
+import {addErrorMessage} from 'sentry/actionCreators/indicator';
import {AutofixDiff} from 'sentry/components/events/autofix/autofixDiff';
+jest.mock('sentry/actionCreators/indicator');
+
describe('AutofixDiff', function () {
const defaultProps = {
diff: [AutofixDiffFilePatch()],
+ groupId: '1',
+ runId: '1',
};
+ beforeEach(() => {
+ MockApiClient.clearMockResponses();
+ (addErrorMessage as jest.Mock).mockClear();
+ });
+
it('displays a modified file diff correctly', function () {
render( );
@@ -68,4 +84,71 @@ describe('AutofixDiff', function () {
await userEvent.click(screen.getByRole('button', {name: 'Toggle file diff'}));
expect(screen.getAllByTestId('line-context')).toHaveLength(6);
});
+
+ it('can edit changes', async function () {
+ render( );
+
+ await userEvent.click(screen.getByRole('button', {name: 'Edit changes'}));
+
+ expect(
+ screen.getByText('Editing src/sentry/processing/backpressure/memory.py')
+ ).toBeInTheDocument();
+
+ const textarea = screen.getByRole('textbox');
+ await userEvent.clear(textarea);
+ await userEvent.type(textarea, 'New content');
+
+ MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ });
+
+ await userEvent.click(screen.getByRole('button', {name: 'Save'}));
+
+ await waitFor(() => {
+ expect(
+ screen.queryByText('Editing src/sentry/processing/backpressure/memory.py')
+ ).not.toBeInTheDocument();
+ });
+ });
+
+ it('can reject changes', async function () {
+ render( );
+
+ MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ });
+
+ await userEvent.click(screen.getByRole('button', {name: 'Reject changes'}));
+
+ await waitFor(() => {
+ expect(screen.queryByTestId('line-added')).not.toBeInTheDocument();
+ expect(screen.queryByTestId('line-removed')).not.toBeInTheDocument();
+ });
+ });
+
+ it('shows error message on failed edit', async function () {
+ render( );
+
+ await userEvent.click(screen.getByRole('button', {name: 'Edit changes'}));
+
+ const textarea = screen.getByRole('textbox');
+ await userEvent.clear(textarea);
+ await userEvent.type(textarea, 'New content');
+
+ MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ statusCode: 500,
+ });
+
+ await userEvent.click(screen.getByRole('button', {name: 'Save'}));
+
+ await waitFor(() => {
+ expect(addErrorMessage).toHaveBeenCalledWith(
+ 'Something went wrong when updating changes.'
+ );
+ });
+ });
});
diff --git a/static/app/components/events/autofix/autofixDiff.tsx b/static/app/components/events/autofix/autofixDiff.tsx
index 2f1dd00ac6832..cddd855b4d805 100644
--- a/static/app/components/events/autofix/autofixDiff.tsx
+++ b/static/app/components/events/autofix/autofixDiff.tsx
@@ -1,20 +1,27 @@
-import {Fragment, useMemo, useState} from 'react';
+import {Fragment, useEffect, useMemo, useRef, useState} from 'react';
import styled from '@emotion/styled';
import {type Change, diffWords} from 'diff';
+import {addErrorMessage} from 'sentry/actionCreators/indicator';
import {Button} from 'sentry/components/button';
import {
type DiffLine,
DiffLineType,
type FilePatch,
} from 'sentry/components/events/autofix/types';
+import TextArea from 'sentry/components/forms/controls/textarea';
import InteractionStateLayer from 'sentry/components/interactionStateLayer';
-import {IconChevron} from 'sentry/icons';
+import {IconChevron, IconClose, IconDelete, IconEdit} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
+import {useMutation} from 'sentry/utils/queryClient';
+import useApi from 'sentry/utils/useApi';
type AutofixDiffProps = {
diff: FilePatch[];
+ groupId: string;
+ runId: string;
+ repoId?: string;
};
interface DiffLineWithChanges extends DiffLine {
@@ -91,17 +98,288 @@ function HunkHeader({lines, sectionHeader}: {lines: DiffLine[]; sectionHeader: s
);
}
-function DiffHunkContent({lines, header}: {header: string; lines: DiffLine[]}) {
- const linesWithChanges = useMemo(() => {
- return addChangesToDiffLines(lines);
+function useUpdateHunk({groupId, runId}: {groupId: string; runId: string}) {
+ const api = useApi({persistInFlight: true});
+
+ return useMutation({
+ mutationFn: (params: {
+ fileName: string;
+ hunkIndex: number;
+ lines: DiffLine[];
+ repoId?: string;
+ }) => {
+ return api.requestPromise(`/issues/${groupId}/autofix/update/`, {
+ method: 'POST',
+ data: {
+ run_id: runId,
+ payload: {
+ type: 'update_code_change',
+ repo_id: params.repoId ?? null,
+ hunk_index: params.hunkIndex,
+ lines: params.lines,
+ file_path: params.fileName,
+ },
+ },
+ });
+ },
+ onError: () => {
+ addErrorMessage(t('Something went wrong when updating changes.'));
+ },
+ });
+}
+
+function DiffHunkContent({
+ groupId,
+ runId,
+ repoId,
+ hunkIndex,
+ lines,
+ header,
+ fileName,
+}: {
+ fileName: string;
+ groupId: string;
+ header: string;
+ hunkIndex: number;
+ lines: DiffLine[];
+ runId: string;
+ repoId?: string;
+}) {
+ const [linesWithChanges, setLinesWithChanges] = useState([]);
+
+ useEffect(() => {
+ setLinesWithChanges(addChangesToDiffLines(lines));
}, [lines]);
+ const [editingGroup, setEditingGroup] = useState(null);
+ const [editedContent, setEditedContent] = useState('');
+ const [editedLines, setEditedLines] = useState([]);
+ const overlayRef = useRef(null);
+
+ useEffect(() => {
+ function handleClickOutside(event: MouseEvent) {
+ if (overlayRef.current && !overlayRef.current.contains(event.target as Node)) {
+ setEditingGroup(null);
+ setEditedContent('');
+ }
+ }
+
+ document.addEventListener('mousedown', handleClickOutside);
+ return () => {
+ document.removeEventListener('mousedown', handleClickOutside);
+ };
+ }, []);
+
+ const lineGroups = useMemo(() => {
+ const groups: {end: number; start: number; type: 'change' | DiffLineType}[] = [];
+ let currentGroup: (typeof groups)[number] | null = null;
+
+ linesWithChanges.forEach((line, index) => {
+ if (line.line_type !== DiffLineType.CONTEXT) {
+ if (!currentGroup) {
+ currentGroup = {start: index, end: index, type: 'change'};
+ } else if (currentGroup.type === 'change') {
+ currentGroup.end = index;
+ } else {
+ groups.push(currentGroup);
+ currentGroup = {start: index, end: index, type: 'change'};
+ }
+ } else if (currentGroup) {
+ groups.push(currentGroup);
+ currentGroup = null;
+ }
+ });
+
+ if (currentGroup) {
+ groups.push(currentGroup);
+ }
+
+ return groups;
+ }, [linesWithChanges]);
+
+ const handleEditClick = (index: number) => {
+ const group = lineGroups.find(g => g.start === index);
+ if (group) {
+ const content = linesWithChanges
+ .slice(group.start, group.end + 1)
+ .filter(line => line.line_type === DiffLineType.ADDED)
+ .map(line => line.value)
+ .join('');
+ const splitLines = content.split('\n');
+ if (splitLines[splitLines.length - 1] === '') {
+ splitLines.pop();
+ }
+ setEditedLines(splitLines);
+ if (content === '\n') {
+ setEditedContent('');
+ } else {
+ setEditedContent(content.endsWith('\n') ? content.slice(0, -1) : content);
+ }
+ setEditingGroup(index);
+ }
+ };
+
+ const handleTextAreaChange = (e: React.ChangeEvent) => {
+ const newContent = e.target.value;
+ setEditedContent(newContent);
+ setEditedLines(newContent.split('\n'));
+ };
+
+ const updateHunk = useUpdateHunk({groupId, runId});
+ const handleSaveEdit = () => {
+ if (editingGroup === null) {
+ return;
+ }
+ const group = lineGroups.find(g => g.start === editingGroup);
+ if (!group) {
+ return;
+ }
+
+ let lastSourceLineNo = 0;
+ let lastTargetLineNo = 0;
+ let lastDiffLineNo = 0;
+
+ const updatedLines = linesWithChanges
+ .map((line, index) => {
+ if (index < group.start) {
+ lastSourceLineNo = line.source_line_no ?? lastSourceLineNo;
+ lastTargetLineNo = line.target_line_no ?? lastTargetLineNo;
+ lastDiffLineNo = line.diff_line_no ?? lastDiffLineNo;
+ }
+ if (index >= group.start && index <= group.end) {
+ if (line.line_type === DiffLineType.ADDED) {
+ return null; // Remove existing added lines
+ }
+ if (line.line_type === DiffLineType.REMOVED) {
+ lastSourceLineNo = line.source_line_no ?? lastSourceLineNo;
+ }
+ return line; // Keep other lines (removed and context) as is
+ }
+ return line;
+ })
+ .filter((line): line is DiffLine => line !== null);
+
+ // Insert new added lines
+ const newAddedLines: DiffLine[] = editedContent.split('\n').map((content, i) => {
+ lastDiffLineNo++;
+ lastTargetLineNo++;
+ return {
+ diff_line_no: lastDiffLineNo,
+ source_line_no: null,
+ target_line_no: lastTargetLineNo,
+ line_type: DiffLineType.ADDED,
+ value: content + (i === editedContent.split('\n').length - 1 ? '' : '\n'),
+ };
+ });
+
+ // Find the insertion point (after the last removed line or at the start of the group)
+ const insertionIndex = updatedLines.findIndex(
+ (line, index) => index >= group.start && line.line_type !== DiffLineType.REMOVED
+ );
+
+ updatedLines.splice(
+ insertionIndex === -1 ? group.start : insertionIndex,
+ 0,
+ ...newAddedLines
+ );
+
+ // Update diff_line_no for all lines after the insertion
+ for (let i = insertionIndex + newAddedLines.length; i < updatedLines.length; i++) {
+ updatedLines[i].diff_line_no = ++lastDiffLineNo;
+ }
+
+ updateHunk.mutate({hunkIndex, lines: updatedLines, repoId, fileName});
+ setLinesWithChanges(addChangesToDiffLines(updatedLines));
+ setEditingGroup(null);
+ setEditedContent('');
+ };
+
+ const handleCancelEdit = () => {
+ setEditingGroup(null);
+ setEditedContent('');
+ };
+
+ const rejectChanges = (index: number) => {
+ const group = lineGroups.find(g => g.start === index);
+ if (!group) {
+ return;
+ }
+
+ const updatedLines = linesWithChanges
+ .map((line, i) => {
+ if (i >= group.start && i <= group.end) {
+ if (line.line_type === DiffLineType.ADDED) {
+ return null; // Remove added lines
+ }
+ if (line.line_type === DiffLineType.REMOVED) {
+ return {...line, line_type: DiffLineType.CONTEXT}; // Convert removed lines to context
+ }
+ }
+ return line;
+ })
+ .filter((line): line is DiffLine => line !== null);
+
+ updateHunk.mutate({hunkIndex, lines: updatedLines, repoId, fileName});
+ setLinesWithChanges(addChangesToDiffLines(updatedLines));
+ };
+
+ const getStartLineNumber = (index: number, lineType: DiffLineType) => {
+ const line = linesWithChanges[index];
+ if (lineType === DiffLineType.REMOVED) {
+ return line.source_line_no;
+ }
+ if (lineType === DiffLineType.ADDED) {
+ // Find the first non-null target_line_no
+ for (let i = index; i < linesWithChanges.length; i++) {
+ if (linesWithChanges[i].target_line_no !== null) {
+ return linesWithChanges[i].target_line_no;
+ }
+ }
+ }
+ return null;
+ };
+
+ const handleClearChanges = () => {
+ setEditedContent('');
+ setEditedLines([]);
+ };
+
+ const getDeletedLineTitle = (index: number) => {
+ return t(
+ '%s deleted line%s%s',
+ linesWithChanges
+ .slice(index, lineGroups.find(g => g.start === index)?.end! + 1)
+ .filter(l => l.line_type === DiffLineType.REMOVED).length,
+ linesWithChanges
+ .slice(index, lineGroups.find(g => g.start === index)?.end)
+ .filter(l => l.line_type === DiffLineType.REMOVED).length === 1
+ ? ''
+ : 's',
+ linesWithChanges
+ .slice(index, lineGroups.find(g => g.start === index)?.end)
+ .filter(l => l.line_type === DiffLineType.REMOVED).length > 0
+ ? t(' from line %s', getStartLineNumber(index, DiffLineType.REMOVED))
+ : ''
+ );
+ };
+
+ const getNewLineTitle = (index: number) => {
+ return t(
+ '%s new line%s%s',
+ editedLines.length,
+ editedLines.length === 1 ? '' : 's',
+ editedLines.length > 0
+ ? t(' from line %s', getStartLineNumber(index, DiffLineType.ADDED))
+ : ''
+ );
+ };
+
return (
- {linesWithChanges.map(line => (
-
+ {linesWithChanges.map((line, index) => (
+
{line.source_line_no}
{line.target_line_no}
+ {lineGroups.some(group => index === group.start) && (
+
+ }
+ aria-label={t('Edit changes')}
+ title={t('Edit')}
+ onClick={() => handleEditClick(index)}
+ />
+ }
+ aria-label={t('Reject changes')}
+ title={t('Reject')}
+ onClick={() => rejectChanges(index)}
+ />
+
+ )}
+ {editingGroup === index && (
+
+ {t('Editing %s', fileName)}
+ {getDeletedLineTitle(index)}
+ {linesWithChanges
+ .slice(index, lineGroups.find(g => g.start === index)?.end! + 1)
+ .filter(l => l.line_type === DiffLineType.REMOVED).length > 0 ? (
+
+ {linesWithChanges
+ .slice(index, lineGroups.find(g => g.start === index)?.end! + 1)
+ .filter(l => l.line_type === DiffLineType.REMOVED)
+ .map((l, i) => (
+ {l.value}
+ ))}
+
+ ) : (
+ {t('No lines are being deleted.')}
+ )}
+ {getNewLineTitle(index)}
+
+
+ }
+ title={t('Clear all new lines')}
+ />
+
+
+
+ {t('Cancel')}
+
+
+ {t('Save')}
+
+
+
+ )}
))}
@@ -116,7 +459,17 @@ function DiffHunkContent({lines, header}: {header: string; lines: DiffLine[]}) {
);
}
-function FileDiff({file}: {file: FilePatch}) {
+function FileDiff({
+ file,
+ groupId,
+ runId,
+ repoId,
+}: {
+ file: FilePatch;
+ groupId: string;
+ runId: string;
+ repoId?: string;
+}) {
const [isExpanded, setIsExpanded] = useState(true);
return (
@@ -138,9 +491,18 @@ function FileDiff({file}: {file: FilePatch}) {
{isExpanded && (
- {file.hunks.map(({section_header, source_start, lines}) => {
+ {file.hunks.map(({section_header, source_start, lines}, index) => {
return (
-
+
);
})}
@@ -149,7 +511,7 @@ function FileDiff({file}: {file: FilePatch}) {
);
}
-export function AutofixDiff({diff}: AutofixDiffProps) {
+export function AutofixDiff({diff, groupId, runId, repoId}: AutofixDiffProps) {
if (!diff || !diff.length) {
return null;
}
@@ -157,7 +519,13 @@ export function AutofixDiff({diff}: AutofixDiffProps) {
return (
{diff.map(file => (
-
+
))}
);
@@ -248,7 +616,10 @@ const LineNumber = styled('div')<{lineType: DiffLineType}>`
const DiffContent = styled('div')<{lineType: DiffLineType}>`
position: relative;
padding-left: ${space(4)};
+ padding-right: ${space(4)};
white-space: pre-wrap;
+ word-break: break-all;
+ word-wrap: break-word;
${p =>
p.lineType === DiffLineType.ADDED &&
@@ -275,3 +646,101 @@ const CodeDiff = styled('span')<{added?: boolean; removed?: boolean}>`
${p => p.added && `background-color: ${p.theme.diff.added};`};
${p => p.removed && `background-color: ${p.theme.diff.removed};`};
`;
+
+const ButtonGroup = styled('div')`
+ position: absolute;
+ top: 0;
+ right: ${space(0.25)};
+ display: flex;
+ opacity: 0;
+ transition: opacity 0.1s ease-in-out;
+
+ ${DiffContent}:hover & {
+ opacity: 1;
+ }
+`;
+
+const ActionButton = styled(Button)`
+ margin-left: ${space(0.5)};
+ font-family: ${p => p.theme.text.family};
+`;
+
+const EditOverlay = styled('div')`
+ position: fixed;
+ bottom: 200px;
+ right: ${space(2)};
+ left: calc(50% + ${space(2)});
+ background: ${p => p.theme.backgroundElevated};
+ border: 1px solid ${p => p.theme.border};
+ border-radius: ${p => p.theme.borderRadius};
+ box-shadow: ${p => p.theme.dropShadowHeavy};
+ padding: ${space(2)};
+ z-index: 1;
+`;
+
+const OverlayButtonGroup = styled('div')`
+ display: flex;
+ justify-content: flex-end;
+ gap: ${space(1)};
+ margin-top: ${space(1)};
+ font-family: ${p => p.theme.text.family};
+`;
+
+const RemovedLines = styled('div')`
+ margin-bottom: ${space(1)};
+ font-family: ${p => p.theme.text.familyMono};
+ border-radius: ${p => p.theme.borderRadius};
+ overflow: hidden;
+`;
+
+const RemovedLine = styled('div')`
+ background-color: ${p => p.theme.diff.removedRow};
+ color: ${p => p.theme.textColor};
+ padding: ${space(0.25)} ${space(0.5)};
+`;
+
+const StyledTextArea = styled(TextArea)`
+ font-family: ${p => p.theme.text.familyMono};
+ font-size: ${p => p.theme.fontSizeSmall};
+ background-color: ${p => p.theme.diff.addedRow};
+ border-color: ${p => p.theme.border};
+ position: relative;
+
+ &:focus {
+ border-color: ${p => p.theme.focusBorder};
+ box-shadow: inset 0 0 0 1px ${p => p.theme.focusBorder};
+ }
+`;
+
+const ClearButton = styled(Button)`
+ position: absolute;
+ top: -${space(1)};
+ right: -${space(1)};
+ z-index: 1;
+`;
+
+const TextAreaWrapper = styled('div')`
+ position: relative;
+`;
+
+const SectionTitle = styled('p')`
+ margin: ${space(1)} 0;
+ font-size: ${p => p.theme.fontSizeMedium};
+ font-weight: bold;
+ color: ${p => p.theme.textColor};
+ font-family: ${p => p.theme.text.family};
+`;
+
+const NoChangesMessage = styled('p')`
+ margin: ${space(1)} 0;
+ color: ${p => p.theme.subText};
+ font-family: ${p => p.theme.text.family};
+`;
+
+const OverlayTitle = styled('h3')`
+ margin: 0 0 ${space(2)} 0;
+ font-size: ${p => p.theme.fontSizeMedium};
+ font-weight: bold;
+ color: ${p => p.theme.textColor};
+ font-family: ${p => p.theme.text.family};
+`;
diff --git a/static/app/components/events/autofix/autofixDrawer.spec.tsx b/static/app/components/events/autofix/autofixDrawer.spec.tsx
index 9c00a410d1e5e..d6851c67dcef7 100644
--- a/static/app/components/events/autofix/autofixDrawer.spec.tsx
+++ b/static/app/components/events/autofix/autofixDrawer.spec.tsx
@@ -34,7 +34,7 @@ describe('AutofixDrawer', () => {
expect(screen.getByRole('heading', {name: 'Autofix'})).toBeInTheDocument();
- expect(screen.getByText('Autofix is ready to start')).toBeInTheDocument();
+ expect(screen.getByText('Ready to start')).toBeInTheDocument();
const startButton = screen.getByRole('button', {name: 'Start'});
expect(startButton).toBeInTheDocument();
@@ -88,7 +88,7 @@ describe('AutofixDrawer', () => {
await userEvent.click(startOverButton);
await waitFor(() => {
- expect(screen.getByText('Autofix is ready to start')).toBeInTheDocument();
+ expect(screen.getByText('Ready to start')).toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Start'})).toBeInTheDocument();
});
});
diff --git a/static/app/components/events/autofix/autofixDrawer.tsx b/static/app/components/events/autofix/autofixDrawer.tsx
index ecdb157262db8..e614146e8a36c 100644
--- a/static/app/components/events/autofix/autofixDrawer.tsx
+++ b/static/app/components/events/autofix/autofixDrawer.tsx
@@ -1,7 +1,7 @@
import {useState} from 'react';
import styled from '@emotion/styled';
-import bannerImage from 'sentry-images/spot/ai-suggestion-banner.svg';
+import bannerImage from 'sentry-images/insights/module-upsells/insights-module-upsell.svg';
import ProjectAvatar from 'sentry/components/avatar/projectAvatar';
import {Breadcrumbs as NavigationBreadcrumbs} from 'sentry/components/breadcrumbs';
@@ -34,26 +34,27 @@ function AutofixStartBox({onSend}: AutofixStartBoxProps) {
return (
- Autofix is ready to start
+
+
+
+
We'll begin by trying to figure out the root cause, analyzing the issue details
and the codebase. If you have any other helpful context on the issue before we
begin, you can share that below.
- setMessage(e.target.value)}
- placeholder={'Provide any extra context here...'}
- />
-
-
- Start
-
-
-
-
+
+ setMessage(e.target.value)}
+ placeholder={'Provide any extra context here...'}
+ />
+
+ Start
+
+
);
}
@@ -125,8 +126,13 @@ export function AutofixDrawer({group, project, event}: AutofixDrawerProps) {
);
}
+const Row = styled('div')`
+ display: flex;
+ gap: ${space(1)};
+`;
+
const IllustrationContainer = styled('div')`
- padding-top: ${space(4)};
+ padding: ${space(4)} 0 ${space(4)} 0;
`;
const Illustration = styled('img')`
@@ -137,7 +143,6 @@ const StartBox = styled('div')`
padding: ${space(2)};
display: flex;
flex-direction: column;
- justify-content: center;
height: 100%;
width: 100%;
`;
diff --git a/static/app/components/events/autofix/autofixFeedback.tsx b/static/app/components/events/autofix/autofixFeedback.tsx
index fc6ceda7c048b..1d6010fed3b06 100644
--- a/static/app/components/events/autofix/autofixFeedback.tsx
+++ b/static/app/components/events/autofix/autofixFeedback.tsx
@@ -1,29 +1,33 @@
import {useRef} from 'react';
import {Button} from 'sentry/components/button';
-import useFeedbackWidget from 'sentry/components/feedback/widget/useFeedbackWidget';
import {IconMegaphone} from 'sentry/icons/iconMegaphone';
import {t} from 'sentry/locale';
+import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
function AutofixFeedback() {
const buttonRef = useRef(null);
- const feedback = useFeedbackWidget({
- buttonRef,
- messagePlaceholder: t('How can we make Autofix better for you?'),
- optionOverrides: {
- tags: {
- ['feedback.source']: 'issue_details_ai_autofix',
- ['feedback.owner']: 'ml-ai',
- },
- },
- });
+ const openForm = useFeedbackForm();
- if (!feedback) {
+ if (!openForm) {
return null;
}
return (
- }>
+ }
+ onClick={() =>
+ openForm({
+ messagePlaceholder: t('How can we make Autofix better for you?'),
+ tags: {
+ ['feedback.source']: 'issue_details_ai_autofix',
+ ['feedback.owner']: 'ml-ai',
+ },
+ })
+ }
+ >
{t('Give Feedback')}
);
diff --git a/static/app/components/events/autofix/autofixInsightCards.spec.tsx b/static/app/components/events/autofix/autofixInsightCards.spec.tsx
index b3511f1cb3248..989f755ce0689 100644
--- a/static/app/components/events/autofix/autofixInsightCards.spec.tsx
+++ b/static/app/components/events/autofix/autofixInsightCards.spec.tsx
@@ -1,19 +1,55 @@
-import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
import AutofixInsightCards from 'sentry/components/events/autofix/autofixInsightCards';
+import type {AutofixInsight} from 'sentry/components/events/autofix/types';
jest.mock('sentry/utils/marked', () => ({
singleLineRenderer: jest.fn(text => text),
}));
-const sampleInsights = [
+jest.mock('sentry/actionCreators/indicator');
+
+const sampleInsights: AutofixInsight[] = [
{
- breadcrumb_context: [],
- codebase_context: [],
+ breadcrumb_context: [
+ {
+ body: 'Breadcrumb body',
+ category: 'ui',
+ level: 'info',
+ data_as_json: '{"testData": "testValue"}',
+ type: 'info',
+ },
+ ],
+ codebase_context: [
+ {
+ snippet: 'console.log("Hello, World!");',
+ repo_name: 'sample-repo',
+ file_path: 'src/index.js',
+ },
+ ],
error_message_context: ['Error message 1'],
insight: 'Sample insight 1',
justification: 'Sample justification 1',
+ stacktrace_context: [
+ {
+ code_snippet: 'function() { throw new Error("Test error"); }',
+ repo_name: 'sample-repo',
+ file_name: 'src/error.js',
+ vars_as_json: '{"testVar": "testValue"}',
+ col_no: 1,
+ line_no: 1,
+ function: 'testFunction',
+ },
+ ],
+ },
+ {
+ insight: 'User message',
+ justification: 'USER',
+ breadcrumb_context: [],
stacktrace_context: [],
+ codebase_context: [],
+ error_message_context: [],
},
];
@@ -27,6 +63,12 @@ const sampleRepos = [
},
];
+beforeEach(() => {
+ (addSuccessMessage as jest.Mock).mockClear();
+ (addErrorMessage as jest.Mock).mockClear();
+ MockApiClient.clearMockResponses();
+});
+
describe('AutofixInsightCards', () => {
const renderComponent = (props = {}) => {
return render(
@@ -35,6 +77,9 @@ describe('AutofixInsightCards', () => {
repos={sampleRepos}
hasStepAbove={false}
hasStepBelow={false}
+ groupId="1"
+ runId="1"
+ stepIndex={0}
{...props}
/>
);
@@ -43,13 +88,179 @@ describe('AutofixInsightCards', () => {
it('renders insights correctly', () => {
renderComponent();
expect(screen.getByText('Sample insight 1')).toBeInTheDocument();
+ expect(screen.getByText('User message')).toBeInTheDocument();
});
- it('expands context when clicked', async () => {
+ it('renders breadcrumb context correctly', async () => {
renderComponent();
const contextButton = screen.getByText('Context');
+ await userEvent.click(contextButton);
+ expect(screen.getByText('Breadcrumb body')).toBeInTheDocument();
+ expect(screen.getByText('info')).toBeInTheDocument();
+ });
+
+ it('renders codebase context correctly', async () => {
+ renderComponent();
+ const contextButton = screen.getByText('Context');
+ await userEvent.click(contextButton);
+ expect(screen.getByText('console.log("Hello, World!");')).toBeInTheDocument();
+ expect(screen.getByText('src/index.js')).toBeInTheDocument();
+ });
+
+ it('renders stacktrace context correctly', async () => {
+ renderComponent();
+ const contextButton = screen.getByText('Context');
+ await userEvent.click(contextButton);
+ expect(
+ screen.getByText('function() { throw new Error("Test error"); }')
+ ).toBeInTheDocument();
+ expect(screen.getByText('src/error.js')).toBeInTheDocument();
+ expect(screen.getByText('testVar')).toBeInTheDocument();
+ });
+
+ it('renders user messages differently', () => {
+ renderComponent();
+ const userMessage = screen.getByText('User message');
+ expect(userMessage.closest('div')).toHaveStyle('color: inherit');
+ });
+
+ it('renders "No insights yet" message when there are no insights', () => {
+ renderComponent({insights: []});
+ expect(
+ screen.getByText(/Autofix will share important conclusions here/)
+ ).toBeInTheDocument();
+ });
+
+ it('toggles context expansion correctly', async () => {
+ renderComponent();
+ const contextButton = screen.getByText('Context');
+
await userEvent.click(contextButton);
expect(screen.getByText('Sample justification 1')).toBeInTheDocument();
- expect(screen.getByText('`Error message 1`')).toBeInTheDocument();
+
+ await userEvent.click(contextButton);
+ expect(screen.queryByText('Sample justification 1')).not.toBeInTheDocument();
+ });
+
+ it('renders multiple insights correctly', () => {
+ const multipleInsights = [
+ ...sampleInsights,
+ {
+ insight: 'Another insight',
+ justification: 'Another justification',
+ error_message_context: ['Another error message'],
+ },
+ ];
+ renderComponent({insights: multipleInsights});
+ expect(screen.getByText('Sample insight 1')).toBeInTheDocument();
+ expect(screen.getByText('User message')).toBeInTheDocument();
+ expect(screen.getByText('Another insight')).toBeInTheDocument();
+ });
+
+ it('renders "Rethink from here" buttons', () => {
+ renderComponent();
+ const rethinkButtons = screen.getAllByRole('button', {name: 'Rethink from here'});
+ expect(rethinkButtons.length).toBeGreaterThan(0);
+ });
+
+ it('shows rethink input overlay when "Rethink from here" is clicked', async () => {
+ renderComponent();
+ const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'});
+ await userEvent.click(rethinkButton);
+ expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument();
+ });
+
+ it('hides rethink input overlay when clicked outside', async () => {
+ renderComponent();
+ const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'});
+ await userEvent.click(rethinkButton);
+ expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument();
+
+ await userEvent.click(document.body);
+ expect(screen.queryByPlaceholderText('Say something...')).not.toBeInTheDocument();
+ });
+
+ it('submits rethink request when form is submitted', async () => {
+ const mockApi = MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ });
+
+ renderComponent();
+ const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'});
+ await userEvent.click(rethinkButton);
+
+ const input = screen.getByPlaceholderText('Say something...');
+ await userEvent.type(input, 'Rethink this part');
+
+ const submitButton = screen.getByLabelText(
+ 'Restart analysis from this point in the chain'
+ );
+ await userEvent.click(submitButton);
+
+ expect(mockApi).toHaveBeenCalledWith(
+ '/issues/1/autofix/update/',
+ expect.objectContaining({
+ method: 'POST',
+ data: expect.objectContaining({
+ run_id: '1',
+ payload: expect.objectContaining({
+ type: 'restart_from_point_with_feedback',
+ message: 'Rethink this part',
+ step_index: 0,
+ retain_insight_card_index: 0,
+ }),
+ }),
+ })
+ );
+ });
+
+ it('shows success message after successful rethink submission', async () => {
+ MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ });
+
+ renderComponent();
+ const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'});
+ await userEvent.click(rethinkButton);
+
+ const input = screen.getByPlaceholderText('Say something...');
+ await userEvent.type(input, 'Rethink this part');
+
+ const submitButton = screen.getByLabelText(
+ 'Restart analysis from this point in the chain'
+ );
+ await userEvent.click(submitButton);
+
+ await waitFor(() => {
+ expect(addSuccessMessage).toHaveBeenCalledWith("Thanks, I'll rethink this...");
+ });
+ });
+
+ it('shows error message after failed rethink submission', async () => {
+ MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ statusCode: 500,
+ });
+
+ renderComponent();
+ const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'});
+ await userEvent.click(rethinkButton);
+
+ const input = screen.getByPlaceholderText('Say something...');
+ await userEvent.type(input, 'Rethink this part');
+
+ const submitButton = screen.getByLabelText(
+ 'Restart analysis from this point in the chain'
+ );
+ await userEvent.click(submitButton);
+
+ await waitFor(() => {
+ expect(addErrorMessage).toHaveBeenCalledWith(
+ 'Something went wrong when sending Autofix your message.'
+ );
+ });
});
});
diff --git a/static/app/components/events/autofix/autofixInsightCards.tsx b/static/app/components/events/autofix/autofixInsightCards.tsx
index 65d191d17a223..3e5b3e213b2c7 100644
--- a/static/app/components/events/autofix/autofixInsightCards.tsx
+++ b/static/app/components/events/autofix/autofixInsightCards.tsx
@@ -1,9 +1,10 @@
-import {useState} from 'react';
+import {useEffect, useRef, useState} from 'react';
import styled from '@emotion/styled';
import {AnimatePresence, type AnimationProps, motion} from 'framer-motion';
-import bannerImage from 'sentry-images/spot/ai-suggestion-banner.svg';
+import bannerImage from 'sentry-images/insights/module-upsells/insights-module-upsell.svg';
+import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
import {Button} from 'sentry/components/button';
import {
replaceHeadersWithBold,
@@ -21,13 +22,25 @@ import {
getBreadcrumbColorConfig,
getBreadcrumbTitle,
} from 'sentry/components/events/breadcrumbs/utils';
+import Input from 'sentry/components/input';
import StructuredEventData from 'sentry/components/structuredEventData';
import Timeline from 'sentry/components/timeline';
-import {IconArrow, IconChevron, IconCode, IconFire} from 'sentry/icons';
+import {
+ IconArrow,
+ IconChevron,
+ IconCode,
+ IconEdit,
+ IconFire,
+ IconRefresh,
+ IconUser,
+} from 'sentry/icons';
+import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {BreadcrumbLevelType, BreadcrumbType} from 'sentry/types/breadcrumbs';
import {singleLineRenderer} from 'sentry/utils/marked';
+import {useMutation} from 'sentry/utils/queryClient';
import testableTransition from 'sentry/utils/testableTransition';
+import useApi from 'sentry/utils/useApi';
interface AutofixBreadcrumbSnippetProps {
breadcrumb: BreadcrumbContext;
@@ -70,9 +83,13 @@ function AutofixBreadcrumbSnippet({breadcrumb}: AutofixBreadcrumbSnippetProps) {
export function ExpandableInsightContext({
children,
title,
+ icon,
+ rounded,
}: {
children: React.ReactNode;
title: string;
+ icon?: React.ReactNode;
+ rounded?: boolean;
}) {
const [expanded, setExpanded] = useState(false);
@@ -81,10 +98,18 @@ export function ExpandableInsightContext({
};
return (
-
-
+
+
- {title}
+
+ {icon}
+ {title}
+
@@ -101,10 +126,14 @@ const animationProps: AnimationProps = {
};
interface AutofixInsightCardProps {
+ groupId: string;
hasCardAbove: boolean;
hasCardBelow: boolean;
+ index: number;
insight: AutofixInsight;
repos: AutofixRepository[];
+ runId: string;
+ stepIndex: number;
}
function AutofixInsightCard({
@@ -112,111 +141,140 @@ function AutofixInsightCard({
hasCardBelow,
hasCardAbove,
repos,
+ index,
+ stepIndex,
+ groupId,
+ runId,
}: AutofixInsightCardProps) {
+ const isUserMessage = insight.justification === 'USER';
+
return (
{hasCardAbove && (
-
-
-
- )}
-
-
-
-
+
- {insight.error_message_context &&
- insight.error_message_context.length > 0 && (
+
+
+ {insight.error_message_context &&
+ insight.error_message_context.length > 0 && (
+
+ {insight.error_message_context
+ .map((message, i) => {
+ return (
+
+
+
+
+
+
+
+
+ );
+ })
+ .reverse()}
+
+ )}
+ {insight.stacktrace_context && insight.stacktrace_context.length > 0 && (
- {insight.error_message_context
- .map((message, i) => {
+ {insight.stacktrace_context
+ .map((stacktrace, i) => {
+ let vars: any = {};
+ try {
+ vars = JSON.parse(stacktrace.vars_as_json);
+ } catch {
+ vars = {vars: stacktrace.vars_as_json};
+ }
return (
-
-
-
-
-
-
-
-
+
+ }
+ />
+
+
);
})
.reverse()}
)}
- {insight.stacktrace_context && insight.stacktrace_context.length > 0 && (
-
- {insight.stacktrace_context
- .map((stacktrace, i) => {
- return (
-
+ {insight.breadcrumb_context && insight.breadcrumb_context.length > 0 && (
+
+ {insight.breadcrumb_context
+ .map((breadcrumb, i) => {
+ return (
+
+ );
+ })
+ .reverse()}
+
+ )}
+ {insight.codebase_context && insight.codebase_context.length > 0 && (
+
+ {insight.codebase_context
+ .map((code, i) => {
+ return (
}
- />
- }
/>
-
- );
- })
- .reverse()}
-
- )}
- {insight.breadcrumb_context && insight.breadcrumb_context.length > 0 && (
-
- {insight.breadcrumb_context
- .map((breadcrumb, i) => {
- return
;
- })
- .reverse()}
-
- )}
- {insight.codebase_context && insight.codebase_context.length > 0 && (
-
- {insight.codebase_context
- .map((code, i) => {
- return (
- }
- />
- );
- })
- .reverse()}
-
- )}
-
-
+ );
+ })
+ .reverse()}
+
+ )}
+
+
+ )}
+ {isUserMessage && (
+
+
+
+
+ )}
{hasCardBelow && (
-
-
-
+
)}
@@ -225,10 +283,13 @@ function AutofixInsightCard({
}
interface AutofixInsightCardsProps {
+ groupId: string;
hasStepAbove: boolean;
hasStepBelow: boolean;
insights: AutofixInsight[];
repos: AutofixRepository[];
+ runId: string;
+ stepIndex: number;
}
function AutofixInsightCards({
@@ -236,17 +297,12 @@ function AutofixInsightCards({
repos,
hasStepBelow,
hasStepAbove,
+ stepIndex,
+ groupId,
+ runId,
}: AutofixInsightCardsProps) {
return (
- {!hasStepAbove && (
-
- Insights
-
-
-
-
- )}
{insights.length > 0 ? (
insights.map((insight, index) =>
!insight ? null : (
@@ -256,6 +312,10 @@ function AutofixInsightCards({
hasCardBelow={index < insights.length - 1 || hasStepBelow}
hasCardAbove={hasStepAbove && index === 0}
repos={repos}
+ index={index}
+ stepIndex={stepIndex}
+ groupId={groupId}
+ runId={runId}
/>
)
)
@@ -274,6 +334,131 @@ function AutofixInsightCards({
);
}
+export function useUpdateInsightCard({groupId, runId}: {groupId: string; runId: string}) {
+ const api = useApi({persistInFlight: true});
+
+ return useMutation({
+ mutationFn: (params: {
+ message: string;
+ retain_insight_card_index: number | null;
+ step_index: number;
+ }) => {
+ return api.requestPromise(`/issues/${groupId}/autofix/update/`, {
+ method: 'POST',
+ data: {
+ run_id: runId,
+ payload: {
+ type: 'restart_from_point_with_feedback',
+ message: params.message,
+ step_index: params.step_index,
+ retain_insight_card_index: params.retain_insight_card_index,
+ },
+ },
+ });
+ },
+ onSuccess: _ => {
+ addSuccessMessage(t("Thanks, I'll rethink this..."));
+ },
+ onError: () => {
+ addErrorMessage(t('Something went wrong when sending Autofix your message.'));
+ },
+ });
+}
+
+function ChainLink({
+ groupId,
+ runId,
+ stepIndex,
+ insightCardAboveIndex,
+}: {
+ groupId: string;
+ insightCardAboveIndex: number | null;
+ runId: string;
+ stepIndex: number;
+}) {
+ const [showOverlay, setShowOverlay] = useState(false);
+ const overlayRef = useRef(null);
+ const [comment, setComment] = useState('');
+ const {mutate: send} = useUpdateInsightCard({groupId, runId});
+
+ const handleClickOutside = event => {
+ if (overlayRef.current && !overlayRef.current.contains(event.target)) {
+ setShowOverlay(false);
+ }
+ };
+
+ useEffect(() => {
+ if (showOverlay) {
+ document.addEventListener('mousedown', handleClickOutside);
+ } else {
+ document.removeEventListener('mousedown', handleClickOutside);
+ }
+ return () => {
+ document.removeEventListener('mousedown', handleClickOutside);
+ };
+ }, [showOverlay]);
+
+ return (
+
+
+ }
+ size="zero"
+ className="rethink-button"
+ title={t('Rethink from here')}
+ aria-label={t('Rethink from here')}
+ onClick={() => setShowOverlay(true)}
+ />
+
+ {showOverlay && (
+
+
+
+ )}
+
+ );
+}
+
+const UserMessageContainer = styled('div')`
+ color: ${p => p.theme.subText};
+ display: flex;
+ padding: ${space(1)};
+`;
+
+const UserMessage = styled('div')`
+ margin-left: ${space(2)};
+ flex-shrink: 100;
+`;
+
const IllustrationContainer = styled('div')`
padding-top: ${space(4)};
`;
@@ -291,14 +476,6 @@ const NoInsightsYet = styled('div')`
text-align: center;
`;
-const TitleText = styled('p')`
- font-size: ${p => p.theme.fontSizeLarge};
- font-weight: ${p => p.theme.fontWeightBold};
- margin: 0;
- display: flex;
- justify-content: center;
-`;
-
const InsightsContainer = styled('div')``;
const InsightContainer = styled(motion.div)`
@@ -308,10 +485,49 @@ const InsightContainer = styled(motion.div)`
box-shadow: ${p => p.theme.dropShadowMedium};
`;
-const IconContainer = styled('div')`
- padding: ${space(1)};
- display: flex;
- justify-content: center;
+const ArrowContainer = styled('div')`
+ display: grid;
+ grid-template-columns: 1fr auto 1fr;
+ color: ${p => p.theme.subText};
+ align-items: center;
+ position: relative;
+ z-index: 0;
+
+ .arrow-icon {
+ margin-top: ${space(1)};
+ grid-column: 2 / 3;
+ justify-self: center;
+ }
+
+ .rethink-button {
+ grid-column: 3 / 4;
+ justify-self: end;
+ transition: opacity 0.1s ease-in-out;
+ }
+`;
+
+const RethinkButton = styled(Button)`
+ font-weight: normal;
+ font-size: small;
+ border: none;
+ color: ${p => p.theme.subText};
+ margin-top: ${space(1)};
+`;
+
+const RethinkInput = styled('div')`
+ position: absolute;
+ box-shadow: ${p => p.theme.dropShadowHeavy};
+ border: 1px solid ${p => p.theme.border};
+ width: 95%;
+ background: ${p => p.theme.backgroundElevated};
+ padding: ${space(0.5)};
+ border-radius: ${p => p.theme.borderRadius};
+ margin: 0 ${space(2)} 0 ${space(2)};
+
+ .row-form {
+ display: flex;
+ gap: ${space(1)};
+ }
`;
const BreadcrumbItem = styled(Timeline.Item)`
@@ -353,19 +569,34 @@ const MiniHeader = styled('p')`
padding-left: ${space(2)};
`;
-const ExpandableContext = styled('div')`
+const ExpandableContext = styled('div')<{isRounded?: boolean}>`
width: 100%;
background: ${p => p.theme.alert.info.backgroundLight};
+ border-radius: ${p => (p.isRounded ? p.theme.borderRadius : 0)};
`;
-const ContextHeader = styled(Button)`
+const ContextHeader = styled(Button)<{isExpanded?: boolean; isRounded?: boolean}>`
width: 100%;
box-shadow: none;
margin: 0;
border: none;
font-weight: normal;
background: ${p => p.theme.backgroundSecondary};
- border-radius: 0px;
+ border-radius: ${p => {
+ if (!p.isRounded) {
+ return 0;
+ }
+ if (p.isExpanded) {
+ return `${p.theme.borderRadius} ${p.theme.borderRadius} 0 0`;
+ }
+ return p.theme.borderRadius;
+ }};
+`;
+
+const ContextHeaderLeftAlign = styled('div')`
+ display: flex;
+ gap: ${space(1)};
+ align-items: center;
`;
const ContextHeaderWrapper = styled('div')`
diff --git a/static/app/components/events/autofix/autofixMessageBox.spec.tsx b/static/app/components/events/autofix/autofixMessageBox.spec.tsx
index 941bbfc7c1904..fb0d051a24d90 100644
--- a/static/app/components/events/autofix/autofixMessageBox.spec.tsx
+++ b/static/app/components/events/autofix/autofixMessageBox.spec.tsx
@@ -12,9 +12,7 @@ describe('AutofixMessageBox', () => {
displayText: 'Test display text',
groupId: '123',
runId: '456',
- inputPlaceholder: 'Test placeholder',
actionText: 'Send',
- isDisabled: false,
allowEmptyMessage: false,
responseRequired: false,
step: null,
@@ -31,7 +29,7 @@ describe('AutofixMessageBox', () => {
render( );
expect(screen.getByText('Test display text')).toBeInTheDocument();
- expect(screen.getByPlaceholderText('Test placeholder')).toBeInTheDocument();
+ expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Send'})).toBeInTheDocument();
});
@@ -39,7 +37,7 @@ describe('AutofixMessageBox', () => {
const onSendMock = jest.fn();
render( );
- const input = screen.getByPlaceholderText('Test placeholder');
+ const input = screen.getByPlaceholderText('Say something...');
await userEvent.type(input, 'Test message');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
@@ -55,13 +53,13 @@ describe('AutofixMessageBox', () => {
render( );
- const input = screen.getByPlaceholderText('Test placeholder');
+ const input = screen.getByPlaceholderText('Say something...');
await userEvent.type(input, 'Test message');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
await waitFor(() => {
expect(addSuccessMessage).toHaveBeenCalledWith(
- "Thanks for the input! I'll get to it right after this."
+ "Thanks for the input. I'll get to it soon."
);
});
});
@@ -78,7 +76,7 @@ describe('AutofixMessageBox', () => {
render( );
- const input = screen.getByPlaceholderText('Test placeholder');
+ const input = screen.getByPlaceholderText('Say something...');
await userEvent.type(input, 'Test message');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
@@ -100,13 +98,6 @@ describe('AutofixMessageBox', () => {
expect(screen.getByText(AutofixStepFixture().title)).toBeInTheDocument();
});
- it('disables input and button when isDisabled is true', () => {
- render( );
-
- expect(screen.getByPlaceholderText('Test placeholder')).toBeDisabled();
- expect(screen.getByRole('button', {name: 'Send'})).toBeDisabled();
- });
-
it('renders required input style when responseRequired is true', () => {
render( );
@@ -114,4 +105,33 @@ describe('AutofixMessageBox', () => {
screen.getByPlaceholderText('Please answer to continue...')
).toBeInTheDocument();
});
+
+ it('handles suggested root cause selection correctly', async () => {
+ const onSendMock = jest.fn();
+ render(
+
+ );
+
+ // Test suggested root cause
+ const input = screen.getByPlaceholderText('Provide any instructions for the fix...');
+ await userEvent.type(input, 'Use this suggestion');
+ await userEvent.click(screen.getByRole('button', {name: 'Send'}));
+
+ expect(onSendMock).toHaveBeenCalledWith('Use this suggestion', false);
+ });
+
+ it('handles custom root cause selection correctly', async () => {
+ const onSendMock = jest.fn();
+ render(
+
+ );
+
+ // Test custom root cause
+ await userEvent.click(screen.getAllByText('Provide your own root cause')[0]);
+ const customInput = screen.getByPlaceholderText('Propose your own root cause...');
+ await userEvent.type(customInput, 'Custom root cause');
+ await userEvent.click(screen.getByRole('button', {name: 'Send'}));
+
+ expect(onSendMock).toHaveBeenCalledWith('Custom root cause', true);
+ });
});
diff --git a/static/app/components/events/autofix/autofixMessageBox.tsx b/static/app/components/events/autofix/autofixMessageBox.tsx
index 30ea803310c99..4221a52247147 100644
--- a/static/app/components/events/autofix/autofixMessageBox.tsx
+++ b/static/app/components/events/autofix/autofixMessageBox.tsx
@@ -6,6 +6,7 @@ import {Button} from 'sentry/components/button';
import {type AutofixStep, AutofixStepType} from 'sentry/components/events/autofix/types';
import Input from 'sentry/components/input';
import LoadingIndicator from 'sentry/components/loadingIndicator';
+import {SegmentedControl} from 'sentry/components/segmentedControl';
import {
IconCheckmark,
IconChevron,
@@ -38,7 +39,7 @@ function useSendMessage({groupId, runId}: {groupId: string; runId: string}) {
});
},
onSuccess: _ => {
- addSuccessMessage("Thanks for the input! I'll get to it right after this.");
+ addSuccessMessage("Thanks for the input. I'll get to it soon.");
},
onError: () => {
addErrorMessage(t('Something went wrong when sending Autofix your message.'));
@@ -51,14 +52,11 @@ interface AutofixMessageBoxProps {
allowEmptyMessage: boolean;
displayText: string;
groupId: string;
- inputPlaceholder: string;
- isDisabled: boolean;
- onSend: ((message: string) => void) | null;
+ onSend: ((message: string, isCustom?: boolean) => void) | null;
responseRequired: boolean;
runId: string;
step: AutofixStep | null;
- emptyInfoText?: string;
- notEmptyInfoText?: string;
+ isRootCauseSelectionStep?: boolean;
primaryAction?: boolean;
scrollIntoView?: (() => void) | null;
}
@@ -80,6 +78,8 @@ function StepIcon({step}: {step: AutofixStep}) {
}
switch (step.status) {
+ case 'WAITING_FOR_USER_RESPONSE':
+ return ;
case 'PROCESSING':
return ;
case 'CANCELLED':
@@ -96,24 +96,41 @@ function StepIcon({step}: {step: AutofixStep}) {
function AutofixMessageBox({
displayText = '',
step = null,
- inputPlaceholder = 'Say something...',
primaryAction = false,
responseRequired = false,
onSend,
actionText = 'Send',
allowEmptyMessage = false,
- isDisabled = false,
groupId,
runId,
- emptyInfoText = '',
- notEmptyInfoText = '',
scrollIntoView = null,
+ isRootCauseSelectionStep = false,
}: AutofixMessageBoxProps) {
const [message, setMessage] = useState('');
const {mutate: send} = useSendMessage({groupId, runId});
+ const [rootCauseMode, setRootCauseMode] = useState<
+ 'suggested_root_cause' | 'custom_root_cause'
+ >('suggested_root_cause');
+
+ const isDisabled =
+ step?.status === 'ERROR' ||
+ (step?.type === AutofixStepType.ROOT_CAUSE_ANALYSIS && step.causes?.length === 0);
+
const handleSend = (e: FormEvent) => {
e.preventDefault();
+
+ if (isRootCauseSelectionStep && onSend) {
+ if (rootCauseMode === 'custom_root_cause' && message.trim() !== '') {
+ onSend?.(message, true);
+ setMessage('');
+ } else if (rootCauseMode === 'suggested_root_cause') {
+ onSend?.(message, false);
+ setMessage('');
+ }
+ return;
+ }
+
if (message.trim() !== '' || allowEmptyMessage) {
if (onSend != null) {
onSend(message);
@@ -155,7 +172,23 @@ function AutofixMessageBox({
}}
/>
- {message.length > 0 ? notEmptyInfoText : emptyInfoText}
+ {isRootCauseSelectionStep && (
+
+
+
+ {t('Use suggested root cause')}
+
+
+ {t('Provide your own root cause')}
+
+
+
+ )}
);
}}
@@ -373,22 +391,21 @@ export function Threads({data, event, projectSlug, groupingCurrentLevel}: Props)
const Grid = styled('div')`
display: grid;
grid-template-columns: auto 1fr;
+ gap: ${space(2)};
+`;
+
+const TheadStateContainer = styled('div')`
+ ${p => p.theme.overflowEllipsis}
`;
const ThreadStateWrapper = styled('div')`
display: flex;
position: relative;
flex-direction: row;
- align-items: flex-start;
+ align-items: center;
gap: ${space(0.5)};
`;
-const ThreadState = styled(TextOverflow)`
- max-width: 100%;
- text-align: left;
- font-weight: ${p => p.theme.fontWeightBold};
-`;
-
const LockReason = styled(TextOverflow)`
font-weight: ${p => p.theme.fontWeightNormal};
color: ${p => p.theme.gray300};
diff --git a/static/app/components/events/meta/annotatedText/redaction.tsx b/static/app/components/events/meta/annotatedText/redaction.tsx
index e07937d1a33b8..825fc050472aa 100644
--- a/static/app/components/events/meta/annotatedText/redaction.tsx
+++ b/static/app/components/events/meta/annotatedText/redaction.tsx
@@ -2,6 +2,5 @@ import styled from '@emotion/styled';
export const Redaction = styled('span')<{withoutBackground?: boolean}>`
cursor: default;
- vertical-align: middle;
${p => !p.withoutBackground && `background: rgba(255, 0, 0, 0.05);`}
`;
diff --git a/static/app/components/events/suspectCommits.spec.tsx b/static/app/components/events/suspectCommits.spec.tsx
index 99bdea50a4104..19a7174fb34cf 100644
--- a/static/app/components/events/suspectCommits.spec.tsx
+++ b/static/app/components/events/suspectCommits.spec.tsx
@@ -68,12 +68,16 @@ describe('SuspectCommits', function () {
committers,
},
});
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/projects/`,
+ body: [project],
+ });
});
it('Renders base commit row', async function () {
render(
;
eventId: string;
- project: AvatarProject;
+ projectSlug: Project['slug'];
group?: Group;
}
-export function SuspectCommits({group, eventId, project, commitRow: CommitRow}: Props) {
+export function SuspectCommits({
+ group,
+ eventId,
+ projectSlug,
+ commitRow: CommitRow,
+}: Props) {
const organization = useOrganization();
const [isExpanded, setIsExpanded] = useState(false);
+ const project = useProjectFromSlug({organization, projectSlug});
const {data} = useCommitters({
eventId,
- projectSlug: project.slug,
+ projectSlug,
});
const committers = data?.committers ?? [];
@@ -66,7 +71,7 @@ export function SuspectCommits({group, eventId, project, commitRow: CommitRow}:
const handlePullRequestClick = (commit: Commit, commitIndex: number) => {
trackAnalytics('issue_details.suspect_commits.pull_request_clicked', {
organization,
- project_id: parseInt(project.id as string, 10),
+ project_id: parseInt(project?.id as string, 10),
suspect_commit_calculation: commit.suspectCommitType ?? 'unknown',
suspect_commit_index: commitIndex,
...getAnalyticsDataForGroup(group),
@@ -76,7 +81,7 @@ export function SuspectCommits({group, eventId, project, commitRow: CommitRow}:
const handleCommitClick = (commit: Commit, commitIndex: number) => {
trackAnalytics('issue_details.suspect_commits.commit_clicked', {
organization,
- project_id: parseInt(project.id as string, 10),
+ project_id: parseInt(project?.id as string, 10),
has_pull_request: commit.pullRequest?.id !== undefined,
suspect_commit_calculation: commit.suspectCommitType ?? 'unknown',
suspect_commit_index: commitIndex,
@@ -88,7 +93,7 @@ export function SuspectCommits({group, eventId, project, commitRow: CommitRow}:
return hasStreamlinedUI ? (
-
+
{commits.slice(0, 100).map((commit, commitIndex) => (
{t('Suspect Commit')}
@@ -101,9 +106,6 @@ export function SuspectCommits({group, eventId, project, commitRow: CommitRow}:
project={project}
/>
-
-
-
))}
@@ -166,27 +168,6 @@ const StreamlinedPanel = styled(Panel)`
margin-bottom: 0;
width: 100%;
min-width: 85%;
- &:last-child {
- margin-right: ${space(1.5)};
- }
- &:first-child {
- margin-left: ${space(1.5)};
- }
-`;
-
-const IllustrationContainer = styled('div')`
- position: absolute;
- top: 0px;
- right: 50px;
-
- @media (max-width: ${p => p.theme.breakpoints.xlarge}) {
- display: none;
- pointer-events: none;
- }
-`;
-
-const Illustration = styled('img')`
- height: 110px;
`;
const SuspectCommitWrapper = styled('div')`
diff --git a/static/app/components/events/userFeedback/userFeedbackDrawer.tsx b/static/app/components/events/userFeedback/userFeedbackDrawer.tsx
new file mode 100644
index 0000000000000..8723a8f44d149
--- /dev/null
+++ b/static/app/components/events/userFeedback/userFeedbackDrawer.tsx
@@ -0,0 +1,70 @@
+import styled from '@emotion/styled';
+
+import ProjectAvatar from 'sentry/components/avatar/projectAvatar';
+import {
+ CrumbContainer,
+ EventDrawerBody,
+ EventDrawerContainer,
+ EventDrawerHeader,
+ EventNavigator,
+ Header,
+ NavigationCrumbs,
+ ShortId,
+} from 'sentry/components/events/eventDrawer';
+import {Body} from 'sentry/components/layouts/thirds';
+import {t} from 'sentry/locale';
+import type {Group} from 'sentry/types/group';
+import type {Project} from 'sentry/types/project';
+import {useLocation} from 'sentry/utils/useLocation';
+import useOrganization from 'sentry/utils/useOrganization';
+import usePageFilters from 'sentry/utils/usePageFilters';
+import GroupUserFeedback from 'sentry/views/issueDetails/groupUserFeedback';
+
+export function UserFeedbackDrawer({group, project}: {group: Group; project: Project}) {
+ const location = useLocation();
+ const organization = useOrganization();
+ const {selection} = usePageFilters();
+ const {environments} = selection;
+
+ return (
+
+
+
+
+ {group.shortId}
+
+ ),
+ },
+ {label: t('User Feedback')},
+ ]}
+ />
+
+
+
+
+
+
+
+
+ );
+}
+
+/* Disable grid from Layout styles in drawer */
+const UserFeedbackBody = styled(EventDrawerBody)`
+ ${Body} {
+ grid-template-columns: unset;
+ }
+`;
diff --git a/static/app/components/featureFeedback/feedbackModal.tsx b/static/app/components/featureFeedback/feedbackModal.tsx
index a6a09a5a22926..6e42a489b38c0 100644
--- a/static/app/components/featureFeedback/feedbackModal.tsx
+++ b/static/app/components/featureFeedback/feedbackModal.tsx
@@ -30,6 +30,7 @@ import {defined} from 'sentry/utils';
import {useLocation} from 'sentry/utils/useLocation';
import useMedia from 'sentry/utils/useMedia';
import useProjects from 'sentry/utils/useProjects';
+import {useUser} from 'sentry/utils/useUser';
export const feedbackClient = new BrowserClient({
// feedback project under Sentry organization
@@ -95,7 +96,7 @@ export function FeedbackModal({
const location = useLocation();
const theme = useTheme();
- const user = ConfigStore.get('user');
+ const user = useUser();
const isSelfHosted = ConfigStore.get('isSelfHosted');
const [state, setState] = useState(
props.children === undefined
diff --git a/static/app/components/feedback/feedbackItem/feedbackActions.tsx b/static/app/components/feedback/feedbackItem/feedbackActions.tsx
index 4a5458aa07149..ccd15fddf4597 100644
--- a/static/app/components/feedback/feedbackItem/feedbackActions.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackActions.tsx
@@ -48,8 +48,17 @@ export default function FeedbackActions({
}
function LargeWidth({feedbackItem}: {feedbackItem: FeedbackIssue}) {
- const {isResolved, onResolveClick, isSpam, onSpamClick, hasSeen, onMarkAsReadClick} =
- useFeedbackActions({feedbackItem});
+ const {
+ disableDelete,
+ hasDelete,
+ onDelete,
+ isResolved,
+ onResolveClick,
+ isSpam,
+ onSpamClick,
+ hasSeen,
+ onMarkAsReadClick,
+ } = useFeedbackActions({feedbackItem});
return (
@@ -66,13 +75,27 @@ function LargeWidth({feedbackItem}: {feedbackItem: FeedbackIssue}) {
{hasSeen ? t('Mark Unread') : t('Mark Read')}
+ {hasDelete && (
+
+ {t('Delete')}
+
+ )}
);
}
function MediumWidth({feedbackItem}: {feedbackItem: FeedbackIssue}) {
- const {isResolved, onResolveClick, isSpam, onSpamClick, hasSeen, onMarkAsReadClick} =
- useFeedbackActions({feedbackItem});
+ const {
+ disableDelete,
+ hasDelete,
+ onDelete,
+ isResolved,
+ onResolveClick,
+ isSpam,
+ onSpamClick,
+ hasSeen,
+ onMarkAsReadClick,
+ } = useFeedbackActions({feedbackItem});
return (
@@ -103,6 +126,14 @@ function MediumWidth({feedbackItem}: {feedbackItem: FeedbackIssue}) {
label: hasSeen ? t('Mark Unread') : t('Mark Read'),
onAction: onMarkAsReadClick,
},
+ {
+ key: 'delete',
+ priority: 'danger' as const,
+ label: t('Delete'),
+ hidden: !hasDelete,
+ disabled: disableDelete,
+ onAction: onDelete,
+ },
].filter(defined)}
/>
@@ -110,8 +141,17 @@ function MediumWidth({feedbackItem}: {feedbackItem: FeedbackIssue}) {
}
function SmallWidth({feedbackItem}: {feedbackItem: FeedbackIssue}) {
- const {isResolved, onResolveClick, isSpam, onSpamClick, hasSeen, onMarkAsReadClick} =
- useFeedbackActions({feedbackItem});
+ const {
+ disableDelete,
+ hasDelete,
+ onDelete,
+ isResolved,
+ onResolveClick,
+ isSpam,
+ onSpamClick,
+ hasSeen,
+ onMarkAsReadClick,
+ } = useFeedbackActions({feedbackItem});
return (
);
diff --git a/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx b/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx
index d31c3c1a3f225..dbe4818d22342 100644
--- a/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx
@@ -52,7 +52,7 @@ export default function FeedbackAssignedTo({
projectIds: [feedbackIssue.project.id],
});
- const owners = getOwnerList([], eventOwners ?? null, feedbackIssue.assignedTo);
+ const owners = getOwnerList([], eventOwners, feedbackIssue.assignedTo);
// A new `key` will make the component re-render when showActorName changes
const key = showActorName ? 'showActor' : 'hideActor';
diff --git a/static/app/components/feedback/feedbackItem/feedbackItem.tsx b/static/app/components/feedback/feedbackItem/feedbackItem.tsx
index d8d2ac4f034e5..e814ba315897f 100644
--- a/static/app/components/feedback/feedbackItem/feedbackItem.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackItem.tsx
@@ -97,11 +97,9 @@ export default function FeedbackItem({feedbackItem, eventData, tags}: Props) {
/>
{eventData ? (
-
+
+
+
) : null}
} title={t('Tags')}>
diff --git a/static/app/components/feedback/feedbackItem/feedbackItemLoader.tsx b/static/app/components/feedback/feedbackItem/feedbackItemLoader.tsx
index 30da57475098d..5b2cd41bd980f 100644
--- a/static/app/components/feedback/feedbackItem/feedbackItemLoader.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackItemLoader.tsx
@@ -21,11 +21,17 @@ export default function FeedbackItemLoader() {
const projectSlug = useCurrentFeedbackProject();
useSentryAppComponentsData({projectId: projectSlug});
+ useEffect(() => {
+ if (issueResult.isError) {
+ trackAnalytics('feedback.feedback-item-not-found', {organization, feedbackId});
+ }
+ }, [organization, issueResult.isError, feedbackId]);
+
useEffect(() => {
if (issueData) {
trackAnalytics('feedback.feedback-item-rendered', {organization});
}
- }, [organization, issueData]);
+ }, [issueData, organization]);
// There is a case where we are done loading, but we're fetching updates
// This happens when the user has seen a feedback, clicks around a bit, then
diff --git a/static/app/components/feedback/feedbackItem/feedbackShortId.tsx b/static/app/components/feedback/feedbackItem/feedbackShortId.tsx
index c13274c767765..48cebcffe89f4 100644
--- a/static/app/components/feedback/feedbackItem/feedbackShortId.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackShortId.tsx
@@ -64,6 +64,11 @@ export default function FeedbackShortId({className, feedbackItem, style}: Props)
text: feedbackItem.shortId,
});
+ const {onClick: handleCopyMarkdown} = useCopyToClipboard({
+ text: `[${feedbackItem.shortId}](${feedbackUrl})`,
+ successMessage: t('Copied Markdown Feedback Link to clipboard'),
+ });
+
return (
diff --git a/static/app/components/feedback/feedbackItem/feedbackTimestampsTooltip.tsx b/static/app/components/feedback/feedbackItem/feedbackTimestampsTooltip.tsx
index cb818606c7dd8..17c20ce953749 100644
--- a/static/app/components/feedback/feedbackItem/feedbackTimestampsTooltip.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackTimestampsTooltip.tsx
@@ -4,16 +4,16 @@ import moment from 'moment-timezone';
import AutoSelectText from 'sentry/components/autoSelectText';
import {t} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import {space} from 'sentry/styles/space';
import type {FeedbackIssue} from 'sentry/utils/feedback/types';
+import {useUser} from 'sentry/utils/useUser';
type Props = {
feedbackItem: FeedbackIssue;
};
export default function FeedbackTimestampsTooltip({feedbackItem}: Props) {
- const user = ConfigStore.get('user');
+ const user = useUser();
const options = user?.options ?? {};
const format = options.clock24Hours ? 'HH:mm:ss z' : 'LTS z';
const dateFirstSeen = feedbackItem.firstSeen ? moment(feedbackItem.firstSeen) : null;
diff --git a/static/app/components/feedback/feedbackItem/messageSection.tsx b/static/app/components/feedback/feedbackItem/messageSection.tsx
index 4de03b30b47c4..ec46f71fb074c 100644
--- a/static/app/components/feedback/feedbackItem/messageSection.tsx
+++ b/static/app/components/feedback/feedbackItem/messageSection.tsx
@@ -1,14 +1,17 @@
import {Fragment} from 'react';
import styled from '@emotion/styled';
-import {Role} from 'sentry/components/acl/role';
+import {useRole} from 'sentry/components/acl/useRole';
+import Tag from 'sentry/components/badge/tag';
import {Flex} from 'sentry/components/container/flex';
import FeedbackItemUsername from 'sentry/components/feedback/feedbackItem/feedbackItemUsername';
import FeedbackTimestampsTooltip from 'sentry/components/feedback/feedbackItem/feedbackTimestampsTooltip';
import FeedbackViewers from 'sentry/components/feedback/feedbackItem/feedbackViewers';
import {ScreenshotSection} from 'sentry/components/feedback/feedbackItem/screenshotSection';
+import ExternalLink from 'sentry/components/links/externalLink';
import TimeSince from 'sentry/components/timeSince';
-import {t} from 'sentry/locale';
+import {Tooltip} from 'sentry/components/tooltip';
+import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {Event} from 'sentry/types/event';
import type {FeedbackIssue} from 'sentry/utils/feedback/types';
@@ -21,37 +24,53 @@ interface Props {
export default function MessageSection({eventData, feedbackItem}: Props) {
const organization = useOrganization();
+ const {hasRole} = useRole({role: 'attachmentsRole'});
const project = feedbackItem.project;
+ const isSpam = eventData?.occurrence?.evidenceData.isSpam;
+
return (
-
-
- ) : undefined,
- overlayStyle: {maxWidth: 300},
- }}
- />
+
+ {isSpam ? (
+
+
+ ),
+ }
+ )}
+ >
+ {t('spam')}
+
+
+ ) : null}
+
+ ) : undefined,
+ overlayStyle: {maxWidth: 300},
+ }}
+ />
+
{feedbackItem.metadata.message}
- {eventData && project ? (
-
- {({hasRole}) =>
- hasRole ? (
-
- ) : null
- }
-
+ {eventData && project && hasRole ? (
+
) : null}
diff --git a/static/app/components/feedback/feedbackItem/traceDataSection.tsx b/static/app/components/feedback/feedbackItem/traceDataSection.tsx
index 478cce007a23c..e37fce4afe4fc 100644
--- a/static/app/components/feedback/feedbackItem/traceDataSection.tsx
+++ b/static/app/components/feedback/feedbackItem/traceDataSection.tsx
@@ -1,13 +1,17 @@
import {useEffect} from 'react';
import Section from 'sentry/components/feedback/feedbackItem/feedbackItemSection';
+import Placeholder from 'sentry/components/placeholder';
import {IconSpan} from 'sentry/icons';
import {t} from 'sentry/locale';
import type {Event} from 'sentry/types/event';
import {trackAnalytics} from 'sentry/utils/analytics';
import useOrganization from 'sentry/utils/useOrganization';
import {TraceDataSection as IssuesTraceDataSection} from 'sentry/views/issueDetails/traceDataSection';
-import {useTraceTimelineEvents} from 'sentry/views/issueDetails/traceTimeline/useTraceTimelineEvents';
+import {
+ type TimelineEvent,
+ useTraceTimelineEvents,
+} from 'sentry/views/issueDetails/traceTimeline/useTraceTimelineEvents';
/**
* Doesn't require a Section wrapper. Rendered conditionally if
@@ -17,11 +21,9 @@ import {useTraceTimelineEvents} from 'sentry/views/issueDetails/traceTimeline/us
export default function TraceDataSection({
eventData,
crashReportId,
- hasProject,
}: {
crashReportId: string | undefined;
eventData: Event;
- hasProject: boolean;
}) {
// If there's a linked error from a crash report and only one other issue, showing both could be redundant.
// TODO: we could add a jest test .spec for this ^
@@ -29,11 +31,7 @@ export default function TraceDataSection({
const {oneOtherIssueEvent, traceEvents, isLoading, isError} = useTraceTimelineEvents({
event: eventData,
});
- const show =
- !isLoading &&
- !isError &&
- traceEvents.length > 1 && // traceEvents include the current event.
- (!hasProject || !crashReportId || oneOtherIssueEvent?.id === crashReportId);
+ // Note traceEvents includes the current event (feedback).
useEffect(() => {
if (isError) {
@@ -45,23 +43,36 @@ export default function TraceDataSection({
organization,
});
}
- if (hasProject && !!crashReportId && oneOtherIssueEvent?.id === crashReportId) {
+ if (eventIsCrashReportDup(oneOtherIssueEvent, crashReportId)) {
trackAnalytics('feedback.trace-section.crash-report-dup', {organization});
}
}
}, [
crashReportId,
- hasProject,
isError,
isLoading,
- oneOtherIssueEvent?.id,
+ oneOtherIssueEvent,
organization,
traceEvents.length,
]);
- return show && organization.features.includes('user-feedback-trace-section') ? (
+ return organization.features.includes('user-feedback-trace-section') &&
+ !isError &&
+ traceEvents.length > 1 &&
+ !eventIsCrashReportDup(oneOtherIssueEvent, crashReportId) ? (
} title={t('Data From The Same Trace')}>
-
+ {isLoading ? (
+
+ ) : (
+
+ )}
) : null;
}
+
+function eventIsCrashReportDup(
+ event: TimelineEvent | undefined,
+ crashReportId: string | undefined
+) {
+ return !!crashReportId && event?.id === crashReportId;
+}
diff --git a/static/app/components/feedback/feedbackItem/useFeedbackActions.ts b/static/app/components/feedback/feedbackItem/useFeedbackActions.ts
index 55c423da66900..451deeeb02b98 100644
--- a/static/app/components/feedback/feedbackItem/useFeedbackActions.ts
+++ b/static/app/components/feedback/feedbackItem/useFeedbackActions.ts
@@ -5,6 +5,7 @@ import {
addLoadingMessage,
addSuccessMessage,
} from 'sentry/actionCreators/indicator';
+import {useDeleteFeedback} from 'sentry/components/feedback/useDeleteFeedback';
import useMutateFeedback from 'sentry/components/feedback/useMutateFeedback';
import {t} from 'sentry/locale';
import {GroupStatus} from 'sentry/types/group';
@@ -27,12 +28,18 @@ const mutationOptions = {
export default function useFeedbackActions({feedbackItem}: Props) {
const organization = useOrganization();
+ const projectId = feedbackItem.project?.id;
const {markAsRead, resolve} = useMutateFeedback({
feedbackIds: [feedbackItem.id],
organization,
projectIds: feedbackItem.project ? [feedbackItem.project.id] : [],
});
+ const deleteFeedback = useDeleteFeedback([feedbackItem.id], projectId);
+
+ const hasDelete = organization.features.includes('issue-platform-deletion-ui');
+ const disableDelete = !organization.access.includes('event:admin');
+ const onDelete = deleteFeedback;
// reuse the issues ignored category for spam feedbacks
const isResolved = feedbackItem.status === GroupStatus.RESOLVED;
@@ -63,6 +70,9 @@ export default function useFeedbackActions({feedbackItem}: Props) {
}, [hasSeen, markAsRead]);
return {
+ disableDelete,
+ hasDelete,
+ onDelete,
isResolved,
onResolveClick,
isSpam,
diff --git a/static/app/components/feedback/feedbackSearch.tsx b/static/app/components/feedback/feedbackSearch.tsx
index bf3f7f2087559..52e55f48d702f 100644
--- a/static/app/components/feedback/feedbackSearch.tsx
+++ b/static/app/components/feedback/feedbackSearch.tsx
@@ -1,15 +1,11 @@
-import type {CSSProperties} from 'react';
import {useCallback, useMemo} from 'react';
-import styled from '@emotion/styled';
import orderBy from 'lodash/orderBy';
import {fetchTagValues, useFetchOrganizationTags} from 'sentry/actionCreators/tags';
import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
import type {FilterKeySection} from 'sentry/components/searchQueryBuilder/types';
-import SmartSearchBar from 'sentry/components/smartSearchBar';
import {t} from 'sentry/locale';
import type {Tag, TagCollection, TagValue} from 'sentry/types/group';
-import type {Organization} from 'sentry/types/organization';
import {getUtcDateString} from 'sentry/utils/dates';
import {isAggregateField} from 'sentry/utils/discover/fields';
import {
@@ -86,14 +82,7 @@ function getFeedbackFilterKeys(supportedTags: TagCollection) {
return Object.fromEntries(keys.map(key => [key, allTags[key]]));
}
-const getFilterKeySections = (
- tags: TagCollection,
- organization: Organization
-): FilterKeySection[] => {
- if (!organization.features.includes('search-query-builder-user-feedback')) {
- return [];
- }
-
+const getFilterKeySections = (tags: TagCollection): FilterKeySection[] => {
const customTags: Tag[] = Object.values(tags).filter(
tag =>
tag.kind === FieldKind.TAG &&
@@ -121,12 +110,7 @@ const getFilterKeySections = (
];
};
-interface Props {
- className?: string;
- style?: CSSProperties;
-}
-
-export default function FeedbackSearch({className, style}: Props) {
+export default function FeedbackSearch() {
const {selection: pageFilters} = usePageFilters();
const projectIds = pageFilters.projects;
const {pathname, query: locationQuery} = useLocation();
@@ -168,8 +152,8 @@ export default function FeedbackSearch({className, style}: Props) {
);
const filterKeySections = useMemo(() => {
- return getFilterKeySections(issuePlatformTags, organization);
- }, [issuePlatformTags, organization]);
+ return getFilterKeySections(issuePlatformTags);
+ }, [issuePlatformTags]);
const getTagValues = useCallback(
(tag: Tag, searchQuery: string): Promise => {
@@ -218,42 +202,16 @@ export default function FeedbackSearch({className, style}: Props) {
[navigate, pathname, locationQuery]
);
- if (organization.features.includes('search-query-builder-user-feedback')) {
- return (
-
- );
- }
-
return (
-
-
-
+
);
}
-
-const SearchContainer = styled('div')`
- display: grid;
- width: 100%;
-`;
diff --git a/static/app/components/feedback/list/feedbackListBulkSelection.tsx b/static/app/components/feedback/list/feedbackListBulkSelection.tsx
index 4a2780ff13dbc..ddb1c8d08e1f7 100644
--- a/static/app/components/feedback/list/feedbackListBulkSelection.tsx
+++ b/static/app/components/feedback/list/feedbackListBulkSelection.tsx
@@ -9,6 +9,7 @@ import {IconEllipsis} from 'sentry/icons/iconEllipsis';
import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {GroupStatus} from 'sentry/types/group';
+import useOrganization from 'sentry/utils/useOrganization';
interface Props
extends Pick<
@@ -24,7 +25,8 @@ export default function FeedbackListBulkSelection({
selectedIds,
deselectAll,
}: Props) {
- const {onToggleResovled, onMarkAsRead, onMarkUnread} = useBulkEditFeedbacks({
+ const organization = useOrganization();
+ const {onDelete, onToggleResolved, onMarkAsRead, onMarkUnread} = useBulkEditFeedbacks({
selectedIds,
deselectAll,
});
@@ -36,6 +38,10 @@ export default function FeedbackListBulkSelection({
const newMailboxSpam =
mailbox === 'ignored' ? GroupStatus.UNRESOLVED : GroupStatus.IGNORED;
+ const hasDelete =
+ organization.features.includes('issue-platform-deletion-ui') && selectedIds !== 'all';
+ const disableDelete = !organization.access.includes('event:admin');
+
return (
@@ -49,7 +55,7 @@ export default function FeedbackListBulkSelection({
onToggleResovled({newMailbox: newMailboxResolve})}
+ onClick={() => onToggleResolved({newMailbox: newMailboxResolve})}
>
{mailbox === 'resolved' ? t('Unresolve') : t('Resolve')}
@@ -58,7 +64,7 @@ export default function FeedbackListBulkSelection({
- onToggleResovled({
+ onToggleResolved({
newMailbox: newMailboxSpam,
moveToInbox: mailbox === 'ignored',
})
@@ -87,6 +93,14 @@ export default function FeedbackListBulkSelection({
label: t('Mark Unread'),
onAction: onMarkUnread,
},
+ {
+ key: 'delete',
+ priority: 'danger' as const,
+ label: t('Delete'),
+ hidden: !hasDelete,
+ disabled: disableDelete,
+ onAction: onDelete,
+ },
]}
/>
diff --git a/static/app/components/feedback/list/useBulkEditFeedbacks.tsx b/static/app/components/feedback/list/useBulkEditFeedbacks.tsx
index e9e0ebfb844b3..90034c4db4671 100644
--- a/static/app/components/feedback/list/useBulkEditFeedbacks.tsx
+++ b/static/app/components/feedback/list/useBulkEditFeedbacks.tsx
@@ -7,6 +7,7 @@ import {
} from 'sentry/actionCreators/indicator';
import {openConfirmModal} from 'sentry/components/confirm';
import type useListItemCheckboxState from 'sentry/components/feedback/list/useListItemCheckboxState';
+import {useDeleteFeedback} from 'sentry/components/feedback/useDeleteFeedback';
import useMutateFeedback from 'sentry/components/feedback/useMutateFeedback';
import {t, tct} from 'sentry/locale';
import {GroupStatus} from 'sentry/types/group';
@@ -45,8 +46,11 @@ export default function useBulkEditFeedbacks({deselectAll, selectedIds}: Props)
organization,
projectIds: queryView.project,
});
+ const deleteFeedback = useDeleteFeedback(selectedIds, queryView.project);
- const onToggleResovled = useCallback(
+ const onDelete = deleteFeedback;
+
+ const onToggleResolved = useCallback(
({newMailbox, moveToInbox}: {newMailbox: GroupStatus; moveToInbox?: boolean}) => {
openConfirmModal({
bypass: Array.isArray(selectedIds) && selectedIds.length === 1,
@@ -125,7 +129,8 @@ export default function useBulkEditFeedbacks({deselectAll, selectedIds}: Props)
);
return {
- onToggleResovled,
+ onDelete,
+ onToggleResolved,
onMarkAsRead,
onMarkUnread,
};
diff --git a/static/app/components/feedback/useDeleteFeedback.tsx b/static/app/components/feedback/useDeleteFeedback.tsx
new file mode 100644
index 0000000000000..585a43809d002
--- /dev/null
+++ b/static/app/components/feedback/useDeleteFeedback.tsx
@@ -0,0 +1,53 @@
+import {useCallback} from 'react';
+
+import {bulkDelete} from 'sentry/actionCreators/group';
+import {addLoadingMessage} from 'sentry/actionCreators/indicator';
+import {openConfirmModal} from 'sentry/components/confirm';
+import {t} from 'sentry/locale';
+import normalizeUrl from 'sentry/utils/url/normalizeUrl';
+import useApi from 'sentry/utils/useApi';
+import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
+import useOrganization from 'sentry/utils/useOrganization';
+
+export const useDeleteFeedback = (feedbackIds, projectId) => {
+ const organization = useOrganization();
+ const api = useApi({
+ persistInFlight: false,
+ });
+ const navigate = useNavigate();
+ const {query: locationQuery} = useLocation();
+
+ return useCallback(() => {
+ openConfirmModal({
+ onConfirm: () => {
+ addLoadingMessage(t('Deleting feedback...'));
+ bulkDelete(
+ api,
+ {
+ orgId: organization.slug,
+ projectId: projectId,
+ itemIds: feedbackIds,
+ },
+ {
+ complete: () => {
+ navigate(
+ normalizeUrl({
+ pathname: `/organizations/${organization.slug}/feedback/`,
+ query: {
+ mailbox: locationQuery.mailbox,
+ project: locationQuery.project,
+ query: locationQuery.query,
+ statsPeriod: locationQuery.statsPeriod,
+ },
+ })
+ );
+ },
+ }
+ );
+ },
+ message: t('Deleting feedbacks is permanent. Are you sure you wish to continue?'),
+ confirmText: t('Delete'),
+ });
+ }, [api, feedbackIds, locationQuery, navigate, organization.slug, projectId]);
+};
diff --git a/static/app/components/feedback/useFetchFeedbackData.tsx b/static/app/components/feedback/useFetchFeedbackData.tsx
index 11ecbfa9f08bb..76f03fd916d11 100644
--- a/static/app/components/feedback/useFetchFeedbackData.tsx
+++ b/static/app/components/feedback/useFetchFeedbackData.tsx
@@ -49,7 +49,7 @@ export default function useFetchFeedbackData({feedbackId}: Props) {
// Until that is fixed, we're going to run `markAsRead` after the issue is
// initially fetched in order to speedup initial fetch and avoid race conditions.
useEffect(() => {
- if (issueResult.isFetched && !issueData?.hasSeen) {
+ if (issueResult.isFetched && issueData && !issueData.hasSeen) {
markAsRead(true);
}
}, [issueResult.isFetched]); // eslint-disable-line react-hooks/exhaustive-deps
diff --git a/static/app/components/forms/controls/selectControl.tsx b/static/app/components/forms/controls/selectControl.tsx
index e16e6a0170cd9..b5242fdd6e90e 100644
--- a/static/app/components/forms/controls/selectControl.tsx
+++ b/static/app/components/forms/controls/selectControl.tsx
@@ -225,6 +225,10 @@ function SelectControl ({
@@ -261,7 +265,12 @@ function SelectControl ({
...provided,
diff --git a/static/app/components/globalDrawer/index.tsx b/static/app/components/globalDrawer/index.tsx
index fd5ccef22a626..fe192e975d468 100644
--- a/static/app/components/globalDrawer/index.tsx
+++ b/static/app/components/globalDrawer/index.tsx
@@ -47,6 +47,10 @@ export interface DrawerOptions {
* other elements.
*/
shouldCloseOnInteractOutside?: (interactedElement: Element) => boolean;
+ /**
+ * Called with the new pathname when the location changes; return false to keep the drawer open. Defaults to closing the drawer.
+ */
+ shouldCloseOnLocationChange?: (newPathname: Location['pathname']) => boolean;
//
// Custom framer motion transition for the drawer
//
@@ -104,7 +108,24 @@ export function GlobalDrawer({children}) {
}, [currentDrawerConfig, closeDrawer]);
// Close the drawer when the browser history changes.
- useLayoutEffect(() => closeDrawer(), [location?.pathname, closeDrawer]);
+ useLayoutEffect(
+ () => {
+ // Defaults to closing the drawer when the location changes
+ if (
+ currentDrawerConfig?.options.shouldCloseOnLocationChange?.(location.pathname) ??
+ true
+ ) {
+ closeDrawer();
+ }
+ },
+ // Ignoring changes to currentDrawerConfig?.options to prevent closing the drawer when it opens
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ [
+ location?.pathname,
+ closeDrawer,
+ currentDrawerConfig?.options.shouldCloseOnLocationChange,
+ ]
+ );
// Close the drawer when clicking outside the panel and options allow it.
const panelRef = useRef(null);
diff --git a/static/app/components/globalSelectionLink.tsx b/static/app/components/globalSelectionLink.tsx
index 9f0f4a0ebfb59..57f7fe858e2aa 100644
--- a/static/app/components/globalSelectionLink.tsx
+++ b/static/app/components/globalSelectionLink.tsx
@@ -64,7 +64,7 @@ function GlobalSelectionLink(props: Props) {
return ;
}
- let queryStringObject = {};
+ let queryStringObject: typeof globalQuery = {};
if (typeof to === 'object' && to.search) {
queryStringObject = qs.parse(to.search);
}
diff --git a/static/app/components/group/assignedTo.tsx b/static/app/components/group/assignedTo.tsx
index 9ff46c8661ff2..524493a53535e 100644
--- a/static/app/components/group/assignedTo.tsx
+++ b/static/app/components/group/assignedTo.tsx
@@ -1,4 +1,4 @@
-import {useEffect, useState} from 'react';
+import {useEffect} from 'react';
import styled from '@emotion/styled';
import {fetchOrgMembers} from 'sentry/actionCreators/members';
@@ -26,21 +26,31 @@ import type {Event} from 'sentry/types/event';
import type {Group} from 'sentry/types/group';
import type {Commit, Committer} from 'sentry/types/integrations';
import type {Project} from 'sentry/types/project';
-import {defined} from 'sentry/utils';
import type {FeedbackIssue} from 'sentry/utils/feedback/types';
import {toTitleCase} from 'sentry/utils/string/toTitleCase';
import useApi from 'sentry/utils/useApi';
import useCommitters from 'sentry/utils/useCommitters';
+import {useIssueEventOwners} from 'sentry/utils/useIssueEventOwners';
import useOrganization from 'sentry/utils/useOrganization';
-// TODO(ts): add the correct type
-type Rules = Array | null;
+/**
+ * example: codeowners:/issues -> [['codeowners', '/issues']]
+ */
+type RuleDefinition = [string, string];
+/**
+ * example: #team1 -> ['team', 'team1']
+ */
+type RuleOwner = [string, string];
+type Rule = [RuleDefinition, RuleOwner[]];
/**
* Given a list of rule objects returned from the API, locate the matching
* rules for a specific owner.
*/
-function findMatchedRules(rules: Rules, owner: Actor) {
+function findMatchedRules(
+ rules: EventOwners['rules'],
+ owner: Actor
+): Array | undefined {
if (!rules) {
return undefined;
}
@@ -49,7 +59,7 @@ function findMatchedRules(rules: Rules, owner: Actor) {
(actorType === 'user' && key === owner.email) ||
(actorType === 'team' && key === owner.name);
- const actorHasOwner = ([actorType, key]) =>
+ const actorHasOwner = ([actorType, key]: RuleOwner) =>
actorType === owner.type && matchOwner(actorType, key);
return rules
@@ -70,10 +80,11 @@ type IssueOwner = {
commits?: Commit[];
rules?: Array<[string, string]> | null;
};
-export type EventOwners = {
+export interface EventOwners {
owners: Actor[];
- rules: Rules;
-};
+ rule: RuleDefinition;
+ rules: Array;
+}
function getSuggestedReason(owner: IssueOwner) {
if (owner.commits) {
@@ -114,7 +125,7 @@ function getSuggestedReason(owner: IssueOwner) {
*/
export function getOwnerList(
committers: Committer[],
- eventOwners: EventOwners | null,
+ eventOwners: EventOwners | undefined,
assignedTo: Actor | null
): Omit[] {
const owners: IssueOwner[] = committers.map(commiter => ({
@@ -128,7 +139,7 @@ export function getOwnerList(
const normalizedOwner: IssueOwner = {
actor: owner,
rules: matchingRule,
- source: matchingRule?.[0] === 'codeowners' ? 'codeowners' : 'projectOwnership',
+ source: matchingRule?.[0]?.[0] === 'codeowners' ? 'codeowners' : 'projectOwnership',
};
const existingIdx =
@@ -177,15 +188,17 @@ function AssignedTo({
}: AssignedToProps) {
const organization = useOrganization();
const api = useApi();
- const [eventOwners, setEventOwners] = useState(null);
- const {data} = useCommitters(
+ const {data: eventOwners} = useIssueEventOwners({
+ eventId: event?.id ?? '',
+ projectSlug: project.slug,
+ });
+ const {data: committersResponse} = useCommitters(
{
eventId: event?.id ?? '',
projectSlug: project.slug,
},
{
notifyOnChangeProps: ['data'],
- enabled: defined(event?.id),
}
);
@@ -200,31 +213,11 @@ function AssignedTo({
fetchOrgMembers(api, organization.slug, [project.id]);
}, [api, organization, project]);
- useEffect(() => {
- if (!event) {
- return () => {};
- }
-
- let unmounted = false;
-
- api
- .requestPromise(
- `/projects/${organization.slug}/${project.slug}/events/${event.id}/owners/`
- )
- .then(response => {
- if (unmounted) {
- return;
- }
-
- setEventOwners(response);
- });
-
- return () => {
- unmounted = true;
- };
- }, [api, event, organization, project.slug]);
-
- const owners = getOwnerList(data?.committers ?? [], eventOwners, group.assignedTo);
+ const owners = getOwnerList(
+ committersResponse?.committers ?? [],
+ eventOwners,
+ group.assignedTo
+ );
const makeTrigger = (props: any, isOpen: boolean) => {
return (
diff --git a/static/app/components/group/assigneeSelector.tsx b/static/app/components/group/assigneeSelector.tsx
index 8d2ecf48cd9f2..8f192826ffa5b 100644
--- a/static/app/components/group/assigneeSelector.tsx
+++ b/static/app/components/group/assigneeSelector.tsx
@@ -5,6 +5,7 @@ import {addErrorMessage} from 'sentry/actionCreators/indicator';
import {AssigneeBadge} from 'sentry/components/assigneeBadge';
import AssigneeSelectorDropdown, {
type AssignableEntity,
+ type SuggestedAssignee,
} from 'sentry/components/assigneeSelectorDropdown';
import {Button} from 'sentry/components/button';
import type {OnAssignCallback} from 'sentry/components/deprecatedAssigneeSelectorDropdown';
@@ -20,6 +21,7 @@ interface AssigneeSelectorProps {
group: Group;
handleAssigneeChange: (assignedActor: AssignableEntity | null) => void;
memberList?: User[];
+ owners?: Omit[];
}
export function useHandleAssigneeChange({
@@ -73,12 +75,14 @@ export function AssigneeSelector({
memberList,
assigneeLoading,
handleAssigneeChange,
+ owners,
}: AssigneeSelectorProps) {
return (
handleAssigneeChange(assignedActor)
}
diff --git a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx
index 730daf8ddae4b..92a955cd0115a 100644
--- a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx
+++ b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx
@@ -13,6 +13,7 @@ import type {Event} from 'sentry/types/event';
import type {Group} from 'sentry/types/group';
import type {Project} from 'sentry/types/project';
import useOrganization from 'sentry/utils/useOrganization';
+import {Divider} from 'sentry/views/issueDetails/divider';
import useStreamLinedExternalIssueData from './hooks/useGroupExternalIssues';
@@ -36,17 +37,17 @@ export function StreamlinedExternalIssueList({
if (isLoading) {
return (
-
+
{t('Issue Tracking')}
-
+
);
}
return (
-
+
{t('Issue Tracking')}
{integrations.length || linkedIssues.length ? (
@@ -82,6 +83,7 @@ export function StreamlinedExternalIssueList({
))}
+ {integrations.length > 0 && linkedIssues.length > 0 ? : null}
{integrations.map(integration => {
const sharedButtonProps: ButtonProps = {
size: 'zero',
@@ -130,7 +132,7 @@ export function StreamlinedExternalIssueList({
)}
-
+
);
}
@@ -138,6 +140,7 @@ const IssueActionWrapper = styled('div')`
display: flex;
flex-wrap: wrap;
gap: ${space(1)};
+ line-height: 1.2;
`;
const StyledSectionTitle = styled(SidebarSection.Title)`
@@ -148,8 +151,7 @@ const LinkedIssue = styled(LinkButton)`
display: flex;
align-items: center;
padding: ${space(0.5)} ${space(0.75)};
- line-height: 1.05;
- border: 1px dashed ${p => p.theme.border};
+ border: 1px solid ${p => p.theme.border};
border-radius: ${p => p.theme.borderRadius};
font-weight: normal;
`;
@@ -158,7 +160,6 @@ const IssueActionButton = styled(Button)`
display: flex;
align-items: center;
padding: ${space(0.5)} ${space(0.75)};
- line-height: 1.05;
border: 1px dashed ${p => p.theme.border};
border-radius: ${p => p.theme.borderRadius};
font-weight: normal;
diff --git a/static/app/components/group/groupSummary.spec.tsx b/static/app/components/group/groupSummary.spec.tsx
index e1ad4eca9a3ef..d836956cd1a93 100644
--- a/static/app/components/group/groupSummary.spec.tsx
+++ b/static/app/components/group/groupSummary.spec.tsx
@@ -1,8 +1,7 @@
-import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
import {
GroupSummary,
- GroupSummaryHeader,
makeGroupSummaryQueryKey,
} from 'sentry/components/group/groupSummary';
import {IssueCategory} from 'sentry/types/group';
@@ -12,9 +11,10 @@ describe('GroupSummary', function () {
MockApiClient.clearMockResponses();
});
- it('renders the group summary', async function () {
+ it('renders the collapsed group summary', async function () {
const groupId = '1';
const organizationSlug = 'org-slug';
+
MockApiClient.addMockResponse({
url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0],
method: 'POST',
@@ -22,6 +22,7 @@ describe('GroupSummary', function () {
groupId,
summary: 'Test summary',
impact: 'Test impact',
+ headline: 'Test headline',
},
});
@@ -46,107 +47,16 @@ describe('GroupSummary', function () {
render( );
- expect(await screen.findByText('Issue Summary')).toBeInTheDocument();
-
- expect(screen.getByText('Issue Summary')).toBeInTheDocument();
- expect(screen.getByText('Test summary')).toBeInTheDocument();
- expect(screen.getByText('Potential Impact')).toBeInTheDocument();
- expect(screen.getByText('Test impact')).toBeInTheDocument();
- });
-
- it('does not render the group summary if no consent', async function () {
- const groupId = '1';
- const organizationSlug = 'org-slug';
- MockApiClient.addMockResponse({
- url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0],
- method: 'POST',
- body: {
- groupId,
- summary: 'Test summary',
- impact: 'Test impact',
- },
- });
-
- const setupCall = MockApiClient.addMockResponse({
- url: `/issues/${groupId}/autofix/setup/`,
- body: {
- genAIConsent: {ok: false},
- integration: {ok: true},
- githubWriteIntegration: {
- ok: true,
- repos: [
- {
- provider: 'integrations:github',
- owner: 'getsentry',
- name: 'sentry',
- external_id: '123',
- },
- ],
- },
- },
- });
-
- render( );
-
- await waitFor(() => {
- expect(setupCall).toHaveBeenCalled();
- });
-
- expect(screen.queryByText('Issue Summary')).not.toBeInTheDocument();
- expect(screen.queryByText('Test summary')).not.toBeInTheDocument();
- expect(screen.queryByText('Potential Impact')).not.toBeInTheDocument();
- expect(screen.queryByText('Test impact')).not.toBeInTheDocument();
+ // Verify the summary loads and renders the collapsed view
+ expect(await screen.findByText('Test headline')).toBeInTheDocument();
+ expect(screen.getByText('Details: Test summary')).toBeInTheDocument();
+ expect(screen.queryByText('Impact: Test impact')).not.toBeInTheDocument();
});
- it('does not render the group summary if not an error', function () {
+ it('expands the summary when clicked', async function () {
const groupId = '1';
const organizationSlug = 'org-slug';
- MockApiClient.addMockResponse({
- url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0],
- method: 'POST',
- body: {
- groupId,
- summary: 'Test summary',
- impact: 'Test impact',
- },
- });
-
- MockApiClient.addMockResponse({
- url: `/issues/${groupId}/autofix/setup/`,
- body: {
- genAIConsent: {ok: true},
- integration: {ok: true},
- githubWriteIntegration: {
- ok: true,
- repos: [
- {
- provider: 'integrations:github',
- owner: 'getsentry',
- name: 'sentry',
- external_id: '123',
- },
- ],
- },
- },
- });
-
- render( );
-
- expect(screen.queryByText('Issue Summary')).not.toBeInTheDocument();
- expect(screen.queryByText('Test summary')).not.toBeInTheDocument();
- expect(screen.queryByText('Potential Impact')).not.toBeInTheDocument();
- expect(screen.queryByText('Test impact')).not.toBeInTheDocument();
- });
-});
-
-describe('GroupSummaryHeader', function () {
- beforeEach(() => {
- MockApiClient.clearMockResponses();
- });
- it('renders the group summary header', async function () {
- const groupId = '1';
- const organizationSlug = 'org-slug';
MockApiClient.addMockResponse({
url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0],
method: 'POST',
@@ -177,14 +87,17 @@ describe('GroupSummaryHeader', function () {
},
});
- render( );
-
+ render( );
expect(await screen.findByText('Test headline')).toBeInTheDocument();
+
+ await userEvent.click(screen.getByText('Test headline'));
+ expect(screen.getByText('Test impact')).toBeInTheDocument();
});
- it('does not render the group summary headline if no consent', async function () {
+ it('does not render the summary if no consent', async function () {
const groupId = '1';
const organizationSlug = 'org-slug';
+
MockApiClient.addMockResponse({
url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0],
method: 'POST',
@@ -215,18 +128,21 @@ describe('GroupSummaryHeader', function () {
},
});
- render( );
+ render( );
await waitFor(() => {
expect(setupCall).toHaveBeenCalled();
});
expect(screen.queryByText('Test headline')).not.toBeInTheDocument();
+ expect(screen.queryByText('Test summary')).not.toBeInTheDocument();
+ expect(screen.queryByText('Impact: Test impact')).not.toBeInTheDocument();
});
- it('does not render the group summary headline if not an error', function () {
+ it('does not render the summary if the issue is not in the error category', function () {
const groupId = '1';
const organizationSlug = 'org-slug';
+
MockApiClient.addMockResponse({
url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0],
method: 'POST',
@@ -257,9 +173,10 @@ describe('GroupSummaryHeader', function () {
},
});
- render(
-
- );
+ render( );
+
expect(screen.queryByText('Test headline')).not.toBeInTheDocument();
+ expect(screen.queryByText('Test summary')).not.toBeInTheDocument();
+ expect(screen.queryByText('Impact: Test impact')).not.toBeInTheDocument();
});
});
diff --git a/static/app/components/group/groupSummary.tsx b/static/app/components/group/groupSummary.tsx
index 106c340661b6b..5779d1140dd66 100644
--- a/static/app/components/group/groupSummary.tsx
+++ b/static/app/components/group/groupSummary.tsx
@@ -1,17 +1,16 @@
+import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
import FeatureBadge from 'sentry/components/badge/featureBadge';
import {Button} from 'sentry/components/button';
import {useAutofixSetup} from 'sentry/components/events/autofix/useAutofixSetup';
-import LoadingIndicator from 'sentry/components/loadingIndicator';
import Panel from 'sentry/components/panels/panel';
import Placeholder from 'sentry/components/placeholder';
-import * as SidebarSection from 'sentry/components/sidebarSection';
-import {IconMegaphone} from 'sentry/icons';
+import {IconChevron, IconFocus, IconMegaphone} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {IssueCategory} from 'sentry/types/group';
-import marked from 'sentry/utils/marked';
+import marked, {singleLineRenderer} from 'sentry/utils/marked';
import {type ApiQueryKey, useApiQuery} from 'sentry/utils/queryClient';
import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
import useOrganization from 'sentry/utils/useOrganization';
@@ -78,140 +77,143 @@ function GroupSummaryFeatureBadge() {
);
}
-export function GroupSummaryHeader({groupId, groupCategory}: GroupSummaryProps) {
- const {data, isPending, isError, hasGenAIConsent} = useGroupSummary(
- groupId,
- groupCategory
- );
- const isStreamlined = useHasStreamlinedUI();
-
- if (
- isError ||
- (!isPending && !data?.headline) ||
- !isSummaryEnabled(hasGenAIConsent, groupCategory)
- ) {
- // Don't render the summary headline if there's an error, the error is already shown in the sidebar
- // If there is no headline we also don't want to render anything
- return null;
- }
-
- const renderContent = () => {
- if (isPending) {
- return ;
- }
-
- return {data?.headline} ;
- };
-
- return (
-
- {renderContent()}
-
-
- );
-}
-
export function GroupSummary({groupId, groupCategory}: GroupSummaryProps) {
const {data, isPending, isError, hasGenAIConsent} = useGroupSummary(
groupId,
groupCategory
);
+ const [expanded, setExpanded] = useState(false);
+
const openForm = useFeedbackForm();
+ const isStreamlined = useHasStreamlinedUI();
+
if (!isSummaryEnabled(hasGenAIConsent, groupCategory)) {
// TODO: Render a banner for needing genai consent
return null;
}
return (
-
-
-
-
- {t('Issue Summary')}
-
-
- {isPending && }
-
-
+
+ setExpanded(!data ? false : !expanded)}>
+
+
+
+
+ {isPending && }
+ {isError ? {t('Error loading summary')}
: null}
+ {data && !expanded && (
+
+ {data.headline}
+
+
+ )}
+ {data && expanded && {data.headline} }
+
+
+
+
+
+ {expanded && (
+
{isError ? {t('Error loading summary')}
: null}
{data && (
- {t('Potential Impact')}
)}
-
- {openForm && !isPending && (
-
- {
- openForm({
- messagePlaceholder: t(
- 'How can we make this issue summary more useful?'
- ),
- tags: {
- ['feedback.source']: 'issue_details_ai_issue_summary',
- ['feedback.owner']: 'ml-ai',
- },
- });
- }}
- size="xs"
- icon={ }
- >
- Give Feedback
-
-
- )}
-
-
+ {openForm && !isPending && (
+
+ {
+ openForm({
+ messagePlaceholder: t(
+ 'How can we make this issue summary more useful?'
+ ),
+ tags: {
+ ['feedback.source']: 'issue_details_ai_issue_summary',
+ ['feedback.owner']: 'ml-ai',
+ },
+ });
+ }}
+ size="xs"
+ icon={ }
+ >
+ Give Feedback
+
+
+
+ )}
+
+ )}
+
);
}
-const Wrapper = styled(Panel)`
- display: flex;
- flex-direction: column;
- margin-bottom: 0;
- background: linear-gradient(
- 269.35deg,
- ${p => p.theme.backgroundTertiary} 0.32%,
- rgba(245, 243, 247, 0) 99.69%
- );
- padding: ${space(1.5)} ${space(2)};
+const Body = styled('div')`
+ padding: 0 ${space(4)} ${space(1.5)} ${space(4)};
+`;
+
+const HeadlinePreview = styled('span')`
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ margin-right: ${space(0.5)};
+ flex-shrink: 0;
+ max-width: 92%;
+`;
+
+const SummaryPreview = styled('span')`
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ flex-grow: 1;
+ color: ${p => p.theme.subText};
+`;
+
+const Wrapper = styled(Panel)<{isStreamlined: boolean}>`
+ margin-bottom: ${p => (p.isStreamlined ? 0 : space(1))};
+ padding: ${space(0.5)};
`;
const StyledTitleRow = styled('div')`
display: flex;
- align-items: center;
+ align-items: flex-start;
justify-content: space-between;
+ padding: ${space(1)} ${space(1)} ${space(1)} ${space(1)};
+
+ &:hover {
+ cursor: pointer;
+ background: ${p => p.theme.backgroundSecondary};
+ }
`;
-const StyledTitle = styled('div')`
- margin: 0;
- color: ${p => p.theme.text};
- font-size: ${p => p.theme.fontSizeMedium};
- font-weight: 600;
- align-items: center;
+const CollapsedRow = styled('div')`
display: flex;
+ width: 100%;
+ align-items: flex-start;
+ overflow: hidden;
`;
-const StyledFeatureBadge = styled(FeatureBadge)`
- margin-top: -1px;
-`;
+const StyledFeatureBadge = styled(FeatureBadge)``;
-const SummaryContent = styled('div')`
+const HeadlineContent = styled('span')`
overflow-wrap: break-word;
p {
margin: 0;
@@ -219,14 +221,17 @@ const SummaryContent = styled('div')`
code {
word-break: break-all;
}
+ width: 100%;
`;
-const StyledLoadingIndicator = styled(LoadingIndicator)`
- display: flex;
- align-items: center;
- justify-content: center;
- width: 16px;
- max-height: 16px;
+const SummaryContent = styled('div')`
+ overflow-wrap: break-word;
+ p {
+ margin: 0;
+ }
+ code {
+ word-break: break-all;
+ }
`;
const ImpactContent = styled('div')`
@@ -242,12 +247,20 @@ const Content = styled('div')`
const ButtonContainer = styled('div')`
margin-top: ${space(1.5)};
- margin-bottom: ${space(0.5)};
+ align-items: center;
+ display: flex;
`;
-const SummaryHeaderContainer = styled('div')<{isStreamlined: boolean}>`
- display: flex;
- align-items: center;
- margin-top: ${space(1)};
- color: ${p => (p.isStreamlined ? p.theme.subText : p.theme.text)};
+const IconContainer = styled('div')`
+ flex-shrink: 0;
+ margin-right: ${space(1)};
+ margin-top: ${space(0.25)};
+ max-height: ${space(2)};
+`;
+
+const IconContainerRight = styled('div')`
+ flex-shrink: 0;
+ margin-left: ${space(1)};
+ margin-top: ${space(0.25)};
+ max-height: ${space(2)};
`;
diff --git a/static/app/components/group/inboxBadges/shortId.tsx b/static/app/components/group/inboxBadges/shortId.tsx
index 04e77743da72f..88f5873beead9 100644
--- a/static/app/components/group/inboxBadges/shortId.tsx
+++ b/static/app/components/group/inboxBadges/shortId.tsx
@@ -37,5 +37,4 @@ const IdWrapper = styled('div')`
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
- margin-top: 1px;
`;
diff --git a/static/app/components/group/issueReplayCount.tsx b/static/app/components/group/issueReplayCount.tsx
index 5c343a07d53a7..fd99e28b6e179 100644
--- a/static/app/components/group/issueReplayCount.tsx
+++ b/static/app/components/group/issueReplayCount.tsx
@@ -54,6 +54,10 @@ const ReplayCountLink = styled(Link)`
color: ${p => p.theme.gray400};
font-size: ${p => p.theme.fontSizeSmall};
gap: 0 ${space(0.5)};
+
+ &:hover {
+ color: ${p => p.theme.linkHoverColor};
+ }
`;
export default IssueReplayCount;
diff --git a/static/app/components/group/streamlinedParticipantList.spec.tsx b/static/app/components/group/streamlinedParticipantList.spec.tsx
index 62ba12a621f79..915f31e17a660 100644
--- a/static/app/components/group/streamlinedParticipantList.spec.tsx
+++ b/static/app/components/group/streamlinedParticipantList.spec.tsx
@@ -19,22 +19,32 @@ describe('ParticipantList', () => {
it('expands and collapses the list when clicked', async () => {
render( );
expect(screen.queryByText('#team-1')).not.toBeInTheDocument();
- await userEvent.click(screen.getByText('JD'));
+ await userEvent.click(screen.getByText('JD'), {skipHover: true});
expect(await screen.findByText('#team-1')).toBeInTheDocument();
expect(await screen.findByText('Bob Alice')).toBeInTheDocument();
expect(screen.getByText('Teams (2)')).toBeInTheDocument();
expect(screen.getByText('Individuals (2)')).toBeInTheDocument();
- await userEvent.click(screen.getAllByText('JD')[0]);
+ await userEvent.click(screen.getAllByText('JD')[0], {skipHover: true});
expect(screen.queryByText('Bob Alice')).not.toBeInTheDocument();
});
it('does not display section headers when there is only users or teams', async () => {
render( );
- await userEvent.click(screen.getByText('JD'));
+ await userEvent.click(screen.getByText('JD'), {skipHover: true});
expect(await screen.findByText('Bob Alice')).toBeInTheDocument();
expect(screen.queryByText('Teams')).not.toBeInTheDocument();
});
+
+ it('skips duplicate information between name and email', async () => {
+ const duplicateInfoUsers = [
+ UserFixture({id: '1', name: 'john.doe@example.com', email: 'john.doe@example.com'}),
+ ];
+ render( );
+ await userEvent.click(screen.getByText('J'), {skipHover: true});
+ // Would find two elements if it was duplicated
+ expect(await screen.findByText('john.doe@example.com')).toBeInTheDocument();
+ });
});
diff --git a/static/app/components/group/streamlinedParticipantList.tsx b/static/app/components/group/streamlinedParticipantList.tsx
index 30763a9902f05..b64409f0f81fd 100644
--- a/static/app/components/group/streamlinedParticipantList.tsx
+++ b/static/app/components/group/streamlinedParticipantList.tsx
@@ -49,7 +49,9 @@ export default function ParticipantList({users, teams}: DropdownListProps) {
{`#${team.slug}`}
- {tn('%s member', '%s members', team.memberCount)}
+
+ {tn('%s member', '%s members', team.memberCount)}
+
))}
@@ -59,10 +61,12 @@ export default function ParticipantList({users, teams}: DropdownListProps) {
{users.map(user => (
-
- {user.name}
- {user.email}
-
+
+ {user.name}
+ {user.email !== user.name ? (
+ {user.email}
+ ) : null}
+
))}
@@ -82,6 +86,7 @@ const ParticipantListWrapper = styled('div')`
max-height: 325px;
overflow-y: auto;
border-radius: ${p => p.theme.borderRadius};
+ color: ${p => p.theme.textColor};
& > div:not(:last-child) {
border-bottom: 1px solid ${p => p.theme.border};
@@ -106,10 +111,16 @@ const UserRow = styled('div')`
gap: ${space(1)};
line-height: 1.2;
font-size: ${p => p.theme.fontSizeSmall};
+ min-height: 45px;
+`;
+
+const NameWrapper = styled('div')`
+ & > div:only-child {
+ margin-top: ${space(0.25)};
+ }
`;
-const SubText = styled('div')`
- color: ${p => p.theme.subText};
+const SmallText = styled('div')`
font-size: ${p => p.theme.fontSizeExtraSmall};
`;
diff --git a/static/app/components/issues/groupList.tsx b/static/app/components/issues/groupList.tsx
index 667e57d0a805f..7992c02d8b856 100644
--- a/static/app/components/issues/groupList.tsx
+++ b/static/app/components/issues/groupList.tsx
@@ -47,7 +47,9 @@ export type GroupListColumn =
| 'users'
| 'priority'
| 'assignee'
- | 'lastTriggered';
+ | 'lastTriggered'
+ | 'lastSeen'
+ | 'firstSeen';
type Props = WithRouterProps & {
api: Client;
diff --git a/static/app/components/keyValueTable.tsx b/static/app/components/keyValueTable.tsx
index e099864d2788e..c1c4b23f206a7 100644
--- a/static/app/components/keyValueTable.tsx
+++ b/static/app/components/keyValueTable.tsx
@@ -1,5 +1,5 @@
import {Fragment} from 'react';
-import type {Theme} from '@emotion/react';
+import {css, type Theme} from '@emotion/react';
import styled from '@emotion/styled';
import {space} from 'sentry/styles/space';
@@ -25,20 +25,18 @@ export function KeyValueTableRow({keyName, value, type}: Props) {
);
}
-const commonStyles = ({theme, type}: {type: Props['type']} & {theme: Theme}) => `
+const commonStyles = ({theme, type}: {type: Props['type']} & {theme: Theme}) => css`
font-size: ${theme.fontSizeMedium};
padding: ${space(0.5)} ${space(1)};
- font-weight: ${p => p.theme.fontWeightNormal};
+ font-weight: ${theme.fontWeightNormal};
line-height: inherit;
- ${p => p.theme.overflowEllipsis};
-
- background-color: ${
- type === 'error'
- ? theme.red100 + ' !important'
- : type === 'warning'
- ? 'var(--background-warning-default, rgba(245, 176, 0, 0.09)) !important'
- : 'inherit'
- };
+ ${theme.overflowEllipsis};
+
+ background-color: ${type === 'error'
+ ? theme.red100 + ' !important'
+ : type === 'warning'
+ ? 'var(--background-warning-default, rgba(245, 176, 0, 0.09)) !important'
+ : 'inherit'};
&:nth-of-type(2n-1) {
background-color: ${theme.backgroundSecondary};
}
diff --git a/static/app/components/lazyLoad.spec.tsx b/static/app/components/lazyLoad.spec.tsx
index 6915d87d407f3..d7eca30ff2908 100644
--- a/static/app/components/lazyLoad.spec.tsx
+++ b/static/app/components/lazyLoad.spec.tsx
@@ -51,21 +51,14 @@ describe('LazyLoad', function () {
it('renders with error message when promise is rejected', async function () {
// eslint-disable-next-line no-console
jest.spyOn(console, 'error').mockImplementation(jest.fn());
- const getComponent = jest.fn(
- () =>
- new Promise((_resolve, reject) =>
- reject(new Error('Could not load component'))
- )
- );
-
- try {
- render( );
- } catch (err) {
- // ignore
- }
+ const getComponent = () => Promise.reject(new Error('Could not load component'));
+
+ render( );
expect(
- await screen.findByText('There was an error loading a component.')
+ await screen.findByText('There was an error loading a component.', undefined, {
+ timeout: 5000,
+ })
).toBeInTheDocument();
// eslint-disable-next-line no-console
diff --git a/static/app/components/links/link.tsx b/static/app/components/links/link.tsx
index 4a2908ef0c267..c54d15f4e8a7d 100644
--- a/static/app/components/links/link.tsx
+++ b/static/app/components/links/link.tsx
@@ -1,7 +1,8 @@
import {forwardRef} from 'react';
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {Link as RouterLink} from 'react-router';
-import {Link as Router6Link} from 'react-router-dom';
+import {
+ Link as RouterLink,
+ type LinkProps as ReactRouterLinkProps,
+} from 'react-router-dom';
import styled from '@emotion/styled';
import type {LocationDescriptor} from 'history';
@@ -38,6 +39,7 @@ export interface LinkProps
* Forwarded ref
*/
forwardedRef?: React.Ref;
+ state?: ReactRouterLinkProps['state'];
}
/**
@@ -49,17 +51,9 @@ function BaseLink({disabled, to, forwardedRef, ...props}: LinkProps): React.Reac
to = normalizeUrl(to, location);
if (!disabled && location) {
- if (window.__SENTRY_USING_REACT_ROUTER_SIX) {
- return (
-
- );
- }
-
- return ;
+ return (
+
+ );
}
return ;
diff --git a/static/app/components/links/listLink.tsx b/static/app/components/links/listLink.tsx
index af7475b24b675..36680d62bcaff 100644
--- a/static/app/components/links/listLink.tsx
+++ b/static/app/components/links/listLink.tsx
@@ -1,5 +1,3 @@
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {Link as RouterLink} from 'react-router';
import {NavLink} from 'react-router-dom';
import styled from '@emotion/styled';
import classNames from 'classnames';
@@ -8,7 +6,6 @@ import type {LocationDescriptor} from 'history';
import {locationDescriptorToTo} from 'sentry/utils/reactRouter6Compat/location';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
import {useLocation} from 'sentry/utils/useLocation';
-import useRouter from 'sentry/utils/useRouter';
interface ListLinkProps
extends Omit<
@@ -36,31 +33,20 @@ function ListLink({
disabled = false,
...props
}: ListLinkProps) {
- const router = useRouter();
const location = useLocation();
const target = normalizeUrl(to);
const active =
isActive?.(target, index) ??
- // XXX(epurkhiser): our shim for router.isActive will throw an error in
- // react-router 6. Fallback to manually checking if the path is active
- (window.__SENTRY_USING_REACT_ROUTER_SIX
- ? location.pathname === (typeof target === 'string' ? target : target.pathname)
- : router.isActive(target, index));
-
- const link = window.__SENTRY_USING_REACT_ROUTER_SIX ? (
-
- {children}
-
- ) : (
-
- {children}
-
- );
+ // XXX(epurkhiser): This is carry over from the react-router 3 days.
+ // There's probably a a better way to detect active
+ location.pathname === (typeof target === 'string' ? target : target.pathname);
return (
- {link}
+
+ {children}
+
);
}
diff --git a/static/app/components/links/styles.tsx b/static/app/components/links/styles.tsx
index 3893f4a6f26c9..8f85e7f462b6c 100644
--- a/static/app/components/links/styles.tsx
+++ b/static/app/components/links/styles.tsx
@@ -1,6 +1,6 @@
-import type {Theme} from '@emotion/react';
+import {css, type Theme} from '@emotion/react';
-export const linkStyles = ({disabled, theme}: {theme: Theme; disabled?: boolean}) => `
+export const linkStyles = ({disabled, theme}: {theme: Theme; disabled?: boolean}) => css`
border-radius: ${theme.linkBorderRadius};
&:focus-visible {
@@ -9,14 +9,12 @@ export const linkStyles = ({disabled, theme}: {theme: Theme; disabled?: boolean}
outline: none;
}
- ${
- disabled &&
- `
- color:${theme.disabled};
- pointer-events: none;
- :hover {
- color: ${theme.disabled};
- }
- `
- }
+ ${disabled &&
+ css`
+ color: ${theme.disabled};
+ pointer-events: none;
+ :hover {
+ color: ${theme.disabled};
+ }
+ `}
`;
diff --git a/static/app/components/loadingIndicator.stories.tsx b/static/app/components/loadingIndicator.stories.tsx
new file mode 100644
index 0000000000000..d397d48e596e3
--- /dev/null
+++ b/static/app/components/loadingIndicator.stories.tsx
@@ -0,0 +1,10 @@
+import LoadingIndicator from 'sentry/components/loadingIndicator';
+import storyBook from 'sentry/stories/storyBook';
+
+export default storyBook(LoadingIndicator, story => {
+ story('Default', () => );
+
+ story('Mini', () => );
+
+ story('With Message', () => Loading... );
+});
diff --git a/static/app/components/loadingIndicator.tsx b/static/app/components/loadingIndicator.tsx
index 6147119f3fe49..c7ce58f5fb80f 100644
--- a/static/app/components/loadingIndicator.tsx
+++ b/static/app/components/loadingIndicator.tsx
@@ -39,7 +39,7 @@ function LoadingIndicator(props: Props) {
'loading-indicator': true,
});
- let loadingStyle = {};
+ let loadingStyle: React.CSSProperties = {};
if (size) {
loadingStyle = {
width: size,
diff --git a/static/app/components/metrics/metricSearchBar.spec.tsx b/static/app/components/metrics/metricSearchBar.spec.tsx
index 748b25d951a03..ec8b3083e27c6 100644
--- a/static/app/components/metrics/metricSearchBar.spec.tsx
+++ b/static/app/components/metrics/metricSearchBar.spec.tsx
@@ -14,11 +14,6 @@ describe('metricSearchBar', function () {
url: '/organizations/org-slug/metrics/tags/',
body: [],
});
- MockApiClient.addMockResponse({
- method: 'POST',
- url: '/organizations/org-slug/recent-searches/',
- body: [],
- });
MockApiClient.addMockResponse({
method: 'GET',
url: '/organizations/org-slug/recent-searches/',
@@ -36,57 +31,10 @@ describe('metricSearchBar', function () {
});
});
- describe('using SmartSearchBar', function () {
- it('does not allow illegal filters', async function () {
- render(
-
- );
- await screen.findByPlaceholderText('Filter by tags');
- await userEvent.type(screen.getByPlaceholderText('Filter by tags'), 'potato:db');
- expect(screen.getByTestId('search-autocomplete-item')).toHaveTextContent(
- "The field potato isn't supported here."
- );
- await userEvent.keyboard('{enter}');
- expect(onChange).not.toHaveBeenCalled();
- });
- it('does not allow insights filters when not using an insights mri', async function () {
- render(
-
- );
- await screen.findByPlaceholderText('Filter by tags');
- await userEvent.type(
- screen.getByPlaceholderText('Filter by tags'),
- 'span.module:db'
- );
- expect(screen.getByTestId('search-autocomplete-item')).toHaveTextContent(
- "The field span.module isn't supported here."
- );
- await userEvent.keyboard('{enter}');
- expect(onChange).not.toHaveBeenCalled();
- });
- it('allows insights specific filters when using an insights mri', async function () {
- render(
-
- );
- await screen.findByPlaceholderText('Filter by tags');
- await userEvent.type(
- screen.getByPlaceholderText('Filter by tags'),
- 'span.module:db'
- );
- expect(screen.queryByTestId('search-autocomplete-item')).not.toBeInTheDocument();
- await userEvent.keyboard('{enter}');
- expect(onChange).toHaveBeenCalledWith('span.module:"db"');
- });
- });
-
describe('using SearchQueryBuilder', function () {
- const organization = {features: ['search-query-builder-metrics']};
it('does not allow illegal filters', async function () {
render(
- ,
- {
- organization,
- }
+
);
await screen.findByPlaceholderText('Filter by tags');
await userEvent.type(screen.getByPlaceholderText('Filter by tags'), 'potato:db');
@@ -96,10 +44,7 @@ describe('metricSearchBar', function () {
});
it('does not allow insights filters when not using an insights mri', async function () {
render(
- ,
- {
- organization,
- }
+
);
await screen.findByPlaceholderText('Filter by tags');
await userEvent.type(
@@ -112,10 +57,7 @@ describe('metricSearchBar', function () {
});
it('allows insights specific filters when using an insights mri', async function () {
render(
- ,
- {
- organization,
- }
+
);
await screen.findByPlaceholderText('Filter by tags');
await userEvent.type(
diff --git a/static/app/components/metrics/metricSearchBar.tsx b/static/app/components/metrics/metricSearchBar.tsx
index 482b4e405de31..f3d8b58a7375c 100644
--- a/static/app/components/metrics/metricSearchBar.tsx
+++ b/static/app/components/metrics/metricSearchBar.tsx
@@ -1,6 +1,5 @@
import {useCallback, useMemo} from 'react';
import {css, type SerializedStyles} from '@emotion/react';
-import {useId} from '@react-aria/utils';
import {QueryFieldGroup} from 'sentry/components/metrics/queryFieldGroup';
import {
@@ -8,14 +7,10 @@ import {
type SearchQueryBuilderProps,
} from 'sentry/components/searchQueryBuilder';
import type {SmartSearchBarProps} from 'sentry/components/smartSearchBar';
-import SmartSearchBar from 'sentry/components/smartSearchBar';
import {t} from 'sentry/locale';
import {SavedSearchType, type TagCollection} from 'sentry/types/group';
import type {MRI} from 'sentry/types/metrics';
-import {
- hasMetricsNewInputs,
- hasMetricsNewSearchQueryBuilder,
-} from 'sentry/utils/metrics/features';
+import {hasMetricsNewInputs} from 'sentry/utils/metrics/features';
import {getUseCaseFromMRI} from 'sentry/utils/metrics/mri';
import type {MetricTag} from 'sentry/utils/metrics/types';
import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags';
@@ -64,14 +59,12 @@ export function MetricSearchBar({
onChange,
query,
projectIds,
- id: idProp,
...props
}: MetricSearchBarProps) {
const organization = useOrganization();
const api = useApi();
const {selection} = usePageFilters();
const selectedProjects = useSelectedProjects();
- const id = useId(idProp);
const projectIdNumbers = useMemo(
() => projectIds?.map(projectId => parseInt(projectId, 10)),
[projectIds]
@@ -172,37 +165,11 @@ export function MetricSearchBar({
css: wideSearchBarCss(disabled),
};
- const smartSearchProps: Partial & {css: SerializedStyles} = {
- id,
- disabled,
- maxMenuHeight: 220,
- organization,
- onGetTagValues: getTagValues,
- // don't highlight tags while loading as we don't know yet if they are supported
- highlightUnsupportedTags: !isPending,
- onClose: handleChange,
- onSearch: handleChange,
- placeholder: t('Filter by tags'),
- query,
- savedSearchType: SavedSearchType.METRIC,
- css: wideSearchBarCss(disabled),
- ...props,
- ...searchConfig,
- };
-
if (hasMetricsNewInputs(organization)) {
- if (hasMetricsNewSearchQueryBuilder(organization)) {
- return ;
- }
-
- return ;
- }
-
- if (hasMetricsNewSearchQueryBuilder(organization)) {
- return ;
+ return ;
}
- return ;
+ return ;
}
function wideSearchBarCss(disabled?: boolean) {
diff --git a/static/app/components/modals/inviteMembersModal/index.tsx b/static/app/components/modals/inviteMembersModal/index.tsx
index a4f4753780e98..88c3128b848b4 100644
--- a/static/app/components/modals/inviteMembersModal/index.tsx
+++ b/static/app/components/modals/inviteMembersModal/index.tsx
@@ -1,14 +1,24 @@
import {css} from '@emotion/react';
+import styled from '@emotion/styled';
import type {ModalRenderProps} from 'sentry/actionCreators/modal';
import ErrorBoundary from 'sentry/components/errorBoundary';
import LoadingError from 'sentry/components/loadingError';
import LoadingIndicator from 'sentry/components/loadingIndicator';
+import {
+ ErrorAlert,
+ InviteMessage,
+} from 'sentry/components/modals/inviteMembersModal/inviteHeaderMessages';
+import {InviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteMembersFooter from 'sentry/components/modals/inviteMembersModal/inviteMembersFooter';
import InviteMembersModalView from 'sentry/components/modals/inviteMembersModal/inviteMembersModalview';
+import InviteRowControl from 'sentry/components/modals/inviteMembersModal/inviteRowControlNew';
import type {InviteRow} from 'sentry/components/modals/inviteMembersModal/types';
import useInviteModal from 'sentry/components/modals/inviteMembersModal/useInviteModal';
import {InviteModalHook} from 'sentry/components/modals/memberInviteModalCustomization';
+import {ORG_ROLES} from 'sentry/constants';
import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
import {isActiveSuperuser} from 'sentry/utils/isActiveSuperuser';
import useOrganization from 'sentry/utils/useOrganization';
@@ -19,6 +29,8 @@ interface InviteMembersModalProps extends ModalRenderProps {
}
function InviteMembersModal({
+ Header,
+ Body,
closeModal,
initialData,
source,
@@ -37,6 +49,7 @@ function InviteMembersModal({
setEmails,
setRole,
setTeams,
+ setInviteStatus,
willInvite,
complete,
inviteStatus,
@@ -70,7 +83,41 @@ function InviteMembersModal({
onSendInvites={sendInvites}
>
{({sendInvites: _sendInvites, canSend, headerInfo}) => {
- return (
+ return organization.features.includes('invite-members-new-modal') ? (
+
+
+
+ {t('Invite New Members')}
+
+
+
+ {headerInfo}
+
+
+
+
+ ) : (
p.theme.fontWeightNormal};
+ font-size: ${p => p.theme.headerFontSize};
+ margin-top: 0;
+ margin-bottom: ${space(0.75)};
+`;
+
+const StyledInviteRow = styled(InviteRowControl)`
+ margin-bottom: ${space(1.5)};
+`;
+
export default InviteMembersModal;
diff --git a/static/app/components/modals/inviteMembersModal/inviteHeaderMessages.tsx b/static/app/components/modals/inviteMembersModal/inviteHeaderMessages.tsx
new file mode 100644
index 0000000000000..a08daeb972a89
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteHeaderMessages.tsx
@@ -0,0 +1,33 @@
+import styled from '@emotion/styled';
+
+import Alert from 'sentry/components/alert';
+import {useInviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+
+export function ErrorAlert() {
+ const {error} = useInviteMembersContext();
+ return error ? (
+
+ {error}
+
+ ) : null;
+}
+
+export function InviteMessage() {
+ const {willInvite} = useInviteMembersContext();
+ return willInvite ? (
+ {t('Invite unlimited new members to join your organization.')}
+ ) : (
+
+ {t(
+ 'You can’t invite users directly, but we’ll forward your request to an org owner or manager for approval.'
+ )}
+
+ );
+}
+
+const Subtext = styled('p')`
+ color: ${p => p.theme.subText};
+ margin-bottom: ${space(3)};
+`;
diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersContext.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersContext.tsx
new file mode 100644
index 0000000000000..208b0002fd943
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteMembersContext.tsx
@@ -0,0 +1,56 @@
+import {createContext, useContext} from 'react';
+
+import type {
+ InviteRow,
+ InviteStatus,
+ NormalizedInvite,
+} from 'sentry/components/modals/inviteMembersModal/types';
+
+export type InviteMembersContextValue = {
+ complete: boolean;
+ inviteStatus: InviteStatus;
+ invites: NormalizedInvite[];
+ pendingInvites: InviteRow;
+ reset: () => void;
+ sendInvites: () => void;
+ sendingInvites: boolean;
+ setEmails: (emails: string[], index: number) => void;
+ setInviteStatus: (inviteStatus: InviteStatus) => void;
+ setRole: (role: string, index: number) => void;
+ setTeams: (teams: string[], index: number) => void;
+ willInvite: boolean;
+ error?: string;
+};
+
+export const defaultInviteProps = {
+ complete: false,
+ inviteStatus: {},
+ invites: [],
+ pendingInvites: {
+ emails: new Set(),
+ role: '',
+ teams: new Set(),
+ },
+ reset: () => {},
+ sendInvites: () => {},
+ sendingInvites: false,
+ setEmails: () => {},
+ setRole: () => {},
+ setTeams: () => {},
+ setInviteStatus: () => {},
+ willInvite: false,
+};
+
+export const InviteMembersContext = createContext(null);
+
+export function useInviteMembersContext(): InviteMembersContextValue {
+ const context = useContext(InviteMembersContext);
+
+ if (!context) {
+ throw new Error(
+ 'useInviteMembersContext must be used within a InviteMembersContext.Provider'
+ );
+ }
+
+ return context;
+}
diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersFooter.spec.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.spec.tsx
new file mode 100644
index 0000000000000..8cfc649a7c9f1
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.spec.tsx
@@ -0,0 +1,88 @@
+import {OrganizationFixture} from 'sentry-fixture/organization';
+
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+
+import {
+ defaultInviteProps,
+ InviteMembersContext,
+} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteMembersFooter from 'sentry/components/modals/inviteMembersModal/inviteMembersFooter';
+
+describe('InviteRowControlNew', function () {
+ const renderComponent = props => {
+ render(
+
+
+ ,
+ {organization: OrganizationFixture({features: ['invite-members-new-modal']})}
+ );
+ };
+
+ it('disables send button when there are no emails', function () {
+ renderComponent({});
+
+ const sendButton = screen.getByLabelText(/send invite/i);
+ expect(sendButton).toBeDisabled();
+ });
+
+ it('enables send button when there are emails', async function () {
+ const mockSetInviteStatus = jest.fn();
+ const mockSendInvites = jest.fn();
+ renderComponent({
+ invites: [
+ {
+ email: 'moo-deng@email.com',
+ role: 'member',
+ teams: new Set(['moo-deng']),
+ },
+ ],
+ setInviteStatus: mockSetInviteStatus,
+ sendInvites: mockSendInvites,
+ });
+
+ const sendButton = screen.getByLabelText(/send invite/i);
+ expect(sendButton).toBeEnabled();
+ await userEvent.click(sendButton);
+ expect(mockSetInviteStatus).toHaveBeenCalled();
+ expect(mockSendInvites).toHaveBeenCalled();
+ });
+
+ it('displays correct status message for sent invites', function () {
+ renderComponent({
+ complete: true,
+ inviteStatus: {
+ 'moo-deng': {sent: true},
+ 'moo-waan': {sent: true},
+ },
+ willInvite: true,
+ });
+ expect(screen.getByTestId('sent-invites')).toHaveTextContent(/2/i);
+ expect(screen.queryByTestId('failed-invites')).not.toBeInTheDocument();
+ });
+
+ it('displays correct status message for failed invites', function () {
+ renderComponent({
+ complete: true,
+ inviteStatus: {
+ 'moo-deng': {sent: false, error: 'Error'},
+ 'moo-waan': {sent: false, error: 'Error'},
+ },
+ willInvite: true,
+ });
+ expect(screen.getByText(/2/i)).toBeInTheDocument();
+ });
+
+ it('displays correct status message for sent and failed invites', function () {
+ renderComponent({
+ complete: true,
+ inviteStatus: {
+ 'moo-deng': {sent: true},
+ 'moo-waan': {sent: true},
+ 'moo-toon': {sent: false, error: 'Error'},
+ },
+ willInvite: true,
+ });
+ expect(screen.getByTestId('sent-invites')).toHaveTextContent(/2/i);
+ expect(screen.getByTestId('failed-invites')).toHaveTextContent(/1/i);
+ });
+});
diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersFooter.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.tsx
new file mode 100644
index 0000000000000..a130cc89a194e
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.tsx
@@ -0,0 +1,80 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import ButtonBar from 'sentry/components/buttonBar';
+import InviteButton from 'sentry/components/modals/inviteMembersModal/inviteButton';
+import {useInviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteStatusMessage from 'sentry/components/modals/inviteMembersModal/inviteStatusMessage';
+import {space} from 'sentry/styles/space';
+import useOrganization from 'sentry/utils/useOrganization';
+
+interface Props {
+ canSend: boolean;
+}
+
+export default function InviteMembersFooter({canSend}: Props) {
+ const organization = useOrganization();
+ const {
+ complete,
+ inviteStatus,
+ setInviteStatus,
+ invites,
+ pendingInvites,
+ sendInvites,
+ sendingInvites,
+ willInvite,
+ } = useInviteMembersContext();
+ const isValidInvites = invites.length > 0;
+
+ const removeSentInvites = () => {
+ const emails = Object.keys(inviteStatus);
+ let newInviteStatus = {};
+ emails.forEach(email => {
+ if (pendingInvites.emails.has(email)) {
+ newInviteStatus = {...newInviteStatus, [email]: inviteStatus[email]};
+ }
+ });
+ setInviteStatus(newInviteStatus);
+ };
+
+ return (
+
+
+ {/* TODO(mia): remove these props and use InviteMemberContext once old modal is removed */}
+
+
+
+
+ {
+ organization.features.includes('invite-members-new-modal') &&
+ removeSentInvites();
+ sendInvites();
+ }}
+ />
+
+
+
+ );
+}
+
+const FooterContent = styled('div')`
+ display: flex;
+ gap: ${space(1)};
+ align-items: center;
+ justify-content: space-between;
+ flex: 1;
+`;
diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.spec.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.spec.tsx
new file mode 100644
index 0000000000000..7d5b3eb03792b
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.spec.tsx
@@ -0,0 +1,138 @@
+import {TeamFixture} from 'sentry-fixture/team';
+
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+
+import {
+ defaultInviteProps,
+ InviteMembersContext,
+} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteRowControlNew from 'sentry/components/modals/inviteMembersModal/inviteRowControlNew';
+import TeamStore from 'sentry/stores/teamStore';
+
+describe('InviteRowControlNew', function () {
+ const teamData = [
+ {
+ id: '1',
+ slug: 'moo-deng',
+ name: "Moo Deng's Team",
+ },
+ {
+ id: '2',
+ slug: 'moo-waan',
+ name: "Moo Waan's Team",
+ },
+ ];
+ const teams = teamData.map(data => TeamFixture(data));
+
+ const getComponent = props => (
+
+
+
+ );
+
+ beforeEach(function () {
+ TeamStore.loadInitialData(teams);
+ });
+
+ it('renders', function () {
+ render(getComponent(defaultInviteProps));
+
+ expect(screen.getByText('Email addresses')).toBeInTheDocument();
+ expect(screen.getByText('Role')).toBeInTheDocument();
+ expect(screen.getByText('Add to team')).toBeInTheDocument();
+ });
+
+ describe.each([
+ {email: 'test-space@example.com', delimiter: ' '},
+ {email: 'test-comma@example.com', delimiter: ','},
+ {email: 'test-newline@example.com', delimiter: '{enter}'},
+ ])('updates email addresses when new emails are inputted', ({email, delimiter}) => {
+ it(`invokes the mock correctly with one using delimiter "${delimiter}"`, async () => {
+ const mockSetEmails = jest.fn();
+ render(getComponent({...defaultInviteProps, setEmails: mockSetEmails}));
+ const emailInput = screen.getByLabelText('Email Addresses');
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ expect(mockSetEmails).toHaveBeenCalled();
+ });
+
+ it(`invokes the mock correctly with many using delimiter "${delimiter}"`, async () => {
+ const mockSetEmails = jest.fn();
+ render(getComponent({...defaultInviteProps, setEmails: mockSetEmails}));
+ const emailInput = screen.getByLabelText('Email Addresses');
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ expect(mockSetEmails).toHaveBeenCalledTimes(3);
+ });
+ });
+
+ it('updates email addresses when new emails are inputted and input is unfocussed', async function () {
+ const mockSetEmails = jest.fn();
+ render(getComponent({...defaultInviteProps, setEmails: mockSetEmails}));
+ const emailInput = screen.getByLabelText('Email Addresses');
+ await userEvent.type(emailInput, 'test-unfocus@example.com');
+ await userEvent.tab();
+ expect(mockSetEmails).toHaveBeenCalled();
+ });
+
+ it('updates role value when new role is selected', async function () {
+ const mockSetRole = jest.fn();
+ render(getComponent({...defaultInviteProps, setRole: mockSetRole}));
+ const roleInput = screen.getByLabelText('Role');
+ await userEvent.click(roleInput);
+ await userEvent.click(screen.getByText('Billing'));
+ expect(mockSetRole).toHaveBeenCalled();
+ });
+
+ it('disables team selection when team roles are not allowed', function () {
+ render(
+ getComponent({
+ ...defaultInviteProps,
+ pendingInvites: {
+ ...defaultInviteProps.pendingInvites,
+ role: 'billing',
+ },
+ })
+ );
+ const teamInput = screen.getByLabelText('Add to Team');
+ expect(teamInput).toBeDisabled();
+ });
+
+ it('enables team selection when team roles are allowed', async function () {
+ const mockSetTeams = jest.fn();
+ render(
+ getComponent({
+ ...defaultInviteProps,
+ pendingInvites: {
+ ...defaultInviteProps.pendingInvites,
+ role: 'member',
+ },
+ setTeams: mockSetTeams,
+ })
+ );
+ const teamInput = screen.getByLabelText('Add to Team');
+ expect(teamInput).toBeEnabled();
+ await userEvent.click(teamInput);
+ await userEvent.click(screen.getByText('#moo-deng'));
+ await userEvent.click(screen.getByText('#moo-waan'));
+ expect(mockSetTeams).toHaveBeenCalledTimes(2);
+ });
+});
diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
new file mode 100644
index 0000000000000..11965e0c0f930
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
@@ -0,0 +1,219 @@
+import {useCallback, useState} from 'react';
+import type {MultiValueProps} from 'react-select';
+import type {Theme} from '@emotion/react';
+import {useTheme} from '@emotion/react';
+import styled from '@emotion/styled';
+
+import type {StylesConfig} from 'sentry/components/forms/controls/selectControl';
+import SelectControl from 'sentry/components/forms/controls/selectControl';
+import {useInviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import RoleSelectControl from 'sentry/components/roleSelectControl';
+import TeamSelector from 'sentry/components/teamSelector';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+import type {SelectValue} from 'sentry/types/core';
+import type {OrgRole} from 'sentry/types/organization';
+
+import renderEmailValue from './renderEmailValue';
+import type {InviteStatus} from './types';
+
+type SelectOption = SelectValue;
+
+type Props = {
+ roleDisabledUnallowed: boolean;
+ roleOptions: OrgRole[];
+};
+
+function ValueComponent(
+ props: MultiValueProps,
+ inviteStatus: InviteStatus
+) {
+ return renderEmailValue(inviteStatus[props.data.value], props);
+}
+
+function mapToOptions(values: string[]): SelectOption[] {
+ return values.map(value => ({value, label: value}));
+}
+
+function InviteRowControl({roleDisabledUnallowed, roleOptions}: Props) {
+ const {inviteStatus, pendingInvites, setEmails, setRole, setTeams, reset} =
+ useInviteMembersContext();
+ const emails = [...(pendingInvites.emails ?? [])];
+ const role = pendingInvites.role ?? '';
+ const teams = [...(pendingInvites.teams ?? [])];
+
+ const onChangeEmails = (opts: SelectOption[]) => {
+ setEmails(opts?.map(v => v.value) ?? [], 0);
+ };
+ const onChangeRole = (value: SelectOption) => setRole(value?.value, 0);
+ const onChangeTeams = (opts: SelectOption[]) =>
+ setTeams(opts ? opts.map(v => v.value) : [], 0);
+
+ const [inputValue, setInputValue] = useState('');
+
+ const theme = useTheme();
+
+ const isTeamRolesAllowedForRole = useCallback<(roleId: string) => boolean>(
+ roleId => {
+ const roleOptionsMap = roleOptions.reduce(
+ (rolesMap, roleOption) => ({...rolesMap, [roleOption.id]: roleOption}),
+ {}
+ );
+ return roleOptionsMap[roleId]?.isTeamRolesAllowed ?? true;
+ },
+ [roleOptions]
+ );
+ const isTeamRolesAllowed = isTeamRolesAllowedForRole(role);
+
+ const handleKeyDown = (e: React.KeyboardEvent) => {
+ switch (e.key) {
+ case 'Enter':
+ case ',':
+ case ' ':
+ e.preventDefault();
+ handleInput(inputValue);
+ setInputValue('');
+ break;
+ default:
+ // do nothing.
+ }
+ };
+
+ const handleInput = input => {
+ const newEmails = input.trim() ? input.trim().split(/[\s,]+/) : [];
+ if (newEmails.length > 0) {
+ onChangeEmails([
+ ...mapToOptions(emails),
+ ...newEmails.map(email => ({label: email, value: email})),
+ ]);
+ }
+ };
+
+ return (
+
+
+ Email addresses
+ ValueComponent(props, inviteStatus),
+ DropdownIndicator: () => null,
+ }}
+ options={mapToOptions(emails)}
+ onBlur={(e: React.ChangeEvent) => {
+ handleInput(e.target.value);
+ }}
+ styles={getStyles(theme, inviteStatus)}
+ onInputChange={setInputValue}
+ onKeyDown={handleKeyDown}
+ onChange={onChangeEmails}
+ multiple
+ creatable
+ clearable
+ onClear={reset}
+ menuIsOpen={false}
+ />
+
+
+
+ Role
+ {
+ onChangeRole(roleOption);
+ if (!isTeamRolesAllowedForRole(roleOption.value)) {
+ onChangeTeams([]);
+ }
+ }}
+ />
+
+
+ Add to team
+
+
+
+
+ );
+}
+
+/**
+ * The email select control has custom selected item states as items
+ * show their delivery status after the form is submitted.
+ */
+function getStyles(theme: Theme, inviteStatus: InviteStatus): StylesConfig {
+ return {
+ multiValue: (provided, {data}: MultiValueProps) => {
+ const status = inviteStatus[data.value];
+ return {
+ ...provided,
+ ...(status?.error
+ ? {
+ color: theme.red400,
+ border: `1px solid ${theme.red300}`,
+ backgroundColor: theme.red100,
+ }
+ : {}),
+ };
+ },
+ multiValueLabel: (provided, {data}: MultiValueProps) => {
+ const status = inviteStatus[data.value];
+ return {
+ ...provided,
+ pointerEvents: 'all',
+ ...(status?.error ? {color: theme.red400} : {}),
+ };
+ },
+ multiValueRemove: (provided, {data}: MultiValueProps) => {
+ const status = inviteStatus[data.value];
+ return {
+ ...provided,
+ ...(status?.error
+ ? {
+ borderLeft: `1px solid ${theme.red300}`,
+ ':hover': {backgroundColor: theme.red100, color: theme.red400},
+ }
+ : {}),
+ };
+ },
+ };
+}
+
+const Heading = styled('div')`
+ margin-bottom: ${space(1)};
+ font-weight: ${p => p.theme.fontWeightBold};
+ text-transform: uppercase;
+ font-size: ${p => p.theme.fontSizeSmall};
+`;
+
+const RowWrapper = styled('div')`
+ display: flex;
+ flex-direction: column;
+ gap: ${space(1.5)};
+`;
+
+const RoleTeamWrapper = styled('div')`
+ display: grid;
+ gap: ${space(1.5)};
+ grid-template-columns: 1fr 1fr;
+ align-items: start;
+`;
+
+export default InviteRowControl;
diff --git a/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx b/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx
index b43b1aec77788..f8df7ec568875 100644
--- a/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx
+++ b/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx
@@ -4,10 +4,63 @@ import LoadingIndicator from 'sentry/components/loadingIndicator';
import {IconCheckmark, IconWarning} from 'sentry/icons';
import {t, tct, tn} from 'sentry/locale';
import {space} from 'sentry/styles/space';
+import useOrganization from 'sentry/utils/useOrganization';
import type {InviteStatus} from './types';
-interface Props {
+interface InviteCountProps {
+ count: number;
+ label: string;
+ isRequest?: boolean;
+}
+
+function InviteCount({count, label, isRequest}: InviteCountProps) {
+ return (
+
+ {isRequest
+ ? tn('%s invite request', '%s invite requests', count)
+ : tn('%s invite', '%s invites', count)}
+
+ );
+}
+
+interface CountMessageProps {
+ errorCount: number;
+ sentCount: number;
+ isRequest?: boolean;
+}
+
+function CountMessage({sentCount, errorCount, isRequest}: CountMessageProps) {
+ const invites = (
+
+ );
+ const failedInvites = (
+
+ );
+ const tctComponents = {
+ invites,
+ failed: errorCount,
+ failedInvites,
+ };
+ return (
+
+ {sentCount > 0 && (
+
+
+ {tct('[invites] sent.', tctComponents)}
+
+ )}
+ {errorCount > 0 && (
+
+
+ {tct('[failedInvites] failed to send.', tctComponents)}
+
+ )}
+
+ );
+}
+
+interface InviteStatusMessageProps {
complete: boolean;
hasDuplicateEmails: boolean;
inviteStatus: InviteStatus;
@@ -21,7 +74,10 @@ export default function InviteStatusMessage({
inviteStatus,
sendingInvites,
willInvite,
-}: Props) {
+}: InviteStatusMessageProps) {
+ const organization = useOrganization();
+ const isNewInviteModal = organization.features.includes('invite-members-new-modal');
+
if (sendingInvites) {
return (
@@ -38,8 +94,29 @@ export default function InviteStatusMessage({
const sentCount = statuses.filter(i => i.sent).length;
const errorCount = statuses.filter(i => i.error).length;
+ const statusIndicator =
+ hasDuplicateEmails || errorCount > 0 ? (
+
+ ) : (
+
+ );
+
+ if (isNewInviteModal) {
+ return (
+
+ );
+ }
+
if (willInvite) {
- const invites = {tn('%s invite', '%s invites', sentCount)} ;
+ const invites = (
+
+ {tn('%s invite', '%s invites', sentCount)}
+
+ );
const tctComponents = {
invites,
failed: errorCount,
@@ -47,7 +124,7 @@ export default function InviteStatusMessage({
return (
-
+ {statusIndicator}
{errorCount > 0
? tct('Sent [invites], [failed] failed to send.', tctComponents)
@@ -57,15 +134,18 @@ export default function InviteStatusMessage({
);
}
const inviteRequests = (
- {tn('%s invite request', '%s invite requests', sentCount)}
+
+ {tn('%s invite request', '%s invite requests', sentCount)}
+
);
const tctComponents = {
inviteRequests,
failed: errorCount,
};
+
return (
-
+ {statusIndicator}
{errorCount > 0
? tct(
'[inviteRequests] pending approval, [failed] failed to send.',
@@ -76,10 +156,11 @@ export default function InviteStatusMessage({
);
}
+ // TODO(mia): remove once old modal is removed
if (hasDuplicateEmails) {
return (
-
+
{t('Duplicate emails between invite rows.')}
);
@@ -88,14 +169,19 @@ export default function InviteStatusMessage({
return null;
}
-export const StatusMessage = styled('div')<{status?: 'success' | 'error'}>`
+export const StatusMessage = styled('div')<{
+ isNewInviteModal?: boolean;
+ status?: 'success' | 'error';
+}>`
display: flex;
gap: ${space(1)};
align-items: center;
font-size: ${p => p.theme.fontSizeMedium};
- color: ${p => (p.status === 'error' ? p.theme.errorText : p.theme.textColor)};
+ color: ${p =>
+ p.status === 'error' && !p.isNewInviteModal ? p.theme.errorText : p.theme.textColor};
+`;
- > :first-child {
- ${p => p.status === 'success' && `color: ${p.theme.successText}`};
- }
+export const BoldCount = styled('div')`
+ display: inline;
+ font-weight: bold;
`;
diff --git a/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx b/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx
index deb02ff34f764..a883838f0a4cd 100644
--- a/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx
+++ b/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx
@@ -6,6 +6,7 @@ import LoadingIndicator from 'sentry/components/loadingIndicator';
import {Tooltip} from 'sentry/components/tooltip';
import {IconCheckmark, IconWarning} from 'sentry/icons';
import {space} from 'sentry/styles/space';
+import useOrganization from 'sentry/utils/useOrganization';
import type {InviteStatus} from './types';
@@ -13,6 +14,7 @@ function renderEmailValue(
status: InviteStatus[string],
valueProps: MultiValueProps
) {
+ const organization = useOrganization();
const {children, ...props} = valueProps;
const error = status?.error;
@@ -25,7 +27,9 @@ function renderEmailValue (
{children}
{!status.sent && !status.error && }
{status.error && }
- {status.sent && }
+ {status.sent && !organization.features.includes('invite-members-new-modal') && (
+
+ )}
);
diff --git a/static/app/components/modals/inviteMembersModal/useInviteModal.tsx b/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
index 3226afe66d1bb..3c5338b4534d9 100644
--- a/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
+++ b/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
@@ -2,6 +2,7 @@ import {useCallback, useEffect, useMemo, useRef, useState} from 'react';
import type {
InviteRow,
+ InviteStatus,
NormalizedInvite,
} from 'sentry/components/modals/inviteMembersModal/types';
import {t} from 'sentry/locale';
@@ -162,19 +163,52 @@ export default function useInviteModal({organization, initialData, source}: Prop
[api, organization, willInvite]
);
- const sendInvites = useCallback(async () => {
- setState(prev => ({...prev, sendingInvites: true}));
- await Promise.all(invites.map(sendInvite));
- setState(prev => ({...prev, sendingInvites: false, complete: true}));
+ const removeSentInvites = useCallback(() => {
+ setState(prev => {
+ const emails = prev.pendingInvites[0].emails;
+ const filteredEmails = Array.from(emails).filter(
+ email => !prev.inviteStatus[email]?.sent
+ );
+ return {
+ ...prev,
+ pendingInvites: [
+ {
+ ...prev.pendingInvites[0],
+ emails: new Set(filteredEmails),
+ },
+ ],
+ };
+ });
+ }, []);
+ useEffect(() => {
+ const statuses = Object.values(state.inviteStatus) as InviteStatus[];
+ const sentCount = statuses.filter(i => i.sent).length;
+ const errorCount = statuses.filter(i => i.error).length;
+ // Don't track if no invites have been sent or invites are still sending
+ if ((sentCount === 0 && errorCount === 0) || state.sendingInvites) {
+ return;
+ }
trackAnalytics(
willInvite ? 'invite_modal.invites_sent' : 'invite_modal.requests_sent',
{
organization,
modal_session: sessionId.current,
+ sent_invites: sentCount,
+ failed_invites: errorCount,
+ is_new_modal: organization.features.includes('invite-members-new-modal'),
}
);
- }, [organization, invites, sendInvite, willInvite]);
+ }, [organization, state.inviteStatus, state.sendingInvites, willInvite]);
+
+ const sendInvites = useCallback(async () => {
+ setState(prev => ({...prev, sendingInvites: true}));
+ await Promise.all(invites.map(sendInvite));
+ if (organization.features.includes('invite-members-new-modal')) {
+ removeSentInvites();
+ }
+ setState(prev => ({...prev, sendingInvites: false, complete: true}));
+ }, [organization, invites, sendInvite, removeSentInvites]);
const addInviteRow = useCallback(() => {
setState(prev => ({
@@ -210,6 +244,12 @@ export default function useInviteModal({organization, initialData, source}: Prop
});
}, []);
+ const setInviteStatus = useCallback((inviteStatus: InviteStatus) => {
+ setState(prev => {
+ return {...prev, inviteStatus};
+ });
+ }, []);
+
const removeInviteRow = useCallback((index: number) => {
setState(prev => {
const pendingInvites = [...prev.pendingInvites];
@@ -229,6 +269,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
setEmails,
setRole,
setTeams,
+ setInviteStatus,
willInvite,
complete: state.complete,
inviteStatus: state.inviteStatus,
diff --git a/static/app/components/modals/metricWidgetViewerModal.tsx b/static/app/components/modals/metricWidgetViewerModal.tsx
index 837756b297867..c3bb40feeedba 100644
--- a/static/app/components/modals/metricWidgetViewerModal.tsx
+++ b/static/app/components/modals/metricWidgetViewerModal.tsx
@@ -17,6 +17,7 @@ import type {Organization} from 'sentry/types/organization';
import {defined} from 'sentry/utils';
import {getMetricsUrl} from 'sentry/utils/metrics';
import {toDisplayType} from 'sentry/utils/metrics/dashboard';
+import {hasCustomMetrics} from 'sentry/utils/metrics/features';
import {parseMRI} from 'sentry/utils/metrics/mri';
import {MetricExpressionType} from 'sentry/utils/metrics/types';
import {useVirtualMetricsContext} from 'sentry/utils/metrics/virtualMetricsContext';
@@ -298,13 +299,14 @@ function MetricWidgetViewerModal({
const handleClose = useCallback(() => {
if (
userHasModified &&
+ hasCustomMetrics(organization) &&
// eslint-disable-next-line no-alert
!window.confirm(t('You have unsaved changes, are you sure you want to close?'))
) {
return;
}
closeModal();
- }, [userHasModified, closeModal]);
+ }, [userHasModified, closeModal, organization]);
const {mri, aggregation, query, condition} = metricQueries[0];
@@ -325,7 +327,7 @@ function MetricWidgetViewerModal({
-
+
{t('Open in Metrics')}
-
- {t('Save changes')}
-
+ {hasCustomMetrics(organization) && (
+
+ {t('Save changes')}
+
+ )}
diff --git a/static/app/components/modals/metricWidgetViewerModal/queries.tsx b/static/app/components/modals/metricWidgetViewerModal/queries.tsx
index 8fb9cc77b3507..363f28c2e1d67 100644
--- a/static/app/components/modals/metricWidgetViewerModal/queries.tsx
+++ b/static/app/components/modals/metricWidgetViewerModal/queries.tsx
@@ -30,7 +30,11 @@ import {
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {isCustomMetric} from 'sentry/utils/metrics';
-import {hasMetricAlertFeature, hasMetricsNewInputs} from 'sentry/utils/metrics/features';
+import {
+ hasCustomMetrics,
+ hasMetricAlertFeature,
+ hasMetricsNewInputs,
+} from 'sentry/utils/metrics/features';
import {MetricExpressionType} from 'sentry/utils/metrics/types';
import useOrganization from 'sentry/utils/useOrganization';
import usePageFilters from 'sentry/utils/usePageFilters';
@@ -336,9 +340,9 @@ function QueryContextMenu({
},
};
- return customMetric
+ return hasCustomMetrics(organization)
? [duplicateQueryItem, aliasItem, addAlertItem, removeQueryItem, settingsItem]
- : [duplicateQueryItem, aliasItem, addAlertItem, removeQueryItem];
+ : [duplicateQueryItem, aliasItem, removeQueryItem, settingsItem];
}, [
metricsQuery.mri,
createAlert,
diff --git a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx
index acf80d95ce101..00346df0e3007 100644
--- a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx
+++ b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx
@@ -83,9 +83,8 @@ describe('CreateSavedSearchModal', function () {
await userEvent.click(screen.getByRole('textbox', {name: /name/i}));
await userEvent.paste('new search name');
- await userEvent.clear(screen.getByRole('textbox', {name: /filter issues/i}));
- await userEvent.click(screen.getByRole('textbox', {name: /filter issues/i}));
- await userEvent.paste('is:resolved');
+ await userEvent.click(screen.getAllByRole('combobox').at(-1)!);
+ await userEvent.paste('event.type:error');
await selectEvent.select(screen.getByText('Last Seen'), 'Trends');
await userEvent.click(screen.getByRole('button', {name: 'Save'}));
@@ -96,7 +95,7 @@ describe('CreateSavedSearchModal', function () {
expect.objectContaining({
data: {
name: 'new search name',
- query: 'is:resolved',
+ query: 'is:unresolved assigned:lyn@sentry.io event.type:error',
sort: IssueSortOptions.TRENDS,
type: 0,
visibility: SavedSearchVisibility.OWNER,
diff --git a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx
index 0f40f68a9a957..919763334c32a 100644
--- a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx
+++ b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx
@@ -60,7 +60,7 @@ describe('EditSavedSearchModal', function () {
body: {
id: 'saved-search-id',
name: 'test',
- query: 'is:unresolved browser:firefox',
+ query: 'is:unresolved browser:firefox event.type:error',
sort: IssueSortOptions.TRENDS,
visibility: SavedSearchVisibility.OWNER,
},
@@ -71,11 +71,13 @@ describe('EditSavedSearchModal', function () {
await userEvent.clear(screen.getByRole('textbox', {name: /name/i}));
await userEvent.paste('new search name');
- await userEvent.clear(screen.getByRole('textbox', {name: /filter issues/i}));
- await userEvent.paste('test');
-
await selectEvent.select(screen.getByText('Last Seen'), 'Trends');
+ await userEvent.click(
+ screen.getAllByRole('combobox', {name: 'Add a search term'}).at(-1)!
+ );
+ await userEvent.paste('event.type:error');
+
await selectEvent.select(screen.getByText('Only me'), 'Users in my organization');
await userEvent.click(screen.getByRole('button', {name: 'Save'}));
@@ -86,7 +88,7 @@ describe('EditSavedSearchModal', function () {
expect.objectContaining({
data: expect.objectContaining({
name: 'new search name',
- query: 'test',
+ query: 'is:unresolved browser:firefox event.type:error',
visibility: SavedSearchVisibility.ORGANIZATION,
}),
})
@@ -119,11 +121,6 @@ describe('EditSavedSearchModal', function () {
await userEvent.clear(screen.getByRole('textbox', {name: /name/i}));
await userEvent.paste('new search name');
- await userEvent.clear(screen.getByTestId('smart-search-input'));
- await userEvent.paste('test');
-
- await selectEvent.select(screen.getByText('Last Seen'), 'Trends');
-
// Hovering over the visibility dropdown shows disabled reason
await userEvent.hover(screen.getByText(/only me/i));
await screen.findByText(/only organization admins can create global saved searches/i);
@@ -136,7 +133,7 @@ describe('EditSavedSearchModal', function () {
expect.objectContaining({
data: expect.objectContaining({
name: 'new search name',
- query: 'test',
+ query: 'is:unresolved browser:firefox',
visibility: SavedSearchVisibility.OWNER,
}),
})
diff --git a/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx b/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx
index b6505d703fdb9..c07d25cc90630 100644
--- a/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx
+++ b/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx
@@ -55,19 +55,15 @@ export function SavedSearchModalContent({organization}: SavedSearchModalContentP
flexibleControlStateSize
required
>
- {({id, name, onChange, onBlur, disabled, value}) => (
+ {({onChange, onBlur, disabled, value}) => (
{
+ onChange={newValue => {
onChange(newValue, {});
onBlur(newValue, {});
}}
- includeLabel={false}
- useFormWrapper={false}
disabled={disabled}
- query={value}
+ initialQuery={value}
searchSource="saved_searches_modal"
/>
)}
diff --git a/static/app/components/modals/sudoModal.tsx b/static/app/components/modals/sudoModal.tsx
index 123c8399d4b82..30a02563026aa 100644
--- a/static/app/components/modals/sudoModal.tsx
+++ b/static/app/components/modals/sudoModal.tsx
@@ -18,7 +18,8 @@ import {space} from 'sentry/styles/space';
import type {Authenticator} from 'sentry/types/auth';
import useApi from 'sentry/utils/useApi';
import {useLocation} from 'sentry/utils/useLocation';
-import useRouter from 'sentry/utils/useRouter';
+import {useNavigate} from 'sentry/utils/useNavigate';
+import {useUser} from 'sentry/utils/useUser';
import {OrganizationLoaderContext} from 'sentry/views/organizationContext';
import TextBlock from 'sentry/views/settings/components/text/textBlock';
@@ -61,7 +62,8 @@ function SudoModal({
Body,
closeButton,
}: Props) {
- const router = useRouter();
+ const user = useUser();
+ const navigate = useNavigate();
const api = useApi();
const [state, setState] = useState({
authenticators: [] as Authenticator[],
@@ -155,7 +157,10 @@ function SudoModal({
const handleSuccess = () => {
if (isSuperuser) {
- router.replace({pathname: location.pathname, state: {forceUpdate: new Date()}});
+ navigate(
+ {pathname: location.pathname, state: {forceUpdate: new Date()}},
+ {replace: true}
+ );
if (needsReload) {
window.location.reload();
}
@@ -223,7 +228,6 @@ function SudoModal({
};
const renderBodyContent = () => {
- const user = ConfigStore.get('user');
const isSelfHosted = ConfigStore.get('isSelfHosted');
const validateSUForm = ConfigStore.get('validateSUForm');
diff --git a/static/app/components/modals/widgetViewerModal.spec.tsx b/static/app/components/modals/widgetViewerModal.spec.tsx
index 53812d39919cb..8cb4cddeba763 100644
--- a/static/app/components/modals/widgetViewerModal.spec.tsx
+++ b/static/app/components/modals/widgetViewerModal.spec.tsx
@@ -26,7 +26,7 @@ jest.mock('echarts-for-react/lib/core', () => {
const stubEl = (props: {children?: React.ReactNode}) => {props.children}
;
-let eventsMetaMock;
+let eventsMetaMock: jest.Mock;
const waitForMetaToHaveBeenCalled = async () => {
await waitFor(() => {
diff --git a/static/app/components/modals/widgetViewerModal.tsx b/static/app/components/modals/widgetViewerModal.tsx
index 439d70954a4c4..d425ac6ab8f85 100644
--- a/static/app/components/modals/widgetViewerModal.tsx
+++ b/static/app/components/modals/widgetViewerModal.tsx
@@ -52,8 +52,8 @@ import {MEPSettingProvider} from 'sentry/utils/performance/contexts/metricsEnhan
import {decodeInteger, decodeList, decodeScalar} from 'sentry/utils/queryString';
import useApi from 'sentry/utils/useApi';
import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
import useProjects from 'sentry/utils/useProjects';
-import useRouter from 'sentry/utils/useRouter';
import withPageFilters from 'sentry/utils/withPageFilters';
import type {DashboardFilters, Widget} from 'sentry/views/dashboards/types';
import {DisplayType, WidgetType} from 'sentry/views/dashboards/types';
@@ -130,7 +130,13 @@ const shouldWidgetCardChartMemo = (prevProps, props) => {
);
const isNotTopNWidget =
props.widget.displayType !== DisplayType.TOP_N && !defined(props.widget.limit);
- return selectionMatches && chartZoomOptionsMatches && (sortMatches || isNotTopNWidget);
+ const legendMatches = isEqual(props.legendOptions, prevProps.legendOptions);
+ return (
+ selectionMatches &&
+ chartZoomOptionsMatches &&
+ (sortMatches || isNotTopNWidget) &&
+ legendMatches
+ );
};
// WidgetCardChartContainer and WidgetCardChart rerenders if selection was changed.
@@ -184,7 +190,7 @@ function WidgetViewerModal(props: Props) {
} = props;
const location = useLocation();
const {projects} = useProjects();
- const router = useRouter();
+ const navigate = useNavigate();
const shouldShowSlider = organization.features.includes('widget-viewer-modal-minimap');
// TODO(Tele-Team): Re-enable this when we have a better way to determine if the data is transaction only
// let widgetContentLoadingStatus: boolean | undefined = undefined;
@@ -441,13 +447,16 @@ function WidgetViewerModal(props: Props) {
);
widths.forEach((width, index) => (newWidths[index] = parseInt(width, 10)));
newWidths[columnIndex] = newWidth;
- router.replace({
- pathname: location.pathname,
- query: {
- ...location.query,
- [WidgetViewerQueryField.WIDTH]: newWidths,
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ [WidgetViewerQueryField.WIDTH]: newWidths,
+ },
},
- });
+ {replace: true}
+ );
};
// Get discover result totals
@@ -465,15 +474,18 @@ function WidgetViewerModal(props: Props) {
function onLegendSelectChanged({selected}: {selected: Record}) {
setDisabledLegends(selected);
- router.replace({
- pathname: location.pathname,
- query: {
- ...location.query,
- [WidgetViewerQueryField.LEGEND]: Object.keys(selected).filter(
- key => !selected[key]
- ),
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ [WidgetViewerQueryField.LEGEND]: Object.keys(selected).filter(
+ key => !selected[key]
+ ),
+ },
},
- });
+ {replace: true}
+ );
trackAnalytics('dashboards_views.widget_viewer.toggle_legend', {
organization,
widget_type: widget.widgetType ?? WidgetType.DISCOVER,
@@ -527,13 +539,16 @@ function WidgetViewerModal(props: Props) {
{
- router.replace({
- pathname: location.pathname,
- query: {
- ...location.query,
- [WidgetViewerQueryField.CURSOR]: newCursor,
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ [WidgetViewerQueryField.CURSOR]: newCursor,
+ },
},
- });
+ {replace: true}
+ );
if (widget.displayType === DisplayType.TABLE) {
setChartUnmodified(false);
@@ -600,14 +615,17 @@ function WidgetViewerModal(props: Props) {
newCursor = undefined;
nextPage = 0;
}
- router.replace({
- pathname: location.pathname,
- query: {
- ...location.query,
- [WidgetViewerQueryField.CURSOR]: newCursor,
- [WidgetViewerQueryField.PAGE]: nextPage,
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ [WidgetViewerQueryField.CURSOR]: newCursor,
+ [WidgetViewerQueryField.PAGE]: nextPage,
+ },
},
- });
+ {replace: true}
+ );
if (widget.displayType === DisplayType.TABLE) {
setChartUnmodified(false);
@@ -668,13 +686,16 @@ function WidgetViewerModal(props: Props) {
{
- router.replace({
- pathname: location.pathname,
- query: {
- ...location.query,
- [WidgetViewerQueryField.CURSOR]: newCursor,
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ [WidgetViewerQueryField.CURSOR]: newCursor,
+ },
},
- });
+ {replace: true}
+ );
trackAnalytics('dashboards_views.widget_viewer.paginate', {
organization,
widget_type: WidgetType.RELEASE,
@@ -719,7 +740,7 @@ function WidgetViewerModal(props: Props) {
period: null,
},
});
- router.push({
+ navigate({
pathname: location.pathname,
query: {
...location.query,
@@ -861,7 +882,6 @@ function WidgetViewerModal(props: Props) {
location={location}
widget={widget}
selection={selection}
- router={router}
organization={organization}
onZoom={onZoom}
onLegendSelectChanged={onLegendSelectChanged}
@@ -904,15 +924,18 @@ function WidgetViewerModal(props: Props) {
value={selectedQueryIndex}
options={queryOptions}
onChange={(option: SelectValue) => {
- router.replace({
- pathname: location.pathname,
- query: {
- ...location.query,
- [WidgetViewerQueryField.QUERY]: option.value,
- [WidgetViewerQueryField.PAGE]: undefined,
- [WidgetViewerQueryField.CURSOR]: undefined,
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ [WidgetViewerQueryField.QUERY]: option.value,
+ [WidgetViewerQueryField.PAGE]: undefined,
+ [WidgetViewerQueryField.CURSOR]: undefined,
+ },
},
- });
+ {replace: true}
+ );
trackAnalytics('dashboards_views.widget_viewer.select_query', {
organization,
diff --git a/static/app/components/nav/config.tsx b/static/app/components/nav/config.tsx
new file mode 100644
index 0000000000000..27504b725927b
--- /dev/null
+++ b/static/app/components/nav/config.tsx
@@ -0,0 +1,174 @@
+import type {NavConfig} from 'sentry/components/nav/utils';
+import {
+ IconDashboard,
+ IconGraph,
+ IconIssues,
+ IconLightning,
+ IconProject,
+ IconSearch,
+ IconSettings,
+ IconSiren,
+} from 'sentry/icons';
+import {t} from 'sentry/locale';
+import type {Organization} from 'sentry/types/organization';
+import {getDiscoverLandingUrl} from 'sentry/utils/discover/urls';
+import {MODULE_BASE_URLS} from 'sentry/views/insights/common/utils/useModuleURL';
+import {MODULE_SIDEBAR_TITLE as MODULE_TITLE_HTTP} from 'sentry/views/insights/http/settings';
+import {INSIGHTS_BASE_URL, MODULE_TITLES} from 'sentry/views/insights/settings';
+import {getSearchForIssueGroup, IssueGroup} from 'sentry/views/issueList/utils';
+
+/**
+ * Global nav settings for all Sentry users.
+ * Links are generated per-organization with the proper `/organization/:slug/` prefix.
+ *
+ * To permission-gate certain items, include props to be passed to the `` component
+ */
+export function createNavConfig({organization}: {organization: Organization}): NavConfig {
+ const prefix = `organizations/${organization.slug}`;
+ const insightsPrefix = `${prefix}/${INSIGHTS_BASE_URL}`;
+
+ return {
+ main: [
+ {
+ label: t('Issues'),
+ icon: ,
+ submenu: [
+ {
+ label: t('All'),
+ to: `/${prefix}/issues/?query=is:unresolved`,
+ },
+ {
+ label: t('Error & Outage'),
+ to: `/${prefix}/issues/${getSearchForIssueGroup(IssueGroup.ERROR_OUTAGE)}`,
+ },
+ {
+ label: t('Trend'),
+ to: `/${prefix}/issues/${getSearchForIssueGroup(IssueGroup.TREND)}`,
+ },
+ {
+ label: t('Craftsmanship'),
+ to: `/${prefix}/issues/${getSearchForIssueGroup(IssueGroup.CRAFTSMANSHIP)}`,
+ },
+ {
+ label: t('Security'),
+ to: `/${prefix}/issues/${getSearchForIssueGroup(IssueGroup.SECURITY)}`,
+ },
+ {label: t('Feedback'), to: `/${prefix}/feedback/`},
+ ],
+ },
+ {label: t('Projects'), to: `/${prefix}/projects/`, icon: },
+ {
+ label: t('Explore'),
+ icon: ,
+ submenu: [
+ {
+ label: t('Traces'),
+ to: `/${prefix}/traces/`,
+ feature: {features: 'performance-trace-explorer'},
+ },
+ {
+ label: t('Metrics'),
+ to: `/${prefix}/metrics/`,
+ feature: {features: 'custom-metrics'},
+ },
+ {
+ label: t('Profiles'),
+ to: `/${prefix}/profiling/`,
+ feature: {
+ features: 'profiling',
+ hookName: 'feature-disabled:profiling-sidebar-item',
+ requireAll: false,
+ },
+ },
+ {
+ label: t('Replays'),
+ to: `/${prefix}/replays/`,
+ feature: {
+ features: 'session-replay-ui',
+ hookName: 'feature-disabled:replay-sidebar-item',
+ },
+ },
+ {
+ label: t('Discover'),
+ to: getDiscoverLandingUrl(organization),
+ feature: {
+ features: 'discover-basic',
+ hookName: 'feature-disabled:discover2-sidebar-item',
+ },
+ },
+ {label: t('Releases'), to: `/${prefix}/releases/`},
+ {label: t('Crons'), to: `/${prefix}/crons/`},
+ ],
+ },
+ {
+ label: t('Insights'),
+ icon: ,
+ feature: {features: 'insights-entry-points'},
+ submenu: [
+ {
+ label: MODULE_TITLE_HTTP,
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS.http}/`,
+ },
+ {label: MODULE_TITLES.db, to: `/${insightsPrefix}/${MODULE_BASE_URLS.db}/`},
+ {
+ label: MODULE_TITLES.resource,
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS.resource}/`,
+ },
+ {
+ label: MODULE_TITLES.app_start,
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS.app_start}/`,
+ },
+ {
+ label: MODULE_TITLES['mobile-screens'],
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS['mobile-screens']}/`,
+ feature: {features: 'insights-mobile-screens-module'},
+ },
+ {
+ label: MODULE_TITLES.vital,
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS.vital}/`,
+ },
+ {
+ label: MODULE_TITLES.cache,
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS.cache}/`,
+ },
+ {
+ label: MODULE_TITLES.queue,
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS.queue}/`,
+ },
+ {
+ label: MODULE_TITLES.ai,
+ to: `/${insightsPrefix}/${MODULE_BASE_URLS.ai}/`,
+ feature: {features: 'insights-entry-points'},
+ },
+ ],
+ },
+ {
+ label: t('Perf.'),
+ to: '/performance/',
+ icon: ,
+ feature: {
+ features: 'performance-view',
+ hookName: 'feature-disabled:performance-sidebar-item',
+ },
+ },
+ {
+ label: t('Boards'),
+ to: '/dashboards/',
+ icon: ,
+ feature: {
+ features: ['discover', 'discover-query', 'dashboards-basic', 'dashboards-edit'],
+ hookName: 'feature-disabled:dashboards-sidebar-item',
+ requireAll: false,
+ },
+ },
+ {label: t('Alerts'), to: `/${prefix}/alerts/rules/`, icon: },
+ ],
+ footer: [
+ {
+ label: t('Settings'),
+ to: `/settings/${organization.slug}/`,
+ icon: ,
+ },
+ ],
+ };
+}
diff --git a/static/app/components/nav/context.tsx b/static/app/components/nav/context.tsx
new file mode 100644
index 0000000000000..33a99ecf0d979
--- /dev/null
+++ b/static/app/components/nav/context.tsx
@@ -0,0 +1,61 @@
+import {createContext, useContext, useMemo} from 'react';
+
+import {createNavConfig} from 'sentry/components/nav/config';
+import type {
+ NavConfig,
+ NavItemLayout,
+ NavSidebarItem,
+ NavSubmenuItem,
+} from 'sentry/components/nav/utils';
+import {isNavItemActive, isSubmenuItemActive} from 'sentry/components/nav/utils';
+import {useLocation} from 'sentry/utils/useLocation';
+import useOrganization from 'sentry/utils/useOrganization';
+
+export interface NavContext {
+  /** Raw config for all nav items (main and footer sections) */
+ config: Readonly;
+ /** Currently active submenu items, if any */
+ submenu?: Readonly>;
+}
+
+const NavContext = createContext({config: {main: []}});
+
+export function useNavContext(): NavContext {
+ const navContext = useContext(NavContext);
+ return navContext;
+}
+
+export function NavContextProvider({children}) {
+ const organization = useOrganization();
+ const location = useLocation();
+ /** Raw nav configuration values */
+ const config = useMemo(() => createNavConfig({organization}), [organization]);
+ /**
+ * Active submenu items derived from the nav config and current `location`.
+ * These are returned in a normalized layout format for ease of use.
+ */
+ const submenu = useMemo(() => {
+ for (const item of config.main) {
+ if (isNavItemActive(item, location) || isSubmenuItemActive(item, location)) {
+ return normalizeSubmenu(item.submenu);
+ }
+ }
+ if (config.footer) {
+ for (const item of config.footer) {
+ if (isNavItemActive(item, location) || isSubmenuItemActive(item, location)) {
+ return normalizeSubmenu(item.submenu);
+ }
+ }
+ }
+ return undefined;
+ }, [config, location]);
+
+ return {children} ;
+}
+
+const normalizeSubmenu = (submenu: NavSidebarItem['submenu']): NavContext['submenu'] => {
+ if (Array.isArray(submenu)) {
+ return {main: submenu};
+ }
+ return submenu;
+};
diff --git a/static/app/components/nav/index.spec.tsx b/static/app/components/nav/index.spec.tsx
new file mode 100644
index 0000000000000..0ca7e1efb43bd
--- /dev/null
+++ b/static/app/components/nav/index.spec.tsx
@@ -0,0 +1,168 @@
+import {LocationFixture} from 'sentry-fixture/locationFixture';
+import {OrganizationFixture} from 'sentry-fixture/organization';
+import {RouterFixture} from 'sentry-fixture/routerFixture';
+
+import {getAllByRole, render, screen} from 'sentry-test/reactTestingLibrary';
+
+import Nav from 'sentry/components/nav';
+
+const ALL_AVAILABLE_FEATURES = [
+ 'insights-entry-points',
+ 'discover',
+ 'discover-basic',
+ 'discover-query',
+ 'dashboards-basic',
+ 'dashboards-edit',
+ 'custom-metrics',
+ 'user-feedback-ui',
+ 'session-replay-ui',
+ 'performance-view',
+ 'performance-trace-explorer',
+ 'starfish-mobile-ui-module',
+ 'profiling',
+];
+
+describe('Nav', function () {
+ describe('default', function () {
+ beforeEach(() => {
+ render( , {
+ router: RouterFixture({
+ location: LocationFixture({pathname: '/organizations/org-slug/issues/'}),
+ }),
+ organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
+ });
+ });
+ it('renders primary navigation', async function () {
+ expect(
+ await screen.findByRole('navigation', {name: 'Primary Navigation'})
+ ).toBeInTheDocument();
+ });
+ it('renders secondary navigation', async function () {
+ expect(
+ await screen.findByRole('navigation', {name: 'Secondary Navigation'})
+ ).toBeInTheDocument();
+ });
+
+ it('renders expected primary nav items', function () {
+ const links = getAllByRole(
+ screen.getByRole('navigation', {name: 'Primary Navigation'}),
+ 'link'
+ );
+ expect(links).toHaveLength(8);
+
+ [
+ 'Issues',
+ 'Projects',
+ 'Explore',
+ 'Insights',
+ 'Perf.',
+ 'Boards',
+ 'Alerts',
+ 'Settings',
+ ].forEach((title, index) => {
+ expect(links[index]).toHaveAccessibleName(title);
+ });
+ });
+ });
+
+ describe('issues', function () {
+ beforeEach(() => {
+ render( , {
+ router: RouterFixture({
+ location: LocationFixture({
+ pathname: '/organizations/org-slug/issues/',
+ search: '?query=is:unresolved',
+ }),
+ }),
+ organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
+ });
+ });
+
+ it('renders secondary navigation', async function () {
+ expect(
+ await screen.findByRole('navigation', {name: 'Secondary Navigation'})
+ ).toBeInTheDocument();
+ });
+
+ it('includes expected submenu items', function () {
+ const container = screen.getByRole('navigation', {name: 'Secondary Navigation'});
+ const links = getAllByRole(container, 'link');
+ expect(links).toHaveLength(6);
+
+ ['All', 'Error & Outage', 'Trend', 'Craftsmanship', 'Security', 'Feedback'].forEach(
+ (title, index) => {
+ expect(links[index]).toHaveAccessibleName(title);
+ }
+ );
+ });
+ });
+
+ describe('insights', function () {
+ beforeEach(() => {
+ render( , {
+ router: RouterFixture({
+ location: LocationFixture({pathname: '/organizations/org-slug/insights/http/'}),
+ }),
+ organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
+ });
+ });
+
+ it('renders secondary navigation', async function () {
+ expect(
+ await screen.findByRole('navigation', {name: 'Secondary Navigation'})
+ ).toBeInTheDocument();
+ });
+
+ it('includes expected submenu items', function () {
+ const container = screen.getByRole('navigation', {name: 'Secondary Navigation'});
+ const links = getAllByRole(container, 'link');
+ expect(links).toHaveLength(8);
+ [
+ 'Requests',
+ 'Queries',
+ 'Assets',
+ 'App Starts',
+ 'Web Vitals',
+ 'Caches',
+ 'Queues',
+ 'LLM Monitoring',
+ ].forEach((title, index) => {
+ expect(links[index]).toHaveAccessibleName(title);
+ });
+ });
+ });
+
+ describe('explore', function () {
+ beforeEach(() => {
+ render( , {
+ router: RouterFixture({
+ location: LocationFixture({pathname: '/organizations/org-slug/traces/'}),
+ }),
+ organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
+ });
+ });
+
+ it('renders secondary navigation', async function () {
+ expect(
+ await screen.findByRole('navigation', {name: 'Secondary Navigation'})
+ ).toBeInTheDocument();
+ });
+
+ it('includes expected submenu items', function () {
+ const container = screen.getByRole('navigation', {name: 'Secondary Navigation'});
+ const links = getAllByRole(container, 'link');
+ expect(links).toHaveLength(7);
+ [
+ 'Traces',
+ 'Metrics',
+ 'Profiles',
+ 'Replays',
+ 'Discover',
+ 'Releases',
+ 'Crons',
+ ].forEach((title, index) => {
+ expect(links[index]).toHaveAccessibleName(title);
+ });
+ });
+ });
+});
diff --git a/static/app/components/nav/index.tsx b/static/app/components/nav/index.tsx
new file mode 100644
index 0000000000000..676624e08b927
--- /dev/null
+++ b/static/app/components/nav/index.tsx
@@ -0,0 +1,31 @@
+import styled from '@emotion/styled';
+
+import {NavContextProvider} from 'sentry/components/nav/context';
+import MobileTopbar from 'sentry/components/nav/mobileTopbar';
+import Sidebar from 'sentry/components/nav/sidebar';
+import {useBreakpoints} from 'sentry/utils/metrics/useBreakpoints';
+
+function Nav() {
+ const screen = useBreakpoints();
+
+ return (
+
+ {screen.medium ? : }
+
+ );
+}
+
+const NavContainer = styled('div')`
+ display: flex;
+ position: sticky;
+ top: 0;
+ z-index: ${p => p.theme.zIndex.sidebarPanel};
+
+ @media screen and (min-width: ${p => p.theme.breakpoints.medium}) {
+ bottom: 0;
+ height: 100vh;
+ height: 100dvh;
+ }
+`;
+
+export default Nav;
diff --git a/static/app/components/nav/mobileTopbar.tsx b/static/app/components/nav/mobileTopbar.tsx
new file mode 100644
index 0000000000000..50d9e76b8d362
--- /dev/null
+++ b/static/app/components/nav/mobileTopbar.tsx
@@ -0,0 +1,122 @@
+import {useCallback, useEffect, useLayoutEffect, useState} from 'react';
+import {createPortal} from 'react-dom';
+import styled from '@emotion/styled';
+
+import {SidebarItems} from 'sentry/components/nav/sidebar';
+import {IconClose, IconMenu, IconSentry} from 'sentry/icons';
+import {space} from 'sentry/styles/space';
+import theme from 'sentry/utils/theme';
+import {useLocation} from 'sentry/utils/useLocation';
+
+type NavView = 'primary' | 'secondary' | 'closed';
+
+function MobileTopbar() {
+ const location = useLocation();
+ const [view, setView] = useState('closed');
+ /** Sync menu state with `body` attributes */
+ useLayoutEffect(() => {
+ updateNavStyleAttributes(view);
+ }, [view]);
+ /** Automatically close the menu after any navigation */
+ useEffect(() => {
+ setView('closed');
+ }, [location.pathname]);
+ const handleClick = useCallback(() => {
+ setView(v => (v === 'closed' ? 'primary' : 'closed'));
+ }, [setView]);
+
+ return (
+
+
+
+
+
+ {view === 'closed' ? : }
+
+ {view !== 'closed' ? (
+
+
+
+ ) : null}
+
+ );
+}
+
+export default MobileTopbar;
+
+/** When the mobile menu opens, set the main content to `inert` and disable `body` scrolling */
+function updateNavStyleAttributes(view: NavView) {
+ const mainContent = document.getElementById('main');
+ if (!mainContent) {
+ throw new Error(
+ 'Unable to match "#main" element. Please add `id="main"` to the element which wraps the app content.'
+ );
+ }
+
+ if (view !== 'closed') {
+ mainContent.setAttribute('inert', '');
+ document.body.style.setProperty('overflow', 'hidden');
+ } else {
+ mainContent.removeAttribute('inert');
+ document.body.style.removeProperty('overflow');
+ }
+}
+
+function OverlayPortal({children}) {
+ return createPortal({children} , document.body);
+}
+
+const Topbar = styled('div')`
+ height: 40px;
+ width: 100vw;
+ padding: ${space(0.5)} ${space(1.5)} ${space(0.5)} ${space(1)};
+ border-bottom: 1px solid ${p => p.theme.translucentGray100};
+ background: #3e2648;
+ background: linear-gradient(180deg, #3e2648 0%, #442c4e 100%);
+ display: flex;
+ flex-direction: row;
+ align-items: center;
+ justify-content: space-between;
+ position: sticky;
+ top: 0;
+ z-index: ${theme.zIndex.sidebar};
+
+ svg {
+ display: block;
+ width: var(--size);
+ height: var(--size);
+ color: currentColor;
+ }
+ button {
+ all: initial;
+ --size: ${space(2)};
+ }
+ a {
+ --size: ${space(3)};
+ }
+ a,
+ button {
+ color: rgba(255, 255, 255, 0.85);
+ padding: ${space(1)};
+ margin: -${space(1)};
+ cursor: pointer;
+
+ &:hover {
+ color: white;
+ }
+ }
+`;
+
+const Overlay = styled('div')`
+ position: fixed;
+ top: 40px;
+ right: 0;
+ bottom: 0;
+ left: 0;
+ display: flex;
+ flex-direction: column;
+ background: ${p => p.theme.surface300};
+ z-index: ${p => p.theme.zIndex.modal};
+ --color: ${p => p.theme.textColor};
+ --color-hover: ${p => p.theme.activeText};
+`;
diff --git a/static/app/components/nav/sidebar.tsx b/static/app/components/nav/sidebar.tsx
new file mode 100644
index 0000000000000..7856cb2fa940b
--- /dev/null
+++ b/static/app/components/nav/sidebar.tsx
@@ -0,0 +1,190 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import Feature from 'sentry/components/acl/feature';
+import InteractionStateLayer from 'sentry/components/interactionStateLayer';
+import Link from 'sentry/components/links/link';
+import {useNavContext} from 'sentry/components/nav/context';
+import Submenu from 'sentry/components/nav/submenu';
+import {
+ isNavItemActive,
+ isNonEmptyArray,
+ isSubmenuItemActive,
+ makeLinkPropsFromTo,
+ type NavSidebarItem,
+ resolveNavItemTo,
+} from 'sentry/components/nav/utils';
+import SidebarDropdown from 'sentry/components/sidebar/sidebarDropdown';
+import {space} from 'sentry/styles/space';
+import theme from 'sentry/utils/theme';
+import {useLocation} from 'sentry/utils/useLocation';
+
+function Sidebar() {
+ return (
+
+
+
+
+
+
+
+
+
+ );
+}
+
+export default Sidebar;
+
+export function SidebarItems() {
+ const {config} = useNavContext();
+ return (
+
+
+ {config.main.map(item => (
+
+ ))}
+
+ {isNonEmptyArray(config.footer) && (
+
+ {config.footer.map(item => (
+
+ ))}
+
+ )}
+
+ );
+}
+
+const SidebarWrapper = styled('div')`
+ height: 40px;
+ width: 100vw;
+ padding: ${space(2)} 0;
+ border-right: 1px solid ${theme.translucentGray100};
+ /* these colors should be moved to the "theme" object */
+ background: #3e2648;
+ background: linear-gradient(180deg, #3e2648 0%, #442c4e 100%);
+ display: flex;
+ flex-direction: column;
+ z-index: ${theme.zIndex.sidebar};
+
+ @media screen and (min-width: ${p => p.theme.breakpoints.medium}) {
+ height: unset;
+ width: 74px;
+ }
+`;
+
+const SidebarItemList = styled('ul')`
+ position: relative;
+ list-style: none;
+ margin: 0;
+ padding: 0;
+ padding-top: ${space(1)};
+ display: flex;
+ flex-direction: column;
+ width: 100%;
+ color: rgba(255, 255, 255, 0.85);
+
+ @media screen and (min-width: ${p => p.theme.breakpoints.medium}) {
+ gap: ${space(1)};
+ }
+`;
+
+function SidebarItem({item}: {item: NavSidebarItem}) {
+ const location = useLocation();
+ const isActive = isNavItemActive(item, location);
+ const isSubmenuActive = isSubmenuItemActive(item, location);
+ const _to = resolveNavItemTo(item);
+ const linkProps = _to ? makeLinkPropsFromTo(_to) : {to: '#'};
+
+ const FeatureGuard = item.feature ? Feature : Fragment;
+ const featureGuardProps: any = item.feature ?? {};
+
+ return (
+
+
+
+
+ {item.icon}
+ {item.label}
+
+
+
+ );
+}
+
+const SidebarLink = styled(Link)`
+ position: relative;
+`;
+
+const SidebarItemWrapper = styled('li')`
+ svg {
+ --size: 14px;
+ width: var(--size);
+ height: var(--size);
+
+ @media (min-width: ${p => p.theme.breakpoints.medium}) {
+ --size: 18px;
+ padding-top: ${space(0.5)};
+ }
+ }
+ a {
+ display: flex;
+ flex-direction: row;
+ height: 32px;
+ gap: ${space(1.5)};
+ align-items: center;
+ padding: 0 ${space(1.5)};
+ color: var(--color, currentColor);
+ font-size: ${theme.fontSizeMedium};
+ font-weight: ${theme.fontWeightNormal};
+ line-height: 177.75%;
+
+ & > * {
+ pointer-events: none;
+ }
+
+ @media (min-width: ${p => p.theme.breakpoints.medium}) {
+ flex-direction: column;
+ justify-content: center;
+ height: 52px;
+ padding: ${space(0.5)} ${space(0.75)};
+ border-radius: ${theme.borderRadius};
+ font-size: ${theme.fontSizeExtraSmall};
+ margin-inline: ${space(1)};
+ gap: ${space(0.5)};
+ }
+ }
+`;
+
+const SidebarFooterWrapper = styled('div')`
+ position: relative;
+ border-top: 1px solid ${theme.translucentGray200};
+ display: flex;
+ flex-direction: row;
+ align-items: stretch;
+ padding-bottom: ${space(0.5)};
+ margin-top: auto;
+`;
+
+const SidebarHeader = styled('header')`
+ position: relative;
+ display: flex;
+ justify-content: center;
+ margin-bottom: ${space(1.5)};
+`;
+
+function SidebarBody({children}) {
+ return {children} ;
+}
+
+function SidebarFooter({children}) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/static/app/components/nav/submenu.tsx b/static/app/components/nav/submenu.tsx
new file mode 100644
index 0000000000000..c550ff32ce871
--- /dev/null
+++ b/static/app/components/nav/submenu.tsx
@@ -0,0 +1,135 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import Feature from 'sentry/components/acl/feature';
+import InteractionStateLayer from 'sentry/components/interactionStateLayer';
+import Link from 'sentry/components/links/link';
+import {useNavContext} from 'sentry/components/nav/context';
+import type {NavSubmenuItem} from 'sentry/components/nav/utils';
+import {
+ isNavItemActive,
+ isNonEmptyArray,
+ makeLinkPropsFromTo,
+} from 'sentry/components/nav/utils';
+import {space} from 'sentry/styles/space';
+import {useLocation} from 'sentry/utils/useLocation';
+
+function Submenu() {
+ const nav = useNavContext();
+ if (!nav.submenu) {
+ return null;
+ }
+
+ return (
+
+
+ {nav.submenu.main.map(item => (
+
+ ))}
+
+ {isNonEmptyArray(nav.submenu.footer) && (
+
+ {nav.submenu.footer.map(item => (
+
+ ))}
+
+ )}
+
+ );
+}
+
+export default Submenu;
+
+const SubmenuWrapper = styled('div')`
+ position: relative;
+ border-right: 1px solid ${p => p.theme.translucentGray200};
+ background: ${p => p.theme.surface300};
+ display: flex;
+ align-items: stretch;
+ justify-content: space-between;
+ flex-direction: column;
+ width: 150px;
+ z-index: ${p => p.theme.zIndex.sidebarPanel};
+`;
+
+function SubmenuItem({item}: {item: NavSubmenuItem}) {
+ const location = useLocation();
+ const isActive = isNavItemActive(item, location);
+ const linkProps = makeLinkPropsFromTo(item.to);
+
+ const FeatureGuard = item.feature ? Feature : Fragment;
+ const featureGuardProps: any = item.feature ?? {};
+
+ return (
+
+
+
+
+ {item.label}
+
+
+
+ );
+}
+
+const SubmenuLink = styled(Link)`
+ position: relative;
+`;
+
+const SubmenuItemList = styled('ul')`
+ list-style: none;
+ margin: 0;
+ padding: 0;
+ padding-top: ${space(1)};
+ display: flex;
+ flex-direction: column;
+ width: 100%;
+ color: ${p => p.theme.gray400};
+`;
+
+const SubmenuItemWrapper = styled('li')`
+ a {
+ display: flex;
+ padding: 5px ${space(1.5)};
+ height: 32px;
+ align-items: center;
+ color: inherit;
+ font-size: ${p => p.theme.fontSizeMedium};
+ font-weight: ${p => p.theme.fontWeightNormal};
+ line-height: 177.75%;
+ margin-inline: ${space(1)};
+ border-radius: ${p => p.theme.borderRadius};
+
+ &.active {
+ color: ${p => p.theme.gray500};
+ background: rgba(62, 52, 70, 0.09);
+ border: 1px solid ${p => p.theme.translucentGray100};
+ }
+ }
+`;
+
+const SubmenuFooterWrapper = styled('div')`
+ position: relative;
+ border-top: 1px solid ${p => p.theme.translucentGray200};
+ background: ${p => p.theme.surface300};
+ display: flex;
+ flex-direction: row;
+ align-items: stretch;
+ padding-block: ${space(1)};
+`;
+
+function SubmenuBody({children}) {
+ return {children} ;
+}
+
+function SubmenuFooter({children}) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/static/app/components/nav/utils.tsx b/static/app/components/nav/utils.tsx
new file mode 100644
index 0000000000000..4a7e6fdd7d74a
--- /dev/null
+++ b/static/app/components/nav/utils.tsx
@@ -0,0 +1,174 @@
+import type {ComponentProps} from 'react';
+import type {LocationDescriptor} from 'history';
+
+import type Feature from 'sentry/components/acl/feature';
+import {isItemActive} from 'sentry/components/sidebar/sidebarItem';
+import {SIDEBAR_NAVIGATION_SOURCE} from 'sentry/components/sidebar/utils';
+import normalizeUrl from 'sentry/utils/url/normalizeUrl';
+import type {useLocation} from 'sentry/utils/useLocation';
+
+/** NavItem is the base interface shared by both SidebarItem and SubmenuItem */
+interface NavItem {
+ /** User-facing item label, surfaced in the UI. Should be translated! */
+ label: string;
+  /** Optionally, props which should be passed to a wrapping `<Feature>` guard */
+ feature?: Omit, 'children'>;
+}
+
+/** NavItems are displayed in either the `main` or the `footer` section */
+export interface NavItemLayout- {
+ main: Item[];
+ footer?: Item[];
+}
+
+/** SidebarItem is a top-level NavItem which is always displayed in the app sidebar */
+export interface NavSidebarItem extends NavItem {
+ /** The icon to render in the sidebar */
+ icon: JSX.Element;
+ /** Optionally, the submenu items to display when this SidebarItem is active */
+ submenu?: NavSubmenuItem[] | NavItemLayout
;
+ /**
+ * The pathname (including `search` params) to navigate to when the item is clicked.
+ * Defaults to the `to` property of the first `SubmenuItem` if excluded.
+ */
+ to?: string;
+}
+
+/** SubmenuItem is a secondary NavItem which is only displayed when its parent SidebarItem is active */
+export interface NavSubmenuItem extends NavItem {
+ /**
+ * The pathname (including `search` params) to navigate to when the item is clicked.
+ */
+ to: string;
+}
+
+export type NavConfig = NavItemLayout;
+
+export type NavigationItemStatus = 'inactive' | 'active' | 'active-parent';
+
+/** Determine if a given SidebarItem or SubmenuItem is active */
+export function isNavItemActive(
+ item: NavSidebarItem | NavSubmenuItem,
+ location: ReturnType
+): boolean {
+ const to = resolveNavItemTo(item);
+ if (!to) {
+ return false;
+ }
+
+ /**
+ * Issue submenu is special cased because it is matched based on query params
+ * rather than the pathname.
+ */
+ if (location.pathname.includes('/issues/') && to.includes('/issues/')) {
+ const {label} = item;
+ const matches = hasMatchingQueryParam({to, label}, location);
+ const isDefault = label === 'All';
+ if (location.search) {
+ return matches || isDefault;
+ }
+ return isDefault;
+ }
+
+ const normalizedTo = normalizeUrl(to);
+ const normalizedCurrent = normalizeUrl(location.pathname);
+ // Shortcut for exact matches
+ if (normalizedTo === normalizedCurrent) {
+ return true;
+ }
+ // Fallback to legacy nav logic
+ return isItemActive({to, label: item.label});
+}
+
+export function isSubmenuItemActive(
+ item: NavSidebarItem,
+ location: ReturnType
+): boolean {
+ if (!item.submenu) {
+ return false;
+ }
+ if (isNonEmptyArray(item.submenu)) {
+ return item.submenu.some(subitem => isNavItemActive(subitem, location));
+ }
+ return (
+ item.submenu.main.some(subitem => isNavItemActive(subitem, location)) ||
+ item.submenu.footer?.some(subitem => isNavItemActive(subitem, location)) ||
+ false
+ );
+}
+
+/** Creates a `LocationDescriptor` from a URL string that may contain search params */
+export function makeLinkPropsFromTo(to: string): {
+ state: object;
+ to: LocationDescriptor;
+} {
+ const [pathname, search] = to.split('?');
+
+ return {
+ to: {
+ pathname,
+ search: search ? `?${search}` : undefined,
+ },
+ state: {source: SIDEBAR_NAVIGATION_SOURCE},
+ };
+}
+
+export function isNonEmptyArray(item: unknown): item is any[] {
+ return Array.isArray(item) && item.length > 0;
+}
+
+/** SidebarItem `to` can be derived from the first submenu item if necessary */
+export function resolveNavItemTo(
+ item: NavSidebarItem | NavSubmenuItem
+): string | undefined {
+ if (item.to) {
+ return item.to;
+ }
+ if (isSidebarItem(item) && isNonEmptyArray(item.submenu)) {
+ return item.submenu[0].to;
+ }
+ return undefined;
+}
+
+/**
+ * Unique logic for query param matches.
+ *
+ * `location` might have additional query params,
+ * but it is considered active if it contains *all* of the params in `item`.
+ */
+function hasMatchingQueryParam(
+ item: Required>,
+ location: ReturnType
+): boolean {
+ if (location.search.length === 0) {
+ return false;
+ }
+ if (item.to.includes('?')) {
+ const search = new URLSearchParams(location.search);
+ const itemSearch = new URLSearchParams(item.to.split('?').at(-1));
+ const itemQuery = itemSearch.get('query');
+ const query = search.get('query');
+ /**
+ * The "Issues / All" tab is a special case!
+     * It is considered active only when neither the location nor the item has a `query` param.
+ */
+ if (item?.label === 'All') {
+ return !query && !itemQuery;
+ }
+ if (itemQuery && query) {
+ let match = false;
+ for (const key of itemQuery?.split(' ')) {
+ match = query.includes(key);
+ if (!match) {
+ continue;
+ }
+ }
+ return match;
+ }
+ }
+ return false;
+}
+
+function isSidebarItem(item: NavSidebarItem | NavSubmenuItem): item is NavSidebarItem {
+ return Object.hasOwn(item, 'icon');
+}
diff --git a/static/app/components/navigationButtonGroup.tsx b/static/app/components/navigationButtonGroup.tsx
deleted file mode 100644
index 566fbc798eb25..0000000000000
--- a/static/app/components/navigationButtonGroup.tsx
+++ /dev/null
@@ -1,69 +0,0 @@
-import type {LocationDescriptor} from 'history';
-
-import type {ButtonProps} from 'sentry/components/button';
-import {LinkButton} from 'sentry/components/button';
-import ButtonBar from 'sentry/components/buttonBar';
-import {IconNext, IconPrevious} from 'sentry/icons';
-import {t} from 'sentry/locale';
-
-type Props = {
- /**
- * A set of LocationDescriptors that will be used in the buttons in the following order:
- * [Oldest, Older, Newer, Newest]
- */
- links: [LocationDescriptor, LocationDescriptor, LocationDescriptor, LocationDescriptor];
- className?: string;
- hasNext?: boolean;
- hasPrevious?: boolean;
- onNewerClick?: () => void;
- onNewestClick?: () => void;
- onOlderClick?: () => void;
- onOldestClick?: () => void;
- size?: ButtonProps['size'];
-};
-
-function NavigationButtonGroup({
- links,
- hasNext = false,
- hasPrevious = false,
- className,
- size,
- onOldestClick,
- onOlderClick,
- onNewerClick,
- onNewestClick,
-}: Props) {
- return (
-
- }
- onClick={onOldestClick}
- />
-
- {t('Older')}
-
-
- {t('Newer')}
-
- }
- onClick={onNewestClick}
- />
-
- );
-}
-
-export default NavigationButtonGroup;
diff --git a/static/app/components/noProjectMessage.tsx b/static/app/components/noProjectMessage.tsx
index 2757415354c46..0142dd62fc654 100644
--- a/static/app/components/noProjectMessage.tsx
+++ b/static/app/components/noProjectMessage.tsx
@@ -7,10 +7,10 @@ import NoProjectEmptyState from 'sentry/components/illustrations/NoProjectEmptyS
import * as Layout from 'sentry/components/layouts/thirds';
import {canCreateProject} from 'sentry/components/projects/canCreateProject';
import {t} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import {space} from 'sentry/styles/space';
import type {Organization} from 'sentry/types/organization';
import useProjects from 'sentry/utils/useProjects';
+import {useUser} from 'sentry/utils/useUser';
type Props = {
organization: Organization;
@@ -23,17 +23,16 @@ function NoProjectMessage({
organization,
superuserNeedsToBeProjectMember,
}: Props) {
+ const user = useUser();
const {projects, initiallyLoaded: projectsLoaded} = useProjects();
const orgSlug = organization.slug;
const canUserCreateProject = canCreateProject(organization);
const canJoinTeam = organization.access.includes('team:read');
- const {isSuperuser} = ConfigStore.get('user');
-
const orgHasProjects = !!projects?.length;
const hasProjectAccess =
- isSuperuser && !superuserNeedsToBeProjectMember
+ user.isSuperuser && !superuserNeedsToBeProjectMember
? !!projects?.some(p => p.hasAccess)
: !!projects?.some(p => p.isMember && p.hasAccess);
diff --git a/static/app/components/objectInspector.tsx b/static/app/components/objectInspector.tsx
deleted file mode 100644
index 39a041465cc6a..0000000000000
--- a/static/app/components/objectInspector.tsx
+++ /dev/null
@@ -1,101 +0,0 @@
-import type {ComponentProps, MouseEvent} from 'react';
-import {useMemo} from 'react';
-import {useTheme} from '@emotion/react';
-import styled from '@emotion/styled';
-import {
- chromeDark,
- chromeLight,
- ObjectInspector as OrigObjectInspector,
-} from '@sentry-internal/react-inspector';
-
-import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton';
-import ConfigStore from 'sentry/stores/configStore';
-import {useLegacyStore} from 'sentry/stores/useLegacyStore';
-import {space} from 'sentry/styles/space';
-
-type Props = Omit, 'theme'> & {
- onCopy?: (copiedCode: string) => void;
- showCopyButton?: boolean;
- theme?: Record;
-};
-
-/**
- * @deprecated use `StructuredEventData` or `StructuredData` instead.
- */
-function ObjectInspector({data, onCopy, showCopyButton, theme, ...props}: Props) {
- const config = useLegacyStore(ConfigStore);
- const emotionTheme = useTheme();
- const isDark = config.theme === 'dark';
-
- const INSPECTOR_THEME = useMemo(
- () => ({
- ...(isDark ? chromeDark : chromeLight),
-
- // Reset some theme values
- BASE_COLOR: 'inherit',
- ERROR_COLOR: emotionTheme.red400,
- TREENODE_FONT_FAMILY: emotionTheme.text.familyMono,
- TREENODE_FONT_SIZE: 'inherit',
- TREENODE_LINE_HEIGHT: 'inherit',
- BASE_BACKGROUND_COLOR: 'none',
- ARROW_FONT_SIZE: '10px',
-
- OBJECT_PREVIEW_OBJECT_MAX_PROPERTIES: 1,
- ...theme,
- }),
- [isDark, theme, emotionTheme.red400, emotionTheme.text]
- );
-
- const inspector = (
-
- );
- if (showCopyButton) {
- return (
-
-
- {inspector}
-
- );
- }
-
- return inspector;
-}
-
-const InspectorWrapper = styled('div')`
- margin-right: ${space(4)};
-`;
-
-const Wrapper = styled('div')`
- position: relative;
-
- /*
- We need some minimum vertical height so the copy button has room.
- But don't try to use min-height because then whitespace would be inconsistent.
- */
- padding-bottom: ${space(1.5)};
-`;
-
-const StyledCopyButton = styled(CopyToClipboardButton)`
- position: absolute;
- top: 0;
- right: ${space(0.5)};
-`;
-
-export type OnExpandCallback = (
- path: string,
- expandedState: Record,
- event: MouseEvent
-) => void;
-
-export default ObjectInspector;
diff --git a/static/app/components/onboarding/frameworkSuggestionModal.tsx b/static/app/components/onboarding/frameworkSuggestionModal.tsx
index f1e21cdb56933..877dcfcd3ff9b 100644
--- a/static/app/components/onboarding/frameworkSuggestionModal.tsx
+++ b/static/app/components/onboarding/frameworkSuggestionModal.tsx
@@ -46,6 +46,7 @@ export const topJavascriptFrameworks: PlatformKey[] = [
'javascript-nextjs',
'javascript-react',
'javascript-vue',
+ 'javascript-nuxt',
'javascript-angular',
'javascript-solid',
'javascript-solidstart',
diff --git a/static/app/components/onboarding/gettingStartedDoc/layout.tsx b/static/app/components/onboarding/gettingStartedDoc/layout.tsx
deleted file mode 100644
index 1571a468e1dd5..0000000000000
--- a/static/app/components/onboarding/gettingStartedDoc/layout.tsx
+++ /dev/null
@@ -1,126 +0,0 @@
-import type {ComponentProps} from 'react';
-import {Fragment} from 'react';
-import styled from '@emotion/styled';
-
-import HookOrDefault from 'sentry/components/hookOrDefault';
-import ExternalLink from 'sentry/components/links/externalLink';
-import List from 'sentry/components/list';
-import ListItem from 'sentry/components/list/listItem';
-import {AuthTokenGeneratorProvider} from 'sentry/components/onboarding/gettingStartedDoc/authTokenGenerator';
-import type {StepProps} from 'sentry/components/onboarding/gettingStartedDoc/step';
-import {Step} from 'sentry/components/onboarding/gettingStartedDoc/step';
-import type {NextStep} from 'sentry/components/onboarding/gettingStartedDoc/types';
-import {PlatformOptionsControl} from 'sentry/components/onboarding/platformOptionsControl';
-import {ProductSelection} from 'sentry/components/onboarding/productSelection';
-import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
-import type {PlatformKey} from 'sentry/types/project';
-import useOrganization from 'sentry/utils/useOrganization';
-
-const ProductSelectionAvailabilityHook = HookOrDefault({
- hookName: 'component:product-selection-availability',
- defaultComponent: ProductSelection,
-});
-
-export type LayoutProps = {
- projectSlug: string;
- steps: StepProps[];
- /**
- * An introduction displayed before the steps
- */
- introduction?: React.ReactNode;
- newOrg?: boolean;
- nextSteps?: NextStep[];
- platformKey?: PlatformKey;
- platformOptions?: ComponentProps['platformOptions'];
-};
-
-export function Layout({
- steps,
- platformKey,
- newOrg,
- nextSteps = [],
- platformOptions,
- introduction,
- projectSlug,
-}: LayoutProps) {
- const organization = useOrganization();
-
- return (
-
-
-
- {introduction && {introduction} }
-
- {platformOptions ? (
-
- ) : null}
-
-
-
- {steps.map(step => (
-
- ))}
-
- {nextSteps.length > 0 && (
-
-
- {t('Next Steps')}
-
- {nextSteps.map(step => (
-
- {step.name}
- {': '}
- {step.description}
-
- ))}
-
-
- )}
-
-
- );
-}
-
-const Header = styled('div')`
- display: flex;
- flex-direction: column;
- gap: ${space(2)};
-`;
-
-const Divider = styled('hr')<{withBottomMargin?: boolean}>`
- height: 1px;
- width: 100%;
- background: ${p => p.theme.border};
- border: none;
- ${p => p.withBottomMargin && `margin-bottom: ${space(3)}`}
-`;
-
-const Steps = styled('div')`
- display: flex;
- flex-direction: column;
- gap: 1.5rem;
-`;
-
-const Introduction = styled('div')`
- display: flex;
- flex-direction: column;
- gap: ${space(1)};
-`;
-
-const Wrapper = styled('div')`
- h4 {
- margin-bottom: 0.5em;
- }
- && {
- p {
- margin-bottom: 0;
- }
- h5 {
- margin-bottom: 0;
- }
- }
-`;
diff --git a/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx b/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx
index 568a1e5a79d98..aa9a84de62961 100644
--- a/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx
@@ -170,7 +170,7 @@ export function OnboardingLayout({
{nextSteps.length > 0 && (
- {t('Next Steps')}
+ {t('Additional Information')}
{nextSteps
.filter((step): step is Exclude => step !== null)
diff --git a/static/app/components/onboarding/gettingStartedDoc/step.tsx b/static/app/components/onboarding/gettingStartedDoc/step.tsx
index 029f04c000f7e..862d864e8afda 100644
--- a/static/app/components/onboarding/gettingStartedDoc/step.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/step.tsx
@@ -126,15 +126,18 @@ interface BaseStepProps {
* Content that goes directly above the code snippet
*/
codeHeader?: React.ReactNode;
+ /**
+ * Whether the step instructions are collapsible
+ */
+ collapsible?: boolean;
+ /**
+ * An array of configurations to be displayed
+ */
configurations?: Configuration[];
/**
* A brief description of the step
*/
description?: React.ReactNode | React.ReactNode[];
- /**
- * Whether the step is optional
- */
- isOptional?: boolean;
/**
* Fired when the optional toggle is clicked.
* Useful for when we want to fire analytics events.
@@ -205,7 +208,7 @@ export function Step({
additionalInfo,
description,
onOptionalToggleClick,
- isOptional = false,
+ collapsible = false,
codeHeader,
}: StepProps) {
const [showOptionalConfig, setShowOptionalConfig] = useState(false);
@@ -248,25 +251,23 @@ export function Step({
);
- return isOptional ? (
+ return collapsible ? (
-
+ {
+ onOptionalToggleClick?.(!showOptionalConfig);
+ setShowOptionalConfig(!showOptionalConfig);
+ }}
+ >
+ {title ?? StepTitle[type]}
}
aria-label={t('Toggle optional configuration')}
- onClick={() => {
- onOptionalToggleClick?.(!showOptionalConfig);
- setShowOptionalConfig(!showOptionalConfig);
- }}
- >
-
- {title ?? StepTitle[type]}
- {t(' (Optional)')}
-
-
+ />
{showOptionalConfig ? config : null}
@@ -307,13 +308,15 @@ const GeneralAdditionalInfo = styled(Description)`
margin-top: ${space(2)};
`;
-const OptionalConfigWrapper = styled('div')`
+const OptionalConfigWrapper = styled('div')<{expanded: boolean}>`
display: flex;
+ gap: ${space(1)};
+ margin-bottom: ${p => (p.expanded ? space(2) : 0)};
cursor: pointer;
- margin-bottom: 0.5em;
`;
const ToggleButton = styled(Button)`
+ padding: 0;
&,
:hover {
color: ${p => p.theme.gray500};
diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/index.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/index.tsx
index bffd82e105987..aa3b91ed13b4d 100644
--- a/static/app/components/onboarding/gettingStartedDoc/utils/index.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/utils/index.tsx
@@ -11,11 +11,10 @@ export function getUploadSourceMapsStep({
projectId,
newOrg,
isSelfHosted,
- urlPrefix,
}: DocsParams & {
guideLink: string;
}) {
- const urlParam = !isSelfHosted && urlPrefix ? `--url ${urlPrefix}` : '';
+ const urlParam = isSelfHosted ? '' : '--saas';
return {
title: t('Upload Source Maps'),
description: (
diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx
index c30188815101e..d0dac3f24ad4a 100644
--- a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx
@@ -1,5 +1,4 @@
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {createMemoryHistory, Route, Router, RouterContext} from 'react-router';
+import {createMemoryRouter, RouterProvider} from 'react-router-dom';
import {ProjectFixture} from 'sentry-fixture/project';
import {act, renderHook} from 'sentry-test/reactTestingLibrary';
@@ -16,31 +15,25 @@ import {
import PageFiltersStore from 'sentry/stores/pageFiltersStore';
import ProjectsStore from 'sentry/stores/projectsStore';
import type {Project} from 'sentry/types/project';
-import {RouteContext} from 'sentry/views/routeContext';
function createWrapper(projectSlug?: string) {
- const memoryHistory = createMemoryHistory();
+ return function Wrapper({children}) {
+ const memoryRouter = createMemoryRouter([
+ {
+ path: '/',
+ element: children,
+ },
+ {
+ path: '/:projectId/',
+ element: children,
+ },
+ ]);
- if (projectSlug) {
- memoryHistory.push(`/${projectSlug}/`);
- }
+ if (projectSlug) {
+ memoryRouter.navigate(`/${projectSlug}/`);
+ }
- return function Wrapper({children}) {
- return (
- {
- return (
-
-
-
- );
- }}
- >
- children} />
- children} />
-
- );
+ return ;
};
}
diff --git a/static/app/components/onboarding/productSelection.spec.tsx b/static/app/components/onboarding/productSelection.spec.tsx
index 21d5404f2405b..e460ecb8f4b40 100644
--- a/static/app/components/onboarding/productSelection.spec.tsx
+++ b/static/app/components/onboarding/productSelection.spec.tsx
@@ -2,7 +2,6 @@ import {OrganizationFixture} from 'sentry-fixture/organization';
import {initializeOrg} from 'sentry-test/initializeOrg';
import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
-import {textWithMarkupMatcher} from 'sentry-test/utils';
import {
platformProductAvailability,
@@ -39,14 +38,6 @@ describe('Onboarding Product Selection', function () {
router,
});
- // Introduction
- expect(
- screen.getByText(
- textWithMarkupMatcher(/In this quick guide you’ll use npm or yarn/)
- )
- ).toBeInTheDocument();
- expect(screen.queryByText('Prefer to set up Sentry using')).not.toBeInTheDocument();
-
// Error monitoring shall be checked and disabled by default
expect(screen.getByRole('checkbox', {name: 'Error Monitoring'})).toBeChecked();
@@ -233,42 +224,6 @@ describe('Onboarding Product Selection', function () {
expect(screen.getByRole('checkbox', {name: 'Session Replay'})).toBeDisabled();
});
- it('renders npm & yarn info text', function () {
- const {router} = initializeOrg({
- router: {
- location: {
- query: {product: [ProductSolution.PERFORMANCE_MONITORING]},
- },
- params: {},
- },
- });
-
- render( , {
- router,
- });
-
- expect(screen.queryByText('npm')).toBeInTheDocument();
- expect(screen.queryByText('yarn')).toBeInTheDocument();
- });
-
- it('does not render npm & yarn info text', function () {
- const {router} = initializeOrg({
- router: {
- location: {
- query: {product: [ProductSolution.PERFORMANCE_MONITORING]},
- },
- params: {},
- },
- });
-
- render( , {
- router,
- });
-
- expect(screen.queryByText('npm')).not.toBeInTheDocument();
- expect(screen.queryByText('yarn')).not.toBeInTheDocument();
- });
-
it('triggers onChange callback', async function () {
const {router} = initializeOrg({
router: {
diff --git a/static/app/components/onboarding/productSelection.tsx b/static/app/components/onboarding/productSelection.tsx
index f2f3e809d9b2f..d8d1798c5d04c 100644
--- a/static/app/components/onboarding/productSelection.tsx
+++ b/static/app/components/onboarding/productSelection.tsx
@@ -1,5 +1,5 @@
import type {ReactNode} from 'react';
-import {Fragment, useCallback, useEffect, useMemo} from 'react';
+import {useCallback, useEffect, useMemo} from 'react';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
@@ -17,7 +17,6 @@ import {space} from 'sentry/styles/space';
import type {Organization} from 'sentry/types/organization';
import type {PlatformKey} from 'sentry/types/project';
import {useOnboardingQueryParams} from 'sentry/views/onboarding/components/useOnboardingQueryParams';
-import TextBlock from 'sentry/views/settings/components/text/textBlock';
// TODO(aknaus): move to types
export enum ProductSolution {
@@ -120,6 +119,10 @@ export const platformProductAvailability = {
ProductSolution.PERFORMANCE_MONITORING,
ProductSolution.SESSION_REPLAY,
],
+ 'javascript-nuxt': [
+ ProductSolution.PERFORMANCE_MONITORING,
+ ProductSolution.SESSION_REPLAY,
+ ],
'javascript-angular': [
ProductSolution.PERFORMANCE_MONITORING,
ProductSolution.SESSION_REPLAY,
@@ -380,72 +383,54 @@ export function ProductSelection({
return null;
}
- // TODO(aknaus): clean up
- // The package manager info is only shown for javascript platforms
- // until we improve multi snippet suppport
- const showPackageManagerInfo =
- (platform?.indexOf('javascript') === 0 || platform?.indexOf('node') === 0) &&
- platform !== 'javascript-astro' &&
- platform !== 'javascript';
-
return (
-
- {showPackageManagerInfo && (
-
- {tct('In this quick guide you’ll use [npm] or [yarn] to set up:', {
- npm: npm ,
- yarn: yarn ,
- })}
-
+
+
+ {products.includes(ProductSolution.PERFORMANCE_MONITORING) && (
+ handleClickProduct(ProductSolution.PERFORMANCE_MONITORING)}
+ disabled={disabledProducts[ProductSolution.PERFORMANCE_MONITORING]}
+ checked={urlProducts.includes(ProductSolution.PERFORMANCE_MONITORING)}
+ />
)}
-
+ {products.includes(ProductSolution.SESSION_REPLAY) && (
handleClickProduct(ProductSolution.SESSION_REPLAY)}
+ disabled={disabledProducts[ProductSolution.SESSION_REPLAY]}
+ checked={urlProducts.includes(ProductSolution.SESSION_REPLAY)}
/>
- {products.includes(ProductSolution.PERFORMANCE_MONITORING) && (
- handleClickProduct(ProductSolution.PERFORMANCE_MONITORING)}
- disabled={disabledProducts[ProductSolution.PERFORMANCE_MONITORING]}
- checked={urlProducts.includes(ProductSolution.PERFORMANCE_MONITORING)}
- />
- )}
- {products.includes(ProductSolution.SESSION_REPLAY) && (
- handleClickProduct(ProductSolution.SESSION_REPLAY)}
- disabled={disabledProducts[ProductSolution.SESSION_REPLAY]}
- checked={urlProducts.includes(ProductSolution.SESSION_REPLAY)}
- />
- )}
- {products.includes(ProductSolution.PROFILING) && (
- ,
- }
- )}
- docLink="https://docs.sentry.io/platforms/python/profiling/"
- onClick={() => handleClickProduct(ProductSolution.PROFILING)}
- disabled={disabledProducts[ProductSolution.PROFILING]}
- checked={urlProducts.includes(ProductSolution.PROFILING)}
- />
- )}
-
-
+ )}
+ {products.includes(ProductSolution.PROFILING) && (
+ ,
+ }
+ )}
+ docLink="https://docs.sentry.io/platforms/python/profiling/"
+ onClick={() => handleClickProduct(ProductSolution.PROFILING)}
+ disabled={disabledProducts[ProductSolution.PROFILING]}
+ checked={urlProducts.includes(ProductSolution.PROFILING)}
+ />
+ )}
+
);
}
diff --git a/static/app/components/onboardingWizard/newSidebar.tsx b/static/app/components/onboardingWizard/newSidebar.tsx
new file mode 100644
index 0000000000000..5848d5b77d16f
--- /dev/null
+++ b/static/app/components/onboardingWizard/newSidebar.tsx
@@ -0,0 +1,484 @@
+import {
+ Fragment,
+ useCallback,
+ useContext,
+ useEffect,
+ useMemo,
+ useRef,
+ useState,
+} from 'react';
+import {css} from '@emotion/react';
+import styled from '@emotion/styled';
+import partition from 'lodash/partition';
+
+import {navigateTo} from 'sentry/actionCreators/navigation';
+import {updateOnboardingTask} from 'sentry/actionCreators/onboardingTasks';
+import {Button} from 'sentry/components/button';
+import {Chevron} from 'sentry/components/chevron';
+import InteractionStateLayer from 'sentry/components/interactionStateLayer';
+import {
+ OnboardingContext,
+ type OnboardingContextProps,
+} from 'sentry/components/onboarding/onboardingContext';
+import SkipConfirm from 'sentry/components/onboardingWizard/skipConfirm';
+import {findCompleteTasks, taskIsDone} from 'sentry/components/onboardingWizard/utils';
+import ProgressRing from 'sentry/components/progressRing';
+import SidebarPanel from 'sentry/components/sidebar/sidebarPanel';
+import type {CommonSidebarProps} from 'sentry/components/sidebar/types';
+import {IconCheckmark, IconClose} from 'sentry/icons';
+import {t, tct} from 'sentry/locale';
+import DemoWalkthroughStore from 'sentry/stores/demoWalkthroughStore';
+import {space} from 'sentry/styles/space';
+import {
+ type OnboardingTask,
+ OnboardingTaskGroup,
+ type OnboardingTaskKey,
+} from 'sentry/types/onboarding';
+import type {Organization} from 'sentry/types/organization';
+import type {Project} from 'sentry/types/project';
+import {trackAnalytics} from 'sentry/utils/analytics';
+import {isDemoWalkthrough} from 'sentry/utils/demoMode';
+import useApi from 'sentry/utils/useApi';
+import useOrganization from 'sentry/utils/useOrganization';
+import useProjects from 'sentry/utils/useProjects';
+import useRouter from 'sentry/utils/useRouter';
+
+import {getMergedTasks} from './taskConfig';
+
+/**
+ * How long (in ms) to delay before beginning to mark tasks complete
+ */
+const INITIAL_MARK_COMPLETE_TIMEOUT = 600;
+
+/**
+ * How long (in ms) to delay between marking each unseen task as complete.
+ */
+const COMPLETION_SEEN_TIMEOUT = 800;
+
+function useOnboardingTasks(
+ organization: Organization,
+ projects: Project[],
+ onboardingContext: OnboardingContextProps
+) {
+ return useMemo(() => {
+ const all = getMergedTasks({
+ organization,
+ projects,
+ onboardingContext,
+ }).filter(task => task.display);
+ return {
+ allTasks: all,
+ gettingStartedTasks: all.filter(
+ task => task.group === OnboardingTaskGroup.GETTING_STARTED
+ ),
+ beyondBasicsTasks: all.filter(
+ task => task.group !== OnboardingTaskGroup.GETTING_STARTED
+ ),
+ };
+ }, [organization, projects, onboardingContext]);
+}
+
+function groupTasksByCompletion(tasks: OnboardingTask[]) {
+ const [completedTasks, incompletedTasks] = partition(tasks, task =>
+ findCompleteTasks(task)
+ );
+ return {
+ completedTasks,
+ incompletedTasks,
+ };
+}
+
+function getPanelDescription(walkthrough: boolean) {
+ if (walkthrough) {
+ return {
+ title: t('Guided Tours'),
+ description: t('Take a guided tour to see what Sentry can do for you'),
+ };
+ }
+ return {
+ title: t('Quick Start'),
+ description: t('Walk through this guide to get the most out of Sentry right away.'),
+ };
+}
+
+interface TaskProps {
+ hidePanel: () => void;
+ task: OnboardingTask;
+ completed?: boolean;
+}
+
+function Task({task, completed, hidePanel}: TaskProps) {
+ const api = useApi();
+ const organization = useOrganization();
+ const router = useRouter();
+
+ const handleClick = useCallback(
+ (e: React.MouseEvent) => {
+ trackAnalytics('quick_start.task_card_clicked', {
+ organization,
+ todo_id: task.task,
+ todo_title: task.title,
+ action: 'clickthrough',
+ });
+
+ e.stopPropagation();
+
+ if (isDemoWalkthrough()) {
+ DemoWalkthroughStore.activateGuideAnchor(task.task);
+ }
+
+ if (task.actionType === 'external') {
+ window.open(task.location, '_blank');
+ }
+
+ if (task.actionType === 'action') {
+ task.action(router);
+ }
+
+ if (task.actionType === 'app') {
+ // Convert all paths to a location object
+ let to =
+ typeof task.location === 'string' ? {pathname: task.location} : task.location;
+ // Add referrer to all links
+ to = {...to, query: {...to.query, referrer: 'onboarding_task'}};
+
+ navigateTo(to, router);
+ }
+ hidePanel();
+ },
+ [task, organization, router, hidePanel]
+ );
+
+ const handleMarkComplete = useCallback(
+ (taskKey: OnboardingTaskKey) => {
+ updateOnboardingTask(api, organization, {
+ task: taskKey,
+ status: 'complete',
+ completionSeen: true,
+ });
+ },
+ [api, organization]
+ );
+
+ const handleMarkSkipped = useCallback(
+ (taskKey: OnboardingTaskKey) => {
+ trackAnalytics('quick_start.task_card_clicked', {
+ organization,
+ todo_id: task.task,
+ todo_title: task.title,
+ action: 'skipped',
+ });
+ updateOnboardingTask(api, organization, {
+ task: taskKey,
+ status: 'skipped',
+ completionSeen: true,
+ });
+ },
+ [task, organization, api]
+ );
+
+ if (completed) {
+ return (
+
+ {task.title}
+
+
+ );
+ }
+
+ return (
+
+
+
+
{task.title}
+
{task.description}
+
+ {task.requisiteTasks.length === 0 && (
+
+ {task.skippable && (
+ handleMarkSkipped(task.task)}>
+ {({skip}) => (
+ }
+ onClick={skip}
+ />
+ )}
+
+ )}
+ {task.SupplementComponent && (
+ handleMarkComplete(task.task)}
+ />
+ )}
+
+ )}
+
+ );
+}
+
+interface TaskGroupProps {
+ description: string;
+ hidePanel: () => void;
+ tasks: OnboardingTask[];
+ title: string;
+ expanded?: boolean;
+}
+
+function TaskGroup({title, description, tasks, expanded, hidePanel}: TaskGroupProps) {
+ const [isExpanded, setIsExpanded] = useState(expanded);
+ const {completedTasks, incompletedTasks} = groupTasksByCompletion(tasks);
+
+ useEffect(() => {
+ setIsExpanded(expanded);
+ }, [expanded]);
+
+ return (
+
+ setIsExpanded(!isExpanded)}>
+
+
+
{title}
+
{description}
+
+
+
+ {isExpanded && (
+
+
+
+
+ {tct('[totalCompletedTasks] out of [totalTasks] tasks completed', {
+ totalCompletedTasks: completedTasks.length,
+ totalTasks: tasks.length,
+ })}
+
+
+ {incompletedTasks.map(task => (
+
+ ))}
+ {completedTasks.length > 0 && (
+
+ {t('Completed')}
+ {completedTasks.map(task => (
+
+ ))}
+
+ )}
+
+
+ )}
+
+ );
+}
+
+interface NewSidebarProps extends Pick {
+ onClose: () => void;
+}
+
+export function NewOnboardingSidebar({onClose, orientation, collapsed}: NewSidebarProps) {
+ const api = useApi();
+ const organization = useOrganization();
+ const onboardingContext = useContext(OnboardingContext);
+ const {projects} = useProjects();
+ const walkthrough = isDemoWalkthrough();
+ const {title, description} = getPanelDescription(walkthrough);
+ const {allTasks, gettingStartedTasks, beyondBasicsTasks} = useOnboardingTasks(
+ organization,
+ projects,
+ onboardingContext
+ );
+
+ const markCompletionTimeout = useRef();
+ const markCompletionSeenTimeout = useRef();
+
+ function completionTimeout(time: number): Promise {
+ window.clearTimeout(markCompletionTimeout.current);
+ return new Promise(resolve => {
+ markCompletionTimeout.current = window.setTimeout(resolve, time);
+ });
+ }
+
+ function seenTimeout(time: number): Promise {
+ window.clearTimeout(markCompletionSeenTimeout.current);
+ return new Promise(resolve => {
+ markCompletionSeenTimeout.current = window.setTimeout(resolve, time);
+ });
+ }
+
+ const markTasksAsSeen = useCallback(
+ async function () {
+ const unseenTasks = allTasks
+ .filter(task => taskIsDone(task) && !task.completionSeen)
+ .map(task => task.task);
+
+ // Incrementally mark tasks as seen. This gives the card completion
+ // animations time before we move each task into the completed section.
+ for (const task of unseenTasks) {
+ await seenTimeout(COMPLETION_SEEN_TIMEOUT);
+ updateOnboardingTask(api, organization, {task, completionSeen: true});
+ }
+ },
+ [api, organization, allTasks]
+ );
+
+ const markSeenOnOpen = useCallback(
+ async function () {
+ // Add a minor delay to marking tasks complete to account for the animation
+ // opening of the sidebar panel
+ await completionTimeout(INITIAL_MARK_COMPLETE_TIMEOUT);
+ markTasksAsSeen();
+ },
+ [markTasksAsSeen]
+ );
+
+ useEffect(() => {
+ markSeenOnOpen();
+
+ return () => {
+ window.clearTimeout(markCompletionTimeout.current);
+ window.clearTimeout(markCompletionSeenTimeout.current);
+ };
+ }, [markSeenOnOpen]);
+
+ return (
+
+
+ {description}
+
+
+
+
+ );
+}
+
+const Wrapper = styled(SidebarPanel)`
+ width: 100%;
+ @media (min-width: ${p => p.theme.breakpoints.xsmall}) {
+ width: 450px;
+ }
+`;
+
+const Content = styled('div')`
+ padding: ${space(3)};
+ display: flex;
+ flex-direction: column;
+ gap: ${space(1)};
+
+ p {
+ margin-bottom: ${space(1)};
+ }
+`;
+
+const TaskGroupWrapper = styled('div')`
+ border: 1px solid ${p => p.theme.border};
+ border-radius: ${p => p.theme.borderRadius};
+ padding: ${space(1)};
+
+ hr {
+ border-color: ${p => p.theme.translucentBorder};
+ margin: ${space(1)} -${space(1)};
+ }
+`;
+
+const TaskGroupHeader = styled('div')`
+ cursor: pointer;
+ display: grid;
+ grid-template-columns: 1fr max-content;
+ padding: ${space(1)} ${space(1.5)};
+ gap: ${space(1.5)};
+ position: relative;
+ border-radius: ${p => p.theme.borderRadius};
+ align-items: center;
+
+ p {
+ margin: 0;
+ font-size: ${p => p.theme.fontSizeSmall};
+ color: ${p => p.theme.subText};
+ }
+`;
+
+const TaskGroupBody = styled('div')`
+ border-radius: ${p => p.theme.borderRadius};
+`;
+
+const TaskGroupProgress = styled('div')<{completed?: boolean}>`
+ font-size: ${p => p.theme.fontSizeSmall};
+ font-weight: ${p => p.theme.fontWeightBold};
+ padding: ${space(0.75)} ${space(1.5)};
+ ${p =>
+ p.completed
+ ? css`
+ color: ${p.theme.green300};
+ `
+ : css`
+ color: ${p.theme.subText};
+ display: grid;
+ grid-template-columns: 1fr max-content;
+ align-items: center;
+ gap: ${space(1)};
+ `}
+`;
+
+const TaskWrapper = styled('div')<{completed?: boolean}>`
+ padding: ${space(1)} ${space(1.5)};
+ border-radius: ${p => p.theme.borderRadius};
+ display: grid;
+ grid-template-columns: 1fr max-content;
+ gap: ${space(1)};
+
+ p {
+ margin: 0;
+ font-size: ${p => p.theme.fontSizeSmall};
+ color: ${p => p.theme.subText};
+ }
+
+ ${p =>
+ p.completed
+ ? css`
+ strong {
+ opacity: 0.5;
+ }
+ align-items: center;
+ `
+ : css`
+ position: relative;
+ cursor: pointer;
+ align-items: flex-start;
+ `}
+`;
diff --git a/static/app/components/onboardingWizard/onboardingProjectsCard.tsx b/static/app/components/onboardingWizard/onboardingProjectsCard.tsx
index 79f5e81e5b299..fdb3721674604 100644
--- a/static/app/components/onboardingWizard/onboardingProjectsCard.tsx
+++ b/static/app/components/onboardingWizard/onboardingProjectsCard.tsx
@@ -26,14 +26,12 @@ export default function OnboardingProjectsCard({
});
};
- // TODO(Priscila): Reflect on this logic
const selectedProjectSlug = onboardingContext.data.selectedSDK?.key;
const project = selectedProjectSlug
? allProjects.find(p => p.slug === selectedProjectSlug)
- : undefined;
+ : allProjects[0];
- // Project selected during onboarding but not received first event
const projectHasFirstEvent = !project?.firstEvent;
if (!project || !projectHasFirstEvent) {
diff --git a/static/app/components/onboardingWizard/task.tsx b/static/app/components/onboardingWizard/task.tsx
index 58bc9e403d340..474d683a227f3 100644
--- a/static/app/components/onboardingWizard/task.tsx
+++ b/static/app/components/onboardingWizard/task.tsx
@@ -30,7 +30,7 @@ const recordAnalytics = (
organization: Organization,
action: string
) =>
- trackAnalytics('onboarding.wizard_clicked', {
+ trackAnalytics('quick_start.task_card_clicked', {
organization,
todo_id: task.task,
todo_title: task.title,
diff --git a/static/app/components/onboardingWizard/taskConfig.tsx b/static/app/components/onboardingWizard/taskConfig.tsx
index d9314500789b9..5b4fd84bdd50f 100644
--- a/static/app/components/onboardingWizard/taskConfig.tsx
+++ b/static/app/components/onboardingWizard/taskConfig.tsx
@@ -1,3 +1,4 @@
+import {css} from '@emotion/react';
import styled from '@emotion/styled';
import {openInviteMembersModal} from 'sentry/actionCreators/modal';
@@ -5,9 +6,13 @@ import {navigateTo} from 'sentry/actionCreators/navigation';
import type {Client} from 'sentry/api';
import type {OnboardingContextProps} from 'sentry/components/onboarding/onboardingContext';
import {filterSupportedTasks} from 'sentry/components/onboardingWizard/filterSupportedTasks';
-import {taskIsDone} from 'sentry/components/onboardingWizard/utils';
+import {
+ hasQuickStartUpdatesFeature,
+ taskIsDone,
+} from 'sentry/components/onboardingWizard/utils';
import {filterProjects} from 'sentry/components/performanceOnboarding/utils';
import {SidebarPanelKey} from 'sentry/components/sidebar/types';
+import {Tooltip} from 'sentry/components/tooltip';
import {sourceMaps} from 'sentry/data/platformCategories';
import {t} from 'sentry/locale';
import SidebarPanelStore from 'sentry/stores/sidebarPanelStore';
@@ -18,7 +23,7 @@ import type {
OnboardingTask,
OnboardingTaskDescriptor,
} from 'sentry/types/onboarding';
-import {OnboardingTaskKey} from 'sentry/types/onboarding';
+import {OnboardingTaskGroup, OnboardingTaskKey} from 'sentry/types/onboarding';
import type {Organization} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
import {isDemoWalkthrough} from 'sentry/utils/demoMode';
@@ -168,14 +173,19 @@ export function getOnboardingTasks({
{
task: OnboardingTaskKey.FIRST_PROJECT,
title: t('Create a project'),
- description: t(
- "Monitor in seconds by adding a simple lines of code to your project. It's as easy as microwaving leftover pizza."
- ),
+ description: hasQuickStartUpdatesFeature(organization)
+ ? t(
+ "Monitor in seconds by adding a few lines of code to your project. It's as easy as microwaving leftover pizza."
+ )
+ : t(
+ "Monitor in seconds by adding a simple lines of code to your project. It's as easy as microwaving leftover pizza."
+ ),
skippable: false,
requisites: [],
actionType: 'app',
location: `/organizations/${organization.slug}/projects/new/`,
display: true,
+ group: OnboardingTaskGroup.GETTING_STARTED,
},
{
task: OnboardingTaskKey.FIRST_EVENT,
@@ -200,10 +210,16 @@ export function getOnboardingTasks({
eventType="error"
onIssueReceived={() => !taskIsDone(task) && onCompleteTask()}
>
- {() => }
+ {() => (
+
+ )}
) : null
),
+ group: OnboardingTaskGroup.GETTING_STARTED,
},
{
task: OnboardingTaskKey.INVITE_MEMBER,
@@ -216,6 +232,7 @@ export function getOnboardingTasks({
actionType: 'action',
action: () => openInviteMembersModal({source: 'onboarding_widget'}),
display: true,
+ group: OnboardingTaskGroup.GETTING_STARTED,
},
{
task: OnboardingTaskKey.FIRST_INTEGRATION,
@@ -227,7 +244,34 @@ export function getOnboardingTasks({
requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
actionType: 'app',
location: `/settings/${organization.slug}/integrations/`,
- display: true,
+ display: !hasQuickStartUpdatesFeature(organization),
+ group: OnboardingTaskGroup.GETTING_STARTED,
+ },
+ {
+ task: OnboardingTaskKey.REAL_TIME_NOTIFICATIONS,
+ title: t('Real-time notifications'),
+ description: t(
+ 'Triage and resolving issues faster by integrating Sentry with messaging platforms like Slack, Discord and MS Teams.'
+ ),
+ skippable: true,
+ requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
+ actionType: 'app',
+ location: `/settings/${organization.slug}/integrations/?category=chat`,
+ display: hasQuickStartUpdatesFeature(organization),
+ group: OnboardingTaskGroup.GETTING_STARTED,
+ },
+ {
+ task: OnboardingTaskKey.LINK_SENTRY_TO_SOURCE_CODE,
+ title: t('Link Sentry to Source Code'),
+ description: t(
+ 'Resolve bugs faster with commit data and stack trace linking to your source code in GitHub, Gitlab and more.'
+ ),
+ skippable: true,
+ requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
+ actionType: 'app',
+ location: `/settings/${organization.slug}/integrations/?category=codeowners`,
+ display: hasQuickStartUpdatesFeature(organization),
+ group: OnboardingTaskGroup.GETTING_STARTED,
},
{
task: OnboardingTaskKey.SECOND_PLATFORM,
@@ -301,7 +345,11 @@ export function getOnboardingTasks({
eventType="transaction"
onIssueReceived={() => !taskIsDone(task) && onCompleteTask()}
>
- {() => }
+ {() => (
+
+ )}
) : null
),
@@ -354,7 +402,12 @@ export function getOnboardingTasks({
eventType="replay"
onIssueReceived={() => !taskIsDone(task) && onCompleteTask()}
>
- {() => }
+ {() => (
+
+ )}
) : null
),
@@ -362,14 +415,19 @@ export function getOnboardingTasks({
{
task: OnboardingTaskKey.RELEASE_TRACKING,
title: t('Track releases'),
- description: t(
- 'Take an in-depth look at the health of each and every release with crash analytics, errors, related issues and suspect commits.'
- ),
+ description: hasQuickStartUpdatesFeature(organization)
+ ? t(
+ 'Identify which release introduced an issue and track release health with crash analytics, errors, and adoption data.'
+ )
+ : t(
+ 'Take an in-depth look at the health of each and every release with crash analytics, errors, related issues and suspect commits.'
+ ),
skippable: true,
requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
actionType: 'app',
location: `/settings/${organization.slug}/projects/:projectId/release-tracking/`,
display: true,
+ group: OnboardingTaskGroup.GETTING_STARTED,
},
{
task: OnboardingTaskKey.SOURCEMAPS,
@@ -383,30 +441,6 @@ export function getOnboardingTasks({
location: 'https://docs.sentry.io/platforms/javascript/sourcemaps/',
display: hasPlatformWithSourceMaps(projects),
},
- {
- task: OnboardingTaskKey.USER_REPORTS,
- title: t('User crash reports'),
- description: t('Collect user feedback when your application crashes'),
- skippable: true,
- requisites: [
- OnboardingTaskKey.FIRST_PROJECT,
- OnboardingTaskKey.FIRST_EVENT,
- OnboardingTaskKey.USER_CONTEXT,
- ],
- actionType: 'app',
- location: `/settings/${organization.slug}/projects/:projectId/user-reports/`,
- display: false,
- },
- {
- task: OnboardingTaskKey.ISSUE_TRACKER,
- title: t('Set up issue tracking'),
- description: t('Link to Sentry issues within your issue tracker'),
- skippable: true,
- requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
- actionType: 'app',
- location: `/settings/${organization.slug}/projects/:projectId/plugins/`,
- display: false,
- },
{
task: OnboardingTaskKey.ALERT_RULE,
title: t('Configure an Issue Alert'),
@@ -418,6 +452,7 @@ export function getOnboardingTasks({
actionType: 'app',
location: getIssueAlertUrl({projects, organization, onboardingContext}),
display: true,
+ group: OnboardingTaskGroup.GETTING_STARTED,
},
{
task: OnboardingTaskKey.METRIC_ALERT,
@@ -473,22 +508,54 @@ export function getMergedTasks({organization, projects, onboardingContext}: Opti
}));
}
-const PulsingIndicator = styled('div')`
+const PulsingIndicator = styled('div')<{hasQuickStartUpdatesFeature?: boolean}>`
${pulsingIndicatorStyles};
- margin-right: ${space(1)};
+ ${p =>
+ p.hasQuickStartUpdatesFeature
+ ? css`
+ margin: 0 ${space(0.5)};
+ `
+ : css`
+ margin-right: ${space(1)};
+ `}
`;
const EventWaitingIndicator = styled(
- (p: React.HTMLAttributes & {text?: string}) => (
-
-
- {p.text || t('Waiting for event')}
-
- )
+ ({
+ hasQuickStartUpdatesFeature: quickStartUpdatesFeature,
+ text,
+ ...p
+ }: React.HTMLAttributes & {
+ hasQuickStartUpdatesFeature: boolean;
+ text?: string;
+ }) => {
+ if (quickStartUpdatesFeature) {
+ return (
+
+ );
+ }
+ return (
+
+
+ {text || t('Waiting for event')}
+
+ );
+ }
)`
display: flex;
align-items: center;
- flex-grow: 1;
- font-size: ${p => p.theme.fontSizeMedium};
- color: ${p => p.theme.pink400};
+ ${p =>
+ p.hasQuickStartUpdatesFeature
+ ? css`
+ height: 16px;
+ `
+ : css`
+ flex-grow: 1;
+ font-size: ${p.theme.fontSizeMedium};
+ color: ${p.theme.pink400};
+ `}
`;
diff --git a/static/app/components/onboardingWizard/utils.tsx b/static/app/components/onboardingWizard/utils.tsx
index 10935b1ca44ea..1374b3fc828b0 100644
--- a/static/app/components/onboardingWizard/utils.tsx
+++ b/static/app/components/onboardingWizard/utils.tsx
@@ -1,4 +1,5 @@
import type {OnboardingTask} from 'sentry/types/onboarding';
+import type {Organization} from 'sentry/types/organization';
export const taskIsDone = (task: OnboardingTask) =>
['complete', 'skipped'].includes(task.status);
@@ -11,3 +12,7 @@ export const findActiveTasks = (task: OnboardingTask) =>
export const findUpcomingTasks = (task: OnboardingTask) =>
task.requisiteTasks.length > 0 && !findCompleteTasks(task);
+
+export function hasQuickStartUpdatesFeature(organization: Organization) {
+ return organization.features?.includes('quick-start-updates');
+}
diff --git a/static/app/components/organizations/pageFilterBar.tsx b/static/app/components/organizations/pageFilterBar.tsx
index 19591a991d78e..24819cf3da231 100644
--- a/static/app/components/organizations/pageFilterBar.tsx
+++ b/static/app/components/organizations/pageFilterBar.tsx
@@ -70,7 +70,7 @@ const PageFilterBar = styled('div')<{condensed?: boolean}>`
/* Prevent date filter from shrinking below 6.5rem */
&:last-child {
- min-width: 6.5rem;
+ min-width: 4rem;
}
}
diff --git a/static/app/components/organizations/projectPageFilter/index.spec.tsx b/static/app/components/organizations/projectPageFilter/index.spec.tsx
index 1e9f62142b76c..dcd45ef9612bd 100644
--- a/static/app/components/organizations/projectPageFilter/index.spec.tsx
+++ b/static/app/components/organizations/projectPageFilter/index.spec.tsx
@@ -128,18 +128,10 @@ describe('ProjectPageFilter', function () {
// Activating the link triggers a route change
await userEvent.keyboard('{Enter}');
- // TODO(__SENTRY_USING_REACT_ROUTER_SIX): This first variant can be removed
- // once react-router 3 has been removed.
- try {
- expect(router.push).toHaveBeenCalledWith(
- `/organizations/${organization.slug}/projects/project-1/?project=1`
- );
- } catch {
- expect(router.push).toHaveBeenCalledWith({
- pathname: '/organizations/org-slug/projects/project-1/',
- query: {project: '1'},
- });
- }
+ expect(router.push).toHaveBeenCalledWith({
+ pathname: '/organizations/org-slug/projects/project-1/',
+ query: {project: '1'},
+ });
// Move focus to "Project Settings" link
await userEvent.keyboard('{ArrowRight}');
diff --git a/static/app/components/organizations/projectPageFilter/index.tsx b/static/app/components/organizations/projectPageFilter/index.tsx
index 8767b0bd55008..6917dd03b8dec 100644
--- a/static/app/components/organizations/projectPageFilter/index.tsx
+++ b/static/app/components/organizations/projectPageFilter/index.tsx
@@ -17,7 +17,6 @@ import BookmarkStar from 'sentry/components/projects/bookmarkStar';
import {ALL_ACCESS_PROJECTS} from 'sentry/constants/pageFilters';
import {IconOpen, IconSettings} from 'sentry/icons';
import {t, tct} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import type {Project} from 'sentry/types/project';
import {trackAnalytics} from 'sentry/utils/analytics';
import getRouteStringFromRoutes from 'sentry/utils/getRouteStringFromRoutes';
@@ -26,6 +25,7 @@ import usePageFilters from 'sentry/utils/usePageFilters';
import useProjects from 'sentry/utils/useProjects';
import useRouter from 'sentry/utils/useRouter';
import {useRoutes} from 'sentry/utils/useRoutes';
+import {useUser} from 'sentry/utils/useUser';
import {DesyncedFilterMessage} from '../pageFilters/desyncedFilter';
@@ -93,6 +93,7 @@ export function ProjectPageFilter({
footerMessage,
...selectProps
}: ProjectPageFilterProps) {
+ const user = useUser();
const router = useRouter();
const routes = useRoutes();
const organization = useOrganization();
@@ -106,13 +107,12 @@ export function ProjectPageFilter({
);
const showNonMemberProjects = useMemo(() => {
- const {isSuperuser} = ConfigStore.get('user');
const isOrgAdminOrManager =
organization.orgRole === 'owner' || organization.orgRole === 'manager';
const isOpenMembership = organization.features.includes('open-membership');
- return isSuperuser || isOrgAdminOrManager || isOpenMembership;
- }, [organization.orgRole, organization.features]);
+ return user.isSuperuser || isOrgAdminOrManager || isOpenMembership;
+ }, [user, organization.orgRole, organization.features]);
const nonMemberProjects = useMemo(
() => (showNonMemberProjects ? otherProjects : []),
diff --git a/static/app/components/overlay.tsx b/static/app/components/overlay.tsx
index bf29d2407ef63..26e0ecbf1ca09 100644
--- a/static/app/components/overlay.tsx
+++ b/static/app/components/overlay.tsx
@@ -124,7 +124,7 @@ const Overlay = styled(
: {style};
return (
-
+
{defined(arrowProps) && }
{children}
diff --git a/static/app/components/percentChange.spec.tsx b/static/app/components/percentChange.spec.tsx
index d44798124e696..463bcbaf93f19 100644
--- a/static/app/components/percentChange.spec.tsx
+++ b/static/app/components/percentChange.spec.tsx
@@ -26,4 +26,10 @@ describe('PercentChange', function () {
expect(screen.getByText('+5.52%')).toHaveAttribute('data-rating', 'good');
});
+
+ it('respects lack of polarity preference', () => {
+ render( );
+
+ expect(screen.getByText('+5.52%')).toHaveAttribute('data-rating', 'neutral');
+ });
});
diff --git a/static/app/components/percentChange.tsx b/static/app/components/percentChange.tsx
index df4ef92d26b91..b4a4ff31f7b81 100644
--- a/static/app/components/percentChange.tsx
+++ b/static/app/components/percentChange.tsx
@@ -34,7 +34,7 @@ export function PercentChange({
);
}
-function getPolarity(value: number): Polarity {
+export function getPolarity(value: number): Polarity {
if (value > 0) {
return '+';
}
@@ -46,7 +46,14 @@ function getPolarity(value: number): Polarity {
return '';
}
-function getPolarityRating(polarity: Polarity, preferredPolarity: Polarity): Rating {
+export function getPolarityRating(
+ polarity: Polarity,
+ preferredPolarity: Polarity
+): Rating {
+ if (preferredPolarity === '') {
+ return 'neutral';
+ }
+
if (polarity === preferredPolarity) {
return 'good';
}
@@ -58,7 +65,7 @@ function getPolarityRating(polarity: Polarity, preferredPolarity: Polarity): Rat
return 'neutral';
}
-const ColorizedRating = styled('div')<{
+export const ColorizedRating = styled('div')<{
rating: Rating;
}>`
color: ${p =>
diff --git a/static/app/components/performance/spanSearchQueryBuilder.tsx b/static/app/components/performance/spanSearchQueryBuilder.tsx
index 4b61f1827c3a2..66073db199cfb 100644
--- a/static/app/components/performance/spanSearchQueryBuilder.tsx
+++ b/static/app/components/performance/spanSearchQueryBuilder.tsx
@@ -76,11 +76,11 @@ export function SpanSearchQueryBuilder({
return placeholder ?? t('Search for spans, users, tags, and more');
}, [placeholder]);
- const customTags = useSpanFieldCustomTags({
+ const {data: customTags} = useSpanFieldCustomTags({
projects: projects ?? selection.projects,
});
- const supportedTags = useSpanFieldSupportedTags({
+ const {data: supportedTags} = useSpanFieldSupportedTags({
projects: projects ?? selection.projects,
});
@@ -140,7 +140,86 @@ export function SpanSearchQueryBuilder({
searchSource={searchSource}
filterKeySections={filterKeySections}
getTagValues={getSpanFilterTagValues}
- disallowFreeText
+ disallowUnsupportedFilters
+ recentSearches={SavedSearchType.SPAN}
+ showUnsubmittedIndicator
+ />
+ );
+}
+
+interface EAPSpanSearchQueryBuilderProps extends SpanSearchQueryBuilderProps {
+ numberTags: TagCollection;
+ stringTags: TagCollection;
+}
+
+export function EAPSpanSearchQueryBuilder({
+ initialQuery,
+ placeholder,
+ onSearch,
+ searchSource,
+ numberTags,
+ stringTags,
+}: EAPSpanSearchQueryBuilderProps) {
+ const api = useApi();
+ const organization = useOrganization();
+ const {selection} = usePageFilters();
+
+ const placeholderText = placeholder ?? t('Search for spans, users, tags, and more');
+
+ const tags = useMemo(() => {
+ return {...numberTags, ...stringTags};
+ }, [numberTags, stringTags]);
+
+ const filterKeySections = useMemo(() => {
+ const predefined = new Set(
+ SPANS_FILTER_KEY_SECTIONS.flatMap(section => section.children)
+ );
+ return [
+ ...SPANS_FILTER_KEY_SECTIONS,
+ {
+ value: 'custom_fields',
+ label: 'Custom Tags',
+ children: Object.keys(stringTags).filter(key => !predefined.has(key)),
+ },
+ ];
+ }, [stringTags]);
+
+ const getSpanFilterTagValues = useCallback(
+ async (tag: Tag, queryString: string) => {
+ if (isAggregateField(tag.key) || numberTags.hasOwnProperty(tag.key)) {
+ // We can't really auto suggest values for aggregate fields
+ // or measurements, so we simply don't
+ return Promise.resolve([]);
+ }
+
+ try {
+ const results = await fetchSpanFieldValues({
+ api,
+ orgSlug: organization.slug,
+ fieldKey: tag.key,
+ search: queryString,
+ projectIds: selection.projects.map(String),
+ endpointParams: normalizeDateTimeParams(selection.datetime),
+ dataset: 'spans',
+ });
+ return results.filter(({name}) => defined(name)).map(({name}) => name);
+ } catch (e) {
+ throw new Error(`Unable to fetch event field values: ${e}`);
+ }
+ },
+ [api, organization.slug, selection.projects, selection.datetime, numberTags]
+ );
+
+ return (
+ `
+const commonStyles = ({theme}: {theme: Theme}) => css`
cursor: default;
border-radius: ${theme.borderRadius};
box-shadow: 0 0 0 1px ${theme.background};
diff --git a/static/app/components/profiling/flamegraph/flamegraph.spec.tsx b/static/app/components/profiling/flamegraph/flamegraph.spec.tsx
index 4fefaa85b409a..e47d2700716ef 100644
--- a/static/app/components/profiling/flamegraph/flamegraph.spec.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraph.spec.tsx
@@ -165,7 +165,9 @@ describe('Flamegraph', function () {
{organization: initializeOrg().organization}
);
- const frames = await findAllByTestId(document.body, 'flamegraph-frame');
+ const frames = await findAllByTestId(document.body, 'flamegraph-frame', undefined, {
+ timeout: 5000,
+ });
// 1 for main view and 1 for minimap
expect(frames.length).toBe(2);
diff --git a/static/app/components/profiling/functionsMiniGrid.tsx b/static/app/components/profiling/functionsMiniGrid.tsx
index 6921775aab9ab..f1b71d308e54c 100644
--- a/static/app/components/profiling/functionsMiniGrid.tsx
+++ b/static/app/components/profiling/functionsMiniGrid.tsx
@@ -14,15 +14,7 @@ import {defined} from 'sentry/utils';
import type {EventsResults} from 'sentry/utils/profiling/hooks/types';
import {generateProfileFlamechartRouteWithHighlightFrame} from 'sentry/utils/profiling/routes';
-const functionsFields = [
- 'package',
- 'function',
- 'count()',
- 'sum()',
- 'examples()',
-] as const;
-
-type FunctionsField = (typeof functionsFields)[number];
+type FunctionsField = 'package' | 'function' | 'count()' | 'sum()' | 'examples()';
interface FunctionsMiniGridProps {
functions: EventsResults['data'];
diff --git a/static/app/components/profiling/profileEventsTable.tsx b/static/app/components/profiling/profileEventsTable.tsx
index 901bf51502675..c7c3282446320 100644
--- a/static/app/components/profiling/profileEventsTable.tsx
+++ b/static/app/components/profiling/profileEventsTable.tsx
@@ -316,43 +316,40 @@ function getProjectForRow(
return project ?? null;
}
-const FIELDS = [
- 'id',
- 'profile.id',
- 'profiler.id',
- 'thread.id',
- 'trace.transaction',
- 'trace',
- 'transaction',
- 'transaction.duration',
- 'precise.start_ts',
- 'precise.finish_ts',
- 'profile.duration',
- 'project',
- 'project.id',
- 'project.name',
- 'environment',
- 'timestamp',
- 'release',
- 'platform.name',
- 'device.arch',
- 'device.classification',
- 'device.locale',
- 'device.manufacturer',
- 'device.model',
- 'os.build',
- 'os.name',
- 'os.version',
- 'last_seen()',
- 'p50()',
- 'p75()',
- 'p95()',
- 'p99()',
- 'count()',
- 'user_misery()',
-] as const;
-
-type FieldType = (typeof FIELDS)[number];
+type FieldType =
+ | 'id'
+ | 'profile.id'
+ | 'profiler.id'
+ | 'thread.id'
+ | 'trace.transaction'
+ | 'trace'
+ | 'transaction'
+ | 'transaction.duration'
+ | 'precise.start_ts'
+ | 'precise.finish_ts'
+ | 'profile.duration'
+ | 'project'
+ | 'project.id'
+ | 'project.name'
+ | 'environment'
+ | 'timestamp'
+ | 'release'
+ | 'platform.name'
+ | 'device.arch'
+ | 'device.classification'
+ | 'device.locale'
+ | 'device.manufacturer'
+ | 'device.model'
+ | 'os.build'
+ | 'os.name'
+ | 'os.version'
+ | 'last_seen()'
+ | 'p50()'
+ | 'p75()'
+ | 'p95()'
+ | 'p99()'
+ | 'count()'
+ | 'user_misery()';
const RIGHT_ALIGNED_FIELDS = new Set([
'transaction.duration',
diff --git a/static/app/components/profiling/suspectFunctions/functionsTable.tsx b/static/app/components/profiling/suspectFunctions/functionsTable.tsx
index 94f2b72f9ff1d..73f2c84209951 100644
--- a/static/app/components/profiling/suspectFunctions/functionsTable.tsx
+++ b/static/app/components/profiling/suspectFunctions/functionsTable.tsx
@@ -167,16 +167,13 @@ function ProfilingFunctionsTableCell({
}
}
-const FIELDS = [
- 'function',
- 'package',
- 'count()',
- 'p75()',
- 'sum()',
- 'examples()',
-] as const;
-
-type TableColumnKey = (typeof FIELDS)[number];
+type TableColumnKey =
+ | 'function'
+ | 'package'
+ | 'count()'
+ | 'p75()'
+ | 'sum()'
+ | 'examples()';
type TableDataRow = Record;
diff --git a/static/app/components/quickTrace/index.spec.tsx b/static/app/components/quickTrace/index.spec.tsx
index 1565160f69181..254aa2acefc80 100644
--- a/static/app/components/quickTrace/index.spec.tsx
+++ b/static/app/components/quickTrace/index.spec.tsx
@@ -3,11 +3,12 @@ import {render, screen} from 'sentry-test/reactTestingLibrary';
import QuickTrace from 'sentry/components/quickTrace';
import type {Event} from 'sentry/types/event';
+import type {Organization} from 'sentry/types/organization';
import type {QuickTraceEvent} from 'sentry/utils/performance/quickTrace/types';
describe('Quick Trace', function () {
- let location;
- let organization;
+ let location: any;
+ let organization: Organization;
const initialize = () => {
const context = initializeOrg();
diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx
index a1ad9f1214b6e..ae80f6b365c4d 100644
--- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx
+++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx
@@ -22,7 +22,6 @@ import {space} from 'sentry/styles/space';
import type {Extraction} from 'sentry/utils/replays/extractHtml';
import {getReplayDiffOffsetsFromFrame} from 'sentry/utils/replays/getDiffTimestamps';
import getFrameDetails from 'sentry/utils/replays/getFrameDetails';
-import useExtractDomNodes from 'sentry/utils/replays/hooks/useExtractDomNodes';
import type ReplayReader from 'sentry/utils/replays/replayReader';
import type {
ErrorFrame,
@@ -45,6 +44,7 @@ import useOrganization from 'sentry/utils/useOrganization';
import useProjectFromSlug from 'sentry/utils/useProjectFromSlug';
import IconWrapper from 'sentry/views/replays/detail/iconWrapper';
import TimestampButton from 'sentry/views/replays/detail/timestampButton';
+import type {OnExpandCallback} from 'sentry/views/replays/detail/useVirtualizedInspector';
type MouseCallback = (frame: ReplayFrame, nodeId?: number) => void;
@@ -53,7 +53,7 @@ const FRAMES_WITH_BUTTONS = ['replay.hydrate-error'];
interface Props {
frame: ReplayFrame;
onClick: null | MouseCallback;
- onInspectorExpanded: (path: string, expandedState: Record) => void;
+ onInspectorExpanded: OnExpandCallback;
onMouseEnter: MouseCallback;
onMouseLeave: MouseCallback;
startTimestampMs: number;
@@ -104,7 +104,7 @@ function BreadcrumbItem({
}, [description, expandPaths, onInspectorExpanded]);
const renderComparisonButton = useCallback(() => {
- return isBreadcrumbFrame(frame) && isHydrationErrorFrame(frame) ? (
+ return isBreadcrumbFrame(frame) && isHydrationErrorFrame(frame) && replay ? (
) : null;
}, [frame, replay]);
@@ -112,7 +112,7 @@ function BreadcrumbItem({
const renderWebVital = useCallback(() => {
return isSpanFrame(frame) && isWebVitalFrame(frame) ? (
) : null;
- }, [expandPaths, frame, onInspectorExpanded, onMouseEnter, onMouseLeave, replay]);
+ }, [
+ expandPaths,
+ extraction?.selectors,
+ frame,
+ onInspectorExpanded,
+ onMouseEnter,
+ onMouseLeave,
+ ]);
const renderCodeSnippet = useCallback(() => {
return (
@@ -221,7 +228,7 @@ function BreadcrumbItem({
}
function WebVitalData({
- replay,
+ selectors,
frame,
expandPaths,
onInspectorExpanded,
@@ -230,14 +237,11 @@ function WebVitalData({
}: {
expandPaths: string[] | undefined;
frame: WebVitalFrame;
- onInspectorExpanded: (path: string, expandedState: Record) => void;
+ onInspectorExpanded: OnExpandCallback;
onMouseEnter: MouseCallback;
onMouseLeave: MouseCallback;
- replay: ReplayReader | null;
+ selectors: Map | undefined;
}) {
- const {data: frameToExtraction} = useExtractDomNodes({replay});
- const selectors = frameToExtraction?.get(frame)?.selectors;
-
const webVitalData = {value: frame.data.value};
if (isCLSFrame(frame) && frame.data.attributions && selectors) {
const layoutShifts: {[x: string]: ReactNode[]}[] = [];
@@ -277,7 +281,7 @@ function WebVitalData({
if (layoutShifts.length) {
webVitalData['Layout shifts'] = layoutShifts;
}
- } else if (selectors?.size) {
+ } else if (selectors) {
selectors.forEach((key, value) => {
webVitalData[key] = (
) : null}
diff --git a/static/app/components/replays/breadcrumbs/selectorList.tsx b/static/app/components/replays/breadcrumbs/selectorList.tsx
index 26c1ec9b0b5d2..feda0ae8f644e 100644
--- a/static/app/components/replays/breadcrumbs/selectorList.tsx
+++ b/static/app/components/replays/breadcrumbs/selectorList.tsx
@@ -13,12 +13,12 @@ export default function SelectorList({frame}: {frame: ClickFrame}) {
const organization = useOrganization();
const componentName = frame.data.node?.attributes['data-sentry-component'];
- const lastComponentIndex =
- frame.message.lastIndexOf('>') === -1 ? 0 : frame.message.lastIndexOf('>') + 2;
+ const indexOfArrow = frame.message?.lastIndexOf('>') ?? -1;
+ const lastComponentIndex = indexOfArrow === -1 ? 0 : indexOfArrow + 2;
return componentName ? (
- {frame.message.substring(0, lastComponentIndex)}
+ {frame.message?.substring(0, lastComponentIndex)}
= [];
- let start = -1;
- let end = -1;
+ let start = startTimestampMs;
- for (const currFrame of frames) {
- // add metrics for frame coming after a background frame to see how often we have bad data
- if (start !== -1) {
- trackAnalytics('replay.frame-after-background', {
- organization,
- frame: getFrameOpOrCategory(currFrame),
- });
- }
-
- // only considered start of gap if background frame hasn't been found yet
- if (start === -1 && isBackgroundFrame(currFrame)) {
- start = currFrame.timestampMs - startTimestampMs;
- }
-
- // gap only ends if a frame that's not a background frame or error frame has been found
- if (start !== -1 && !isBackgroundFrame(currFrame) && !isErrorFrame(currFrame)) {
- end = currFrame.timestampMs - startTimestampMs;
- }
-
- // create gap if we found have start (background frame) and end (another frame)
- if (start !== -1 && end !== -1) {
+ // create gap in timeline when there is a gap between video events
+ for (const video of videoEvents) {
+ if (start < video.timestamp) {
ranges.push({
- left: toPercent(start / durationMs),
- width: toPercent((end - start) / durationMs),
+ left: toPercent((start - startTimestampMs) / durationMs),
+ width: toPercent((video.timestamp - start) / durationMs),
});
- start = -1;
- end = -1;
}
+ start = video.timestamp + video.duration;
}
- // create gap if we still have start (background frame) until end of replay
- if (start !== -1) {
+ // add gap at the end if the last video segment ends before the replay ends
+ if (videoEvents.length && start < startTimestampMs + durationMs) {
ranges.push({
- left: toPercent(start / durationMs),
- width: toPercent((durationMs - start) / durationMs),
+ left: toPercent((start - startTimestampMs) / durationMs),
+ width: toPercent((startTimestampMs + durationMs - start) / durationMs),
});
}
@@ -73,7 +43,7 @@ export default function TimelineGaps({durationMs, startTimestampMs, frames}: Pro
return (
{t('Slider Diff')}
{t('Side By Side Diff')}
- {t('HTML Diff')}
+
+ {t('HTML Diff')}
+
diff --git a/static/app/components/replays/diff/replaySideBySideImageDiff.tsx b/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
index 1457afe04c9d2..757f13e8fa192 100644
--- a/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
+++ b/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
@@ -1,21 +1,22 @@
import styled from '@emotion/styled';
import {Flex} from 'sentry/components/container/flex';
-import {Provider as ReplayContextProvider} from 'sentry/components/replays/replayContext';
-import ReplayPlayer from 'sentry/components/replays/replayPlayer';
+import ReplayPlayer from 'sentry/components/replays/player/replayPlayer';
+import ReplayPlayerMeasurer from 'sentry/components/replays/player/replayPlayerMeasurer';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
+import {ReplayPlayerEventsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerEventsContext';
+import {ReplayPlayerPluginsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerPluginsContext';
+import {ReplayPlayerStateContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerStateContext';
import type ReplayReader from 'sentry/utils/replays/replayReader';
interface Props {
leftOffsetMs: number;
- replay: null | ReplayReader;
+ replay: ReplayReader;
rightOffsetMs: number;
}
export function ReplaySideBySideImageDiff({leftOffsetMs, replay, rightOffsetMs}: Props) {
- const fetching = false;
-
return (
@@ -28,31 +29,24 @@ export function ReplaySideBySideImageDiff({leftOffsetMs, replay, rightOffsetMs}:
-
-
-
-
-
-
-
- {rightOffsetMs > 0 ? (
+
+
+
+
+
+ {style => }
+
+
+
-
+
+
+ {style => }
+
+
- ) : (
-
- )}
-
+
+
);
diff --git a/static/app/components/replays/diff/replaySliderDiff.tsx b/static/app/components/replays/diff/replaySliderDiff.tsx
index aceaae8ae40ee..82cae408c87be 100644
--- a/static/app/components/replays/diff/replaySliderDiff.tsx
+++ b/static/app/components/replays/diff/replaySliderDiff.tsx
@@ -19,7 +19,7 @@ import {useResizableDrawer} from 'sentry/utils/useResizableDrawer';
interface Props {
leftOffsetMs: number;
- replay: null | ReplayReader;
+ replay: ReplayReader;
rightOffsetMs: number;
minHeight?: `${number}px` | `${number}%`;
}
@@ -67,7 +67,19 @@ export function ReplaySliderDiff({
);
}
-function DiffSides({leftOffsetMs, replay, rightOffsetMs, viewDimensions, width}) {
+function DiffSides({
+ leftOffsetMs,
+ replay,
+ rightOffsetMs,
+ viewDimensions,
+ width,
+}: {
+ leftOffsetMs: number;
+ replay: ReplayReader;
+ rightOffsetMs: number;
+ viewDimensions: {height: number; width: number};
+ width: string | undefined;
+}) {
const rightSideElem = useRef(null);
const dividerElem = useRef(null);
diff --git a/static/app/components/replays/diff/replayTextDiff.tsx b/static/app/components/replays/diff/replayTextDiff.tsx
index ab653650d1521..6f5890f40da9d 100644
--- a/static/app/components/replays/diff/replayTextDiff.tsx
+++ b/static/app/components/replays/diff/replayTextDiff.tsx
@@ -12,7 +12,7 @@ import type ReplayReader from 'sentry/utils/replays/replayReader';
interface Props {
leftOffsetMs: number;
- replay: null | ReplayReader;
+ replay: ReplayReader;
rightOffsetMs: number;
}
diff --git a/static/app/components/replays/playerDOMAlert.spec.tsx b/static/app/components/replays/playerDOMAlert.spec.tsx
deleted file mode 100644
index b0f65c4a5eb90..0000000000000
--- a/static/app/components/replays/playerDOMAlert.spec.tsx
+++ /dev/null
@@ -1,51 +0,0 @@
-import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
-import {resetMockDate, setMockDate} from 'sentry-test/utils';
-
-import localStorage from 'sentry/utils/localStorage';
-
-import PlayerDOMAlert from './playerDOMAlert';
-
-jest.mock('sentry/utils/localStorage');
-
-const mockGetItem = jest.mocked(localStorage.getItem);
-
-const now = new Date('2020-01-01');
-
-describe('PlayerDOMAlert', () => {
- beforeEach(() => {
- mockGetItem.mockReset();
- setMockDate(now);
- });
- afterEach(() => {
- resetMockDate();
- });
-
- it('should render the alert when local storage key is not set', () => {
- render( );
-
- expect(screen.getByTestId('player-dom-alert')).toBeVisible();
- });
-
- it('should not render the alert when the local storage key is set', () => {
- mockGetItem.mockImplementationOnce(() => now.getTime().toString());
- render( );
-
- expect(screen.queryByTestId('player-dom-alert')).not.toBeInTheDocument();
- });
-
- it('should be dismissable', async () => {
- render( );
-
- expect(screen.getByTestId('player-dom-alert')).toBeVisible();
-
- await userEvent.click(screen.getByLabelText('Close Alert'));
-
- expect(screen.queryByTestId('player-dom-alert')).not.toBeInTheDocument();
- await waitFor(() =>
- expect(localStorage.setItem).toHaveBeenCalledWith(
- 'replay-player-dom-alert-dismissed',
- '"1577836800000"'
- )
- );
- });
-});
diff --git a/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx b/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx
index 2ce28b8155cca..f180102919ed2 100644
--- a/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx
+++ b/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx
@@ -1,6 +1,5 @@
import {Fragment} from 'react';
-import ObjectInspector from 'sentry/components/objectInspector';
import ReplayPreferenceDropdown from 'sentry/components/replays/preferences/replayPreferenceDropdown';
import {
LocalStorageReplayPreferences,
@@ -9,6 +8,7 @@ import {
} from 'sentry/components/replays/preferences/replayPreferences';
import JSXNode from 'sentry/components/stories/jsxNode';
import SideBySide from 'sentry/components/stories/sideBySide';
+import StructuredEventData from 'sentry/components/structuredEventData';
import storyBook from 'sentry/stories/storyBook';
import {
ReplayPreferencesContextProvider,
@@ -98,5 +98,5 @@ export default storyBook(ReplayPreferenceDropdown, story => {
function DebugReplayPrefsState() {
const [prefs] = useReplayPrefs();
- return ;
+ return ;
}
diff --git a/static/app/components/replays/preferences/replayPreferenceDropdown.tsx b/static/app/components/replays/preferences/replayPreferenceDropdown.tsx
index b174d3caf2f8a..9c87dec13b416 100644
--- a/static/app/components/replays/preferences/replayPreferenceDropdown.tsx
+++ b/static/app/components/replays/preferences/replayPreferenceDropdown.tsx
@@ -10,9 +10,11 @@ const timestampOptions: ('relative' | 'absolute')[] = ['relative', 'absolute'];
export default function ReplayPreferenceDropdown({
speedOptions,
hideFastForward = false,
+ isLoading,
}: {
speedOptions: number[];
hideFastForward?: boolean;
+ isLoading?: boolean;
}) {
const [prefs, setPrefs] = useReplayPrefs();
@@ -20,6 +22,7 @@ export default function ReplayPreferenceDropdown({
return (
(
void;
hideFastForward?: boolean;
+ isLoading?: boolean;
speedOptions?: number[];
}
-function ReplayPlayPauseBar() {
+function ReplayPlayPauseBar({isLoading}: {isLoading?: boolean}) {
const {currentTime, replay, setCurrentTime} = useReplayContext();
return (
@@ -35,9 +36,11 @@ function ReplayPlayPauseBar() {
icon={ }
onClick={() => setCurrentTime(currentTime - 10 * SECOND)}
aria-label={t('Rewind 10 seconds')}
+ disabled={isLoading}
/>
-
+
}
@@ -64,6 +67,7 @@ export default function ReplayController({
toggleFullscreen,
hideFastForward = false,
speedOptions = [0.1, 0.25, 0.5, 1, 2, 4, 8, 16],
+ isLoading,
}: Props) {
const barRef = useRef(null);
const [isCompact, setIsCompact] = useState(false);
@@ -83,12 +87,13 @@ export default function ReplayController({
return (
-
+
-
+
diff --git a/static/app/components/replays/replayPlayPauseButton.tsx b/static/app/components/replays/replayPlayPauseButton.tsx
index 4c09a7d6893ac..0ffc20236a080 100644
--- a/static/app/components/replays/replayPlayPauseButton.tsx
+++ b/static/app/components/replays/replayPlayPauseButton.tsx
@@ -4,7 +4,7 @@ import {useReplayContext} from 'sentry/components/replays/replayContext';
import {IconPause, IconPlay, IconRefresh} from 'sentry/icons';
import {t} from 'sentry/locale';
-function ReplayPlayPauseButton(props: BaseButtonProps) {
+function ReplayPlayPauseButton(props: BaseButtonProps & {isLoading?: boolean}) {
const {isFinished, isPlaying, restart, togglePlayPause} = useReplayContext();
return isFinished ? (
@@ -23,6 +23,7 @@ function ReplayPlayPauseButton(props: BaseButtonProps) {
onClick={() => togglePlayPause(!isPlaying)}
aria-label={isPlaying ? t('Pause') : t('Play')}
priority="primary"
+ disabled={props.isLoading}
{...props}
/>
);
diff --git a/static/app/components/replays/replayPlayer.tsx b/static/app/components/replays/replayPlayer.tsx
index c63a4836b81d7..9b3160f52b3f5 100644
--- a/static/app/components/replays/replayPlayer.tsx
+++ b/static/app/components/replays/replayPlayer.tsx
@@ -14,7 +14,7 @@ import {useReplayContext} from 'sentry/components/replays/replayContext';
import {trackAnalytics} from 'sentry/utils/analytics';
import useOrganization from 'sentry/utils/useOrganization';
-import PlayerDOMAlert from './playerDOMAlert';
+import UnmaskAlert from './unmaskAlert';
type Dimensions = ReturnType['dimensions'];
@@ -94,8 +94,17 @@ function BasePlayerRoot({
isFetching,
isFinished,
isVideoReplay,
+ replay,
} = useReplayContext();
+ const sdkOptions = replay?.getSDKOptions();
+
+ const hasDefaultMaskSettings = sdkOptions
+ ? Boolean(
+ sdkOptions.maskAllInputs && sdkOptions.maskAllText && sdkOptions.blockAllMedia
+ )
+ : true;
+
const windowEl = useRef(null);
const viewEl = useRef(null);
@@ -176,7 +185,9 @@ function BasePlayerRoot({
{fastForwardSpeed ? : null}
{isBuffering || isVideoBuffering ? : null}
- {isPreview || isVideoReplay || isFetching ? null : }
+ {isPreview || isVideoReplay || isFetching || !hasDefaultMaskSettings ? null : (
+
+ )}
{isFetching ? : null}
diff --git a/static/app/components/replays/replayView.tsx b/static/app/components/replays/replayView.tsx
index 8f25c0a340396..8cc21c43221a3 100644
--- a/static/app/components/replays/replayView.tsx
+++ b/static/app/components/replays/replayView.tsx
@@ -69,6 +69,7 @@ function ReplayView({toggleFullscreen, isLoading}: Props) {
{isFullscreen ? (
diff --git a/static/app/components/replays/timeAndScrubberGrid.tsx b/static/app/components/replays/timeAndScrubberGrid.tsx
index 63a20a0f5d636..7e29d5e0785ca 100644
--- a/static/app/components/replays/timeAndScrubberGrid.tsx
+++ b/static/app/components/replays/timeAndScrubberGrid.tsx
@@ -19,10 +19,11 @@ import {useReplayPrefs} from 'sentry/utils/replays/playback/providers/replayPref
type TimeAndScrubberGridProps = {
isCompact?: boolean;
+ isLoading?: boolean;
showZoom?: boolean;
};
-function TimelineSizeBar() {
+function TimelineSizeBar({isLoading}: {isLoading?: boolean}) {
const {replay} = useReplayContext();
const [timelineScale, setTimelineScale] = useTimelineScale();
const durationMs = replay?.getDurationMs();
@@ -37,7 +38,7 @@ function TimelineSizeBar() {
borderless
onClick={() => setTimelineScale(Math.max(timelineScale - 1, 1))}
aria-label={t('Zoom out')}
- disabled={timelineScale === 1}
+ disabled={timelineScale === 1 || isLoading}
/>
{timelineScale}
@@ -50,7 +51,7 @@ function TimelineSizeBar() {
borderless
onClick={() => setTimelineScale(Math.min(timelineScale + 1, maxScale))}
aria-label={t('Zoom in')}
- disabled={timelineScale === maxScale}
+ disabled={timelineScale === maxScale || isLoading}
/>
);
@@ -59,6 +60,7 @@ function TimelineSizeBar() {
export default function TimeAndScrubberGrid({
isCompact = false,
showZoom = false,
+ isLoading,
}: TimeAndScrubberGridProps) {
const {currentTime, replay} = useReplayContext();
const [prefs] = useReplayPrefs();
@@ -83,7 +85,7 @@ export default function TimeAndScrubberGrid({
- {showZoom ? : null}
+ {showZoom ? : null}
diff --git a/static/app/components/replays/unmaskAlert.spec.tsx b/static/app/components/replays/unmaskAlert.spec.tsx
new file mode 100644
index 0000000000000..500cb9d9ae9dc
--- /dev/null
+++ b/static/app/components/replays/unmaskAlert.spec.tsx
@@ -0,0 +1,85 @@
+import {ReplayRecordFixture} from 'sentry-fixture/replayRecord';
+
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+import {resetMockDate, setMockDate} from 'sentry-test/utils';
+
+import useUserViewedReplays from 'sentry/components/replays/useUserViewedReplays';
+import localStorage from 'sentry/utils/localStorage';
+
+import UnmaskAlert from './unmaskAlert';
+
+jest.mock('sentry/utils/localStorage');
+jest.mock('sentry/components/replays/useUserViewedReplays.tsx');
+
+const mockGetItem = jest.mocked(localStorage.getItem);
+
+const now = new Date('2020-01-01');
+
+describe('UnmaskAlert', () => {
+ beforeEach(() => {
+ mockGetItem.mockReset();
+ setMockDate(now);
+ });
+ afterEach(() => {
+ resetMockDate();
+ });
+
+ it('should render the alert when local storage key is not set and user has viewed <= 3 replays', () => {
+ jest.mocked(useUserViewedReplays).mockReturnValue({
+ isPending: false,
+ isError: false,
+ data: {data: [ReplayRecordFixture(), ReplayRecordFixture(), ReplayRecordFixture()]},
+ });
+ render( );
+
+ expect(screen.getByTestId('unmask-alert')).toBeVisible();
+ });
+
+ it('should not render the alert when the local storage key is set', () => {
+ mockGetItem.mockImplementationOnce(() => now.getTime().toString());
+ render( );
+
+ expect(screen.queryByTestId('unmask-alert')).not.toBeInTheDocument();
+ });
+
+ it('should not render the alert if the user has viewed > 3 replays', () => {
+ jest.mocked(useUserViewedReplays).mockReturnValue({
+ isPending: false,
+ isError: false,
+ data: {
+ data: [
+ ReplayRecordFixture(),
+ ReplayRecordFixture(),
+ ReplayRecordFixture(),
+ ReplayRecordFixture(),
+ ],
+ },
+ });
+ render( );
+
+ expect(screen.queryByTestId('unmask-alert')).not.toBeInTheDocument();
+ });
+
+ it('should be dismissable', async () => {
+ jest.mocked(useUserViewedReplays).mockReturnValue({
+ isPending: false,
+ isError: false,
+ data: {
+ data: [ReplayRecordFixture(), ReplayRecordFixture()],
+ },
+ });
+ render( );
+
+ expect(screen.getByTestId('unmask-alert')).toBeVisible();
+
+ await userEvent.click(screen.getByLabelText('Close Alert'));
+
+ expect(screen.queryByTestId('unmask-alert')).not.toBeInTheDocument();
+ await waitFor(() =>
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ 'replay-unmask-alert-dismissed',
+ '"1577836800000"'
+ )
+ );
+ });
+});
diff --git a/static/app/components/replays/playerDOMAlert.tsx b/static/app/components/replays/unmaskAlert.tsx
similarity index 54%
rename from static/app/components/replays/playerDOMAlert.tsx
rename to static/app/components/replays/unmaskAlert.tsx
index 49c46e50199b8..508a33f096b8c 100644
--- a/static/app/components/replays/playerDOMAlert.tsx
+++ b/static/app/components/replays/unmaskAlert.tsx
@@ -1,25 +1,37 @@
import styled from '@emotion/styled';
import {Button} from 'sentry/components/button';
+import ExternalLink from 'sentry/components/links/externalLink';
+import useUserViewedReplays from 'sentry/components/replays/useUserViewedReplays';
import {IconClose, IconInfo} from 'sentry/icons';
-import {t} from 'sentry/locale';
+import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import useDismissAlert from 'sentry/utils/useDismissAlert';
-const LOCAL_STORAGE_KEY = 'replay-player-dom-alert-dismissed';
+const LOCAL_STORAGE_KEY = 'replay-unmask-alert-dismissed';
-function PlayerDOMAlert() {
+function UnmaskAlert() {
const {dismiss, isDismissed} = useDismissAlert({key: LOCAL_STORAGE_KEY});
+ const {data, isError, isPending} = useUserViewedReplays();
- if (isDismissed) {
+ if (isDismissed || isError || isPending || (data && data.data.length > 3)) {
return null;
}
return (
-
-
+
+
- {t('Right click & inspect your app’s DOM with your browser')}
+
+ {tct(
+ 'Unmask non-sensitive text (****) and media (img, svg, video). [link:Learn more].',
+ {
+ link: (
+
+ ),
+ }
+ )}
+
-
-
+
+
);
}
-export default PlayerDOMAlert;
+export default UnmaskAlert;
-const DOMAlertContainer = styled('div')`
+const UnmaskAlertContainer = styled('div')`
position: absolute;
bottom: ${space(1)};
left: 0;
@@ -44,7 +56,7 @@ const DOMAlertContainer = styled('div')`
pointer-events: none;
`;
-const DOMAlert = styled('div')`
+const Alert = styled('div')`
display: inline-flex;
align-items: flex-start;
justify-items: center;
@@ -55,6 +67,15 @@ const DOMAlert = styled('div')`
border-radius: ${p => p.theme.borderRadius};
gap: 0 ${space(1)};
line-height: 1em;
+ a {
+ color: ${p => p.theme.white};
+ pointer-events: all;
+ text-decoration: underline;
+ }
+ a:hover {
+ color: ${p => p.theme.white};
+ opacity: 0.5;
+ }
`;
const StyledIconInfo = styled(IconInfo)`
diff --git a/static/app/components/replays/useUserViewedReplays.tsx b/static/app/components/replays/useUserViewedReplays.tsx
new file mode 100644
index 0000000000000..a7f258e5b6610
--- /dev/null
+++ b/static/app/components/replays/useUserViewedReplays.tsx
@@ -0,0 +1,19 @@
+import {useApiQuery} from 'sentry/utils/queryClient';
+import useOrganization from 'sentry/utils/useOrganization';
+import type {ReplayRecord} from 'sentry/views/replays/types';
+
+type RawQueryData = {
+ data: ReplayRecord[];
+};
+
+export default function useUserViewedReplays() {
+ const organization = useOrganization();
+ const {data, isError, isPending} = useApiQuery(
+ [
+ `/organizations/${organization.slug}/replays/`,
+ {query: {query: `viewed_by_me:true`}},
+ ],
+ {staleTime: 0}
+ );
+ return {data, isError, isPending};
+}
diff --git a/static/app/components/route.tsx b/static/app/components/route.tsx
index 3dcb8a4d2f595..3d48bf85ad8c7 100644
--- a/static/app/components/route.tsx
+++ b/static/app/components/route.tsx
@@ -1,16 +1,13 @@
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import type {IndexRouteProps, PlainRoute, RouteProps} from 'react-router';
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {IndexRoute as BaseIndexRoute, Route as BaseRoute} from 'react-router';
+import type {
+ IndexRedirectProps,
+ IndexRouteProps,
+ RedirectProps,
+ RouteProps,
+} from 'sentry/types/legacyReactRouter';
-import {USING_CUSTOMER_DOMAIN} from 'sentry/constants';
-import withDomainRedirect from 'sentry/utils/withDomainRedirect';
-import withDomainRequired from 'sentry/utils/withDomainRequired';
-
-// This module contains customized react-router route components used to
-// construct the app routing tree.
-//
-// The primary customization here relates to supporting rendering dual-routes for customer domains
+// This module contains the "fake" react components that are used as we migrade
+// off of react-router 3 to 6. The shims in the utils/reactRouter6Compat module
+// read the props off tese components and construct a real react router 6 tree.
type CustomProps = {
/**
@@ -34,68 +31,26 @@ type CustomProps = {
interface SentryRouteProps extends React.PropsWithChildren {}
-type RouteElement = React.ReactElement;
-
-// The original createRouteFromReactElement extracted from the base route. This
-// is not properly typed hence the ts-ignore.
-//
-// @ts-ignore
-const createRouteFromReactElement = BaseRoute.createRouteFromReactElement;
-
-/**
- * Customized React Router Route configuration component.
- */
-const Route = BaseRoute as React.ComponentClass;
-
-// We override the createRouteFromReactElement property to provide support for
-// the withOrgPath property.
-//
-// XXX(epurkhiser): It is important to note! The `Route` component is a
-// CONFIGURATION ONLY COMPONENT. It DOES NOT render! This function is part of
-// the react-router magic internals that are used to build the route tree by
-// traversing the component tree, that is why this logic lives here and not
-// inside a custom Route component.
-//
-// To understand deeper how this works, see [0].
-//
-// When `withOrgPath` is provided to the Route configuration component the
-// react-router router builder will use this function which splits the single
-// Route into two, one for the route with :orgId and one for the new-style
-// route.
-//
-// [0]: https://github.com/remix-run/react-router/blob/850de933444d260bfc5460135d308f9d74b52c97/modules/RouteUtils.js#L15
-//
-// @ts-ignore
-Route.createRouteFromReactElement = function (element: RouteElement): PlainRoute {
- const {withOrgPath, component, path} = element.props;
-
- if (!withOrgPath) {
- return createRouteFromReactElement(element);
- }
-
- const childRoutes: PlainRoute[] = [
- {
- ...createRouteFromReactElement(element),
- path: `/organizations/:orgId${path}`,
- component: withDomainRedirect(component ?? NoOp),
- },
- ];
-
- if (USING_CUSTOMER_DOMAIN) {
- childRoutes.unshift({
- ...createRouteFromReactElement(element),
- path,
- component: withDomainRequired(component ?? NoOp),
- });
- }
-
- return {childRoutes};
-};
+export function Route(_props: SentryRouteProps) {
+ // XXX: These routes are NEVER rendered
+ return null;
+}
+Route.displayName = 'Route';
-function NoOp({children}: {children: JSX.Element}) {
- return children;
+export function IndexRoute(_props: IndexRouteProps & CustomProps) {
+ // XXX: These routes are NEVER rendered
+ return null;
}
+IndexRoute.displayName = 'IndexRoute';
-const IndexRoute = BaseIndexRoute as React.ComponentClass;
+export function Redirect(_props: RedirectProps) {
+ // XXX: These routes are NEVER rendered
+ return null;
+}
+Redirect.displayName = 'Redirect';
-export {Route, IndexRoute};
+export function IndexRedirect(_props: IndexRedirectProps) {
+ // XXX: These routes are NEVER rendered
+ return null;
+}
+IndexRedirect.displayName = 'IndexRedirect';
diff --git a/static/app/components/search/sources/apiSource.spec.tsx b/static/app/components/search/sources/apiSource.spec.tsx
index dc3fbb7d729ca..8c718c77b2878 100644
--- a/static/app/components/search/sources/apiSource.spec.tsx
+++ b/static/app/components/search/sources/apiSource.spec.tsx
@@ -15,13 +15,13 @@ import ConfigStore from 'sentry/stores/configStore';
describe('ApiSource', function () {
const {organization, router} = initializeOrg();
- let orgsMock;
- let projectsMock;
- let teamsMock;
- let membersMock;
- let shortIdMock;
- let eventIdMock;
- let configState;
+ let orgsMock: jest.Mock;
+ let projectsMock: jest.Mock;
+ let teamsMock: jest.Mock;
+ let membersMock: jest.Mock;
+ let shortIdMock: jest.Mock;
+ let eventIdMock: jest.Mock;
+ let configState: ReturnType;
const defaultProps: ComponentProps = {
query: '',
diff --git a/static/app/components/search/sources/commandSource.tsx b/static/app/components/search/sources/commandSource.tsx
index 2fdb7c996e263..bde936dffce2f 100644
--- a/static/app/components/search/sources/commandSource.tsx
+++ b/static/app/components/search/sources/commandSource.tsx
@@ -167,7 +167,7 @@ class CommandSource extends Component {
function CommandSourceWithFeature(props: Omit) {
return (
-
+
{({hasSuperuser}) => }
);
diff --git a/static/app/components/searchQueryBuilder/formattedQuery.tsx b/static/app/components/searchQueryBuilder/formattedQuery.tsx
index 957955a11e150..5122861971a37 100644
--- a/static/app/components/searchQueryBuilder/formattedQuery.tsx
+++ b/static/app/components/searchQueryBuilder/formattedQuery.tsx
@@ -41,7 +41,7 @@ function FilterKey({token}: {token: TokenResult}) {
) : (
- {getKeyName(token.key)}
+ {getKeyName(token.key, {showExplicitTagPrefix: true})}
);
}
diff --git a/static/app/components/searchQueryBuilder/index.tsx b/static/app/components/searchQueryBuilder/index.tsx
index 8104d1f704255..3f2f098ca057a 100644
--- a/static/app/components/searchQueryBuilder/index.tsx
+++ b/static/app/components/searchQueryBuilder/index.tsx
@@ -286,15 +286,15 @@ export function SearchQueryBuilder({
return (
-
-
- onBlur?.(state.query, {parsedQuery, queryIsValid: queryIsValid(parsedQuery)})
- }
- ref={wrapperRef}
- aria-disabled={disabled}
- >
+
+ onBlur?.(state.query, {parsedQuery, queryIsValid: queryIsValid(parsedQuery)})
+ }
+ ref={wrapperRef}
+ aria-disabled={disabled}
+ >
+
)}
-
-
+
+
);
}
diff --git a/static/app/components/searchQueryBuilder/tokens/combobox.tsx b/static/app/components/searchQueryBuilder/tokens/combobox.tsx
index cf93eb4f493f3..145530e0029b4 100644
--- a/static/app/components/searchQueryBuilder/tokens/combobox.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/combobox.tsx
@@ -64,6 +64,7 @@ type SearchQueryBuilderComboboxProps;
+ ['data-test-id']?: string;
/**
* If the combobox has additional information to display, passing JSX
* to this prop will display it in an overlay at the top left position.
@@ -334,6 +335,7 @@ function SearchQueryBuilderComboboxInner,
ref: ForwardedRef
) {
@@ -538,6 +540,7 @@ function SearchQueryBuilderComboboxInner onKeyDownCapture?.(e, {state})}
+ data-test-id={dataTestId}
/>
{description ? (
p.theme.visuallyHidden}
}
}
`;
diff --git a/static/app/components/searchQueryBuilder/tokens/freeText.tsx b/static/app/components/searchQueryBuilder/tokens/freeText.tsx
index 7858c6e3b303f..62c7cda67c292 100644
--- a/static/app/components/searchQueryBuilder/tokens/freeText.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/freeText.tsx
@@ -500,6 +500,9 @@ function SearchQueryBuilderInputInternal({
return true;
}}
onClick={onClick}
+ data-test-id={
+ state.collection.getLastKey() === item.key ? 'query-builder-input' : undefined
+ }
>
{keyItem =>
itemIsSection(keyItem) ? (
diff --git a/static/app/components/searchSyntax/utils.tsx b/static/app/components/searchSyntax/utils.tsx
index fc49bc61686f9..6214eb18385bf 100644
--- a/static/app/components/searchSyntax/utils.tsx
+++ b/static/app/components/searchSyntax/utils.tsx
@@ -203,6 +203,10 @@ type GetKeyNameOpts = {
* Include arguments in aggregate key names
*/
aggregateWithArgs?: boolean;
+ /**
+ * Display explicit tags with `tags[name]` instead of `name`
+ */
+ showExplicitTagPrefix?: boolean;
};
/**
@@ -212,11 +216,14 @@ export const getKeyName = (
key: TokenResult,
options: GetKeyNameOpts = {}
) => {
- const {aggregateWithArgs} = options;
+ const {aggregateWithArgs, showExplicitTagPrefix = false} = options;
switch (key.type) {
case Token.KEY_SIMPLE:
return key.value;
case Token.KEY_EXPLICIT_TAG:
+ if (showExplicitTagPrefix) {
+ return key.text;
+ }
return key.key.value;
case Token.KEY_AGGREGATE:
return aggregateWithArgs
diff --git a/static/app/components/sidebar/broadcasts.spec.tsx b/static/app/components/sidebar/broadcasts.spec.tsx
index 00e83e5da13de..ff403079b1aaa 100644
--- a/static/app/components/sidebar/broadcasts.spec.tsx
+++ b/static/app/components/sidebar/broadcasts.spec.tsx
@@ -4,7 +4,7 @@ import {OrganizationFixture} from 'sentry-fixture/organization';
import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import {BROADCAST_CATEGORIES} from 'sentry/components/sidebar/broadcastPanelItem';
-import Broadcasts from 'sentry/components/sidebar/broadcasts';
+import {Broadcasts} from 'sentry/components/sidebar/broadcasts';
import {SidebarPanelKey} from 'sentry/components/sidebar/types';
import type {Broadcast} from 'sentry/types/system';
import {trackAnalytics} from 'sentry/utils/analytics';
@@ -43,7 +43,6 @@ describe('Broadcasts', function () {
currentPanel={SidebarPanelKey.BROADCASTS}
onShowPanel={() => jest.fn()}
hidePanel={jest.fn()}
- organization={organization}
/>
);
@@ -67,7 +66,6 @@ describe('Broadcasts', function () {
currentPanel={SidebarPanelKey.BROADCASTS}
onShowPanel={() => jest.fn()}
hidePanel={jest.fn()}
- organization={organization}
/>
);
diff --git a/static/app/components/sidebar/broadcasts.tsx b/static/app/components/sidebar/broadcasts.tsx
index 14b1d3a43905b..b7d0dd9ef6ddd 100644
--- a/static/app/components/sidebar/broadcasts.tsx
+++ b/static/app/components/sidebar/broadcasts.tsx
@@ -1,7 +1,5 @@
-import {Component, Fragment, useEffect} from 'react';
+import {useCallback, useEffect, useMemo, useRef, useState} from 'react';
-import {getAllBroadcasts, markBroadcastsAsSeen} from 'sentry/actionCreators/broadcasts';
-import type {Client} from 'sentry/api';
import DemoModeGate from 'sentry/components/acl/demoModeGate';
import LoadingIndicator from 'sentry/components/loadingIndicator';
import {BroadcastPanelItem} from 'sentry/components/sidebar/broadcastPanelItem';
@@ -10,9 +8,11 @@ import SidebarPanel from 'sentry/components/sidebar/sidebarPanel';
import SidebarPanelEmpty from 'sentry/components/sidebar/sidebarPanelEmpty';
import {IconBroadcast} from 'sentry/icons';
import {t} from 'sentry/locale';
-import type {Organization} from 'sentry/types/organization';
import type {Broadcast} from 'sentry/types/system';
-import withApi from 'sentry/utils/withApi';
+import {useApiQuery, useMutation} from 'sentry/utils/queryClient';
+import useApi from 'sentry/utils/useApi';
+import useOrganization from 'sentry/utils/useOrganization';
+import usePrevious from 'sentry/utils/usePrevious';
import type {CommonSidebarProps} from './types';
import {SidebarPanelKey} from './types';
@@ -20,184 +20,116 @@ import {SidebarPanelKey} from './types';
const MARK_SEEN_DELAY = 1000;
const POLLER_DELAY = 600000; // 10 minute poll (60 * 10 * 1000)
-type Props = CommonSidebarProps & {
- api: Client;
- organization: Organization;
-};
-
-type State = {
- broadcasts: Broadcast[];
- loading: boolean;
-};
-
-function BroadcastSidebarContent({
+export function Broadcasts({
orientation,
collapsed,
- loading,
- broadcasts,
+ currentPanel,
hidePanel,
- onResetCounter,
-}: {
- broadcasts: Broadcast[];
- loading: boolean;
- onResetCounter: () => void;
-} & Pick) {
- useEffect(() => {
- return () => {
- onResetCounter();
- };
- }, [onResetCounter]);
-
- return (
-
- {loading ? (
-
- ) : broadcasts.length === 0 ? (
-
- {t('No recent updates from the Sentry team.')}
-
- ) : (
- broadcasts.map(item => (
-
- ))
- )}
-
- );
-}
-
-class Broadcasts extends Component {
- state: State = {
- broadcasts: [],
- loading: true,
- };
-
- componentDidMount() {
- this.fetchData();
- document.addEventListener('visibilitychange', this.handleVisibilityChange);
- }
-
- componentWillUnmount() {
- window.clearTimeout(this.markSeenTimeout);
- window.clearTimeout(this.pollingTimeout);
-
- document.removeEventListener('visibilitychange', this.handleVisibilityChange);
- }
-
- pollingTimeout: number | undefined = undefined;
- markSeenTimeout: number | undefined = undefined;
-
- startPolling() {
- if (this.pollingTimeout) {
- this.stopPolling();
+ onShowPanel,
+}: CommonSidebarProps) {
+ const api = useApi();
+ const organization = useOrganization();
+ const previousPanel = usePrevious(currentPanel);
+
+ const [hasSeenAllPosts, setHasSeenAllPosts] = useState(false);
+ const markSeenTimeoutRef = useRef(undefined);
+
+ const {mutate: markBroadcastsAsSeen} = useMutation({
+ mutationFn: (unseenPostIds: string[]) => {
+ return api.requestPromise('/broadcasts/', {
+ method: 'PUT',
+ query: {id: unseenPostIds},
+ data: {hasSeen: '1'},
+ });
+ },
+ });
+
+ const {isPending, data: broadcasts = []} = useApiQuery(
+ [`/organizations/${organization.slug}/broadcasts/`],
+ {
+ staleTime: 0,
+ refetchInterval: POLLER_DELAY,
+ refetchOnWindowFocus: true,
}
- this.pollingTimeout = window.setTimeout(this.fetchData, POLLER_DELAY);
- }
-
- stopPolling() {
- window.clearTimeout(this.pollingTimeout);
- this.pollingTimeout = undefined;
- }
+ );
- fetchData = async () => {
- if (this.pollingTimeout) {
- this.stopPolling();
- }
+ const unseenPostIds = useMemo(
+ () => broadcasts.filter(item => !item.hasSeen).map(item => item.id),
+ [broadcasts]
+ );
- try {
- const data = await getAllBroadcasts(this.props.api, this.props.organization.slug);
- this.setState({loading: false, broadcasts: data || []});
- } catch {
- this.setState({loading: false});
+ const handleShowPanel = useCallback(() => {
+ if (markSeenTimeoutRef.current) {
+ window.clearTimeout(markSeenTimeoutRef.current);
}
- this.startPolling();
- };
-
- /**
- * If tab/window loses visibility (note: this is different than focus), stop
- * polling for broadcasts data, otherwise, if it gains visibility, start
- * polling again.
- */
- handleVisibilityChange = () =>
- document.hidden ? this.stopPolling() : this.startPolling();
+ markSeenTimeoutRef.current = window.setTimeout(() => {
+ markBroadcastsAsSeen(unseenPostIds);
+ }, MARK_SEEN_DELAY);
- handleShowPanel = () => {
- window.clearTimeout(this.markSeenTimeout);
+ onShowPanel();
+ }, [onShowPanel, unseenPostIds, markBroadcastsAsSeen]);
- this.markSeenTimeout = window.setTimeout(this.markSeen, MARK_SEEN_DELAY);
- this.props.onShowPanel();
- };
-
- markSeen = async () => {
- const unseenBroadcastIds = this.unseenIds;
- if (unseenBroadcastIds.length === 0) {
- return;
+ useEffect(() => {
+ if (
+ previousPanel === SidebarPanelKey.BROADCASTS &&
+ currentPanel !== SidebarPanelKey.BROADCASTS
+ ) {
+ setHasSeenAllPosts(true);
}
+ }, [previousPanel, currentPanel]);
- await markBroadcastsAsSeen(this.props.api, unseenBroadcastIds);
- };
-
- get unseenIds() {
- return this.state.broadcasts
- ? this.state.broadcasts.filter(item => !item.hasSeen).map(item => item.id)
- : [];
- }
-
- handleResetCounter = () => {
- this.setState(state => ({
- broadcasts: state.broadcasts.map(item => ({...item, hasSeen: true})),
- }));
- };
-
- render() {
- const {orientation, collapsed, currentPanel, hidePanel} = this.props;
- const {broadcasts, loading} = this.state;
-
- const unseenPosts = this.unseenIds;
-
- return (
-
-
- }
- label={t("What's new")}
- onClick={this.handleShowPanel}
- id="broadcasts"
- />
+ useEffect(() => {
+ return () => {
+ if (markSeenTimeoutRef.current) {
+ window.clearTimeout(markSeenTimeoutRef.current);
+ }
+ };
+ }, []);
- {currentPanel === SidebarPanelKey.BROADCASTS && (
-
+ return (
+
+ }
+ label={t("What's new")}
+ onClick={handleShowPanel}
+ id="broadcasts"
+ />
+
+ {currentPanel === SidebarPanelKey.BROADCASTS && (
+
+ {isPending ? (
+
+ ) : broadcasts.length === 0 ? (
+
+ {t('No recent updates from the Sentry team.')}
+
+ ) : (
+ broadcasts.map(item => (
+
+ ))
)}
-
-
- );
- }
+
+ )}
+
+ );
}
-
-export default withApi(Broadcasts);
diff --git a/static/app/components/sidebar/index.spec.tsx b/static/app/components/sidebar/index.spec.tsx
index 6f132193db5a7..81cab00af5d36 100644
--- a/static/app/components/sidebar/index.spec.tsx
+++ b/static/app/components/sidebar/index.spec.tsx
@@ -246,13 +246,15 @@ describe('Sidebar', function () {
// Should mark as seen after a delay
act(() => jest.advanceTimersByTime(2000));
- expect(apiMocks.broadcastsMarkAsSeen).toHaveBeenCalledWith(
- '/broadcasts/',
- expect.objectContaining({
- data: {hasSeen: '1'},
- query: {id: ['8']},
- })
- );
+ await waitFor(() => {
+ expect(apiMocks.broadcastsMarkAsSeen).toHaveBeenCalledWith(
+ '/broadcasts/',
+ expect.objectContaining({
+ data: {hasSeen: '1'},
+ query: {id: ['8']},
+ })
+ );
+ });
jest.useRealTimers();
// Close the sidebar
diff --git a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx
index d10ca22d6c162..6eeb2ea55d8e6 100644
--- a/static/app/components/sidebar/index.tsx
+++ b/static/app/components/sidebar/index.tsx
@@ -56,25 +56,25 @@ import {MODULE_SIDEBAR_TITLE as HTTP_MODULE_SIDEBAR_TITLE} from 'sentry/views/in
import {
AI_LANDING_SUB_PATH,
AI_LANDING_TITLE,
-} from 'sentry/views/insights/pages/aiLandingPage';
+} from 'sentry/views/insights/pages/ai/settings';
import {
BACKEND_LANDING_SUB_PATH,
BACKEND_LANDING_TITLE,
-} from 'sentry/views/insights/pages/backendLandingPage';
+} from 'sentry/views/insights/pages/backend/settings';
import {
FRONTEND_LANDING_SUB_PATH,
FRONTEND_LANDING_TITLE,
-} from 'sentry/views/insights/pages/frontendLandingPage';
+} from 'sentry/views/insights/pages/frontend/settings';
import {
MOBILE_LANDING_SUB_PATH,
MOBILE_LANDING_TITLE,
-} from 'sentry/views/insights/pages/mobileLandingPage';
+} from 'sentry/views/insights/pages/mobile/settings';
import {MODULE_TITLES} from 'sentry/views/insights/settings';
import MetricsOnboardingSidebar from 'sentry/views/metrics/ddmOnboarding/sidebar';
import {ProfilingOnboardingSidebar} from '../profiling/profilingOnboardingSidebar';
-import Broadcasts from './broadcasts';
+import {Broadcasts} from './broadcasts';
import SidebarHelp from './help';
import OnboardingStatus from './onboardingStatus';
import ServiceIncidents from './serviceIncidents';
@@ -264,7 +264,7 @@ function Sidebar() {
);
- const moduleURLBuilder = useModuleURLBuilder(true);
+ const moduleURLBuilder = useModuleURLBuilder(true, false);
const queries = hasOrganization && (
@@ -822,7 +822,6 @@ function Sidebar() {
currentPanel={activePanel}
onShowPanel={() => togglePanel(SidebarPanelKey.BROADCASTS)}
hidePanel={hidePanel}
- organization={organization}
/>
{
- trackAnalytics('onboarding.wizard_opened', {organization: org});
- onShowPanel();
- };
const onboardingContext = useContext(OnboardingContext);
const {projects} = useProjects();
const {shouldAccordionFloat} = useContext(ExpandedContext);
- if (!org.features?.includes('onboarding')) {
- return null;
- }
+ const isActive = currentPanel === SidebarPanelKey.ONBOARDING_WIZARD;
+ const walkthrough = isDemoWalkthrough();
+
+ const handleToggle = useCallback(() => {
+ if (!walkthrough && !isActive === true) {
+ trackAnalytics('quick_start.opened', {
+ organization: org,
+ });
+ }
+ onShowPanel();
+ }, [walkthrough, isActive, onShowPanel, org]);
const tasks = getMergedTasks({
organization: org,
@@ -62,6 +68,7 @@ export default function OnboardingStatus({
const allDisplayedTasks = tasks
.filter(task => task.display)
.filter(task => !task.renderCard);
+
const doneTasks = allDisplayedTasks.filter(isDone);
const numberRemaining = allDisplayedTasks.length - doneTasks.length;
@@ -72,13 +79,23 @@ export default function OnboardingStatus({
!task.completionSeen
);
- const isActive = currentPanel === SidebarPanelKey.ONBOARDING_WIZARD;
+ const allTasksCompleted = doneTasks.length >= allDisplayedTasks.length;
+
+ useEffect(() => {
+ if (!allTasksCompleted || isActive) {
+ return;
+ }
- if (doneTasks.length >= allDisplayedTasks.length && !isActive) {
+ trackAnalytics('quick_start.completed', {
+ organization: org,
+ referrer: 'onboarding_sidebar',
+ });
+ }, [isActive, allTasksCompleted, org]);
+
+ if (!org.features?.includes('onboarding') || (allTasksCompleted && !isActive)) {
return null;
}
- const walkthrough = isDemoWalkthrough();
const label = walkthrough ? t('Guided Tours') : t('Quick Start');
const task = walkthrough ? 'tours' : 'tasks';
@@ -87,7 +104,7 @@ export default function OnboardingStatus({
)}
- {isActive && (
-
- )}
+ {isActive &&
+ (hasQuickStartUpdatesFeature(org) ? (
+
+ ) : (
+
+ ))}
);
}
diff --git a/static/app/components/sidebar/sidebarAccordion.tsx b/static/app/components/sidebar/sidebarAccordion.tsx
index ac6d7e39d3957..3b41def6d27fc 100644
--- a/static/app/components/sidebar/sidebarAccordion.tsx
+++ b/static/app/components/sidebar/sidebarAccordion.tsx
@@ -17,8 +17,8 @@ import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {useLocalStorageState} from 'sentry/utils/useLocalStorageState';
import useMedia from 'sentry/utils/useMedia';
+import {useNavigate} from 'sentry/utils/useNavigate';
import useOnClickOutside from 'sentry/utils/useOnClickOutside';
-import useRouter from 'sentry/utils/useRouter';
import type {SidebarItemProps} from './sidebarItem';
import SidebarItem, {isItemActive} from './sidebarItem';
@@ -42,7 +42,7 @@ function SidebarAccordion({
useContext(ExpandedContext);
const theme = useTheme();
const horizontal = useMedia(`(max-width: ${theme.breakpoints.medium})`);
- const router = useRouter();
+ const navigate = useNavigate();
const [expanded, setExpanded] = useLocalStorageState(
`sidebar-accordion-${id}:expanded`,
initiallyExpanded ?? true
@@ -107,7 +107,7 @@ function SidebarAccordion({
e: React.MouseEvent
) => void = () => {
if (itemProps.to) {
- router.push(itemProps.to);
+ navigate(itemProps.to);
setExpandedItemId(null);
}
};
diff --git a/static/app/components/sidebar/sidebarItem.tsx b/static/app/components/sidebar/sidebarItem.tsx
index ae670d5ae95ef..0d517daaed29f 100644
--- a/static/app/components/sidebar/sidebarItem.tsx
+++ b/static/app/components/sidebar/sidebarItem.tsx
@@ -1,5 +1,4 @@
import {Fragment, isValidElement, useCallback, useContext, useMemo} from 'react';
-import isPropValid from '@emotion/is-prop-valid';
import type {Theme} from '@emotion/react';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
@@ -194,7 +193,6 @@ function SidebarItem({
return {
pathname: to ? to : href,
search,
- state: {source: SIDEBAR_NAVIGATION_SOURCE},
};
}, [to, href, search]);
@@ -239,6 +237,7 @@ function SidebarItem({
isInFloatingAccordion={isInFloatingAccordion}
active={isActive ? 'true' : undefined}
to={toProps}
+ state={{source: SIDEBAR_NAVIGATION_SOURCE}}
disabled={!hasLink && isInFloatingAccordion}
className={className}
aria-current={isActive ? 'page' : undefined}
@@ -391,7 +390,7 @@ const getActiveStyle = ({
};
const StyledSidebarItem = styled(Link, {
- shouldForwardProp: p => typeof p === 'string' && isPropValid(p),
+ shouldForwardProp: p => !['isInFloatingAccordion', 'hasNewNav', 'index'].includes(p),
})`
display: flex;
color: ${p => (p.isInFloatingAccordion ? p.theme.gray400 : 'inherit')};
diff --git a/static/app/components/stream/group.spec.tsx b/static/app/components/stream/group.spec.tsx
index ec1b20159120b..a0a888642b2c9 100644
--- a/static/app/components/stream/group.spec.tsx
+++ b/static/app/components/stream/group.spec.tsx
@@ -1,8 +1,17 @@
import {GroupFixture} from 'sentry-fixture/group';
+import {OrganizationFixture} from 'sentry-fixture/organization';
import {ProjectFixture} from 'sentry-fixture/project';
+import {RouterFixture} from 'sentry-fixture/routerFixture';
import {initializeOrg} from 'sentry-test/initializeOrg';
-import {act, render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary';
+import {
+ act,
+ render,
+ screen,
+ userEvent,
+ waitFor,
+ within,
+} from 'sentry-test/reactTestingLibrary';
import StreamGroup from 'sentry/components/stream/group';
import GroupStore from 'sentry/stores/groupStore';
@@ -30,6 +39,8 @@ describe('StreamGroup', function () {
reason: 0,
reason_details: null,
},
+ firstSeen: '2017-10-10T02:41:20.000Z',
+ lastSeen: '2017-10-16T02:41:20.000Z',
});
MockApiClient.addMockResponse({
url: '/organizations/org-slug/projects/',
@@ -162,4 +173,52 @@ describe('StreamGroup', function () {
expect(container).toBeEmptyDOMElement();
});
+
+ it('shows first and last seen columns', function () {
+ render(
+ ,
+ {
+ organization: OrganizationFixture({
+ features: ['issue-stream-table-layout'],
+ }),
+ }
+ );
+
+ expect(screen.getByRole('time', {name: 'First Seen'})).toHaveTextContent('1w');
+ expect(screen.getByRole('time', {name: 'Last Seen'})).toHaveTextContent('1d');
+ });
+
+ it('navigates to issue with correct params when clicked', async function () {
+ const router = RouterFixture();
+ render(
+ ,
+ {
+ router,
+ organization: OrganizationFixture({
+ features: ['issue-stream-table-layout'],
+ }),
+ }
+ );
+
+ await userEvent.click(screen.getByTestId('group'));
+
+ await waitFor(() => {
+ expect(router.push).toHaveBeenCalledWith({
+ pathname: '/organizations/org-slug/issues/1337/',
+ query: {
+ _allp: 1,
+ query: 'is:unresolved is:for_review assigned_or_suggested:[me, none]',
+ referrer: 'issue-stream',
+ stream_index: undefined,
+ },
+ });
+ });
+ });
});
diff --git a/static/app/components/stream/group.tsx b/static/app/components/stream/group.tsx
index 706765ae08e58..5b87920c4e7a0 100644
--- a/static/app/components/stream/group.tsx
+++ b/static/app/components/stream/group.tsx
@@ -1,5 +1,4 @@
import {Fragment, useCallback, useMemo, useRef} from 'react';
-import type {Theme} from '@emotion/react';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
import type {LocationDescriptor} from 'history';
@@ -15,6 +14,7 @@ import EventOrGroupExtraDetails from 'sentry/components/eventOrGroupExtraDetails
import EventOrGroupHeader from 'sentry/components/eventOrGroupHeader';
import {AssigneeSelector} from 'sentry/components/group/assigneeSelector';
import {getBadgeProperties} from 'sentry/components/group/inboxBadges/statusBadge';
+import InteractionStateLayer from 'sentry/components/interactionStateLayer';
import type {GroupListColumn} from 'sentry/components/issues/groupList';
import Link from 'sentry/components/links/link';
import PanelItem from 'sentry/components/panels/panelItem';
@@ -46,15 +46,22 @@ import {trackAnalytics} from 'sentry/utils/analytics';
import {isDemoWalkthrough} from 'sentry/utils/demoMode';
import EventView from 'sentry/utils/discover/eventView';
import {SavedQueryDatasets} from 'sentry/utils/discover/types';
+import {isCtrlKeyPressed} from 'sentry/utils/isCtrlKeyPressed';
import {getConfigForIssueType} from 'sentry/utils/issueTypeConfig';
import {useMutation} from 'sentry/utils/queryClient';
import type RequestError from 'sentry/utils/requestError/requestError';
+import normalizeUrl from 'sentry/utils/url/normalizeUrl';
+import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
+import useOrganization from 'sentry/utils/useOrganization';
import usePageFilters from 'sentry/utils/usePageFilters';
import withOrganization from 'sentry/utils/withOrganization';
import type {TimePeriodType} from 'sentry/views/alerts/rules/metric/details/constants';
import {hasDatasetSelector} from 'sentry/views/dashboards/utils';
import GroupPriority from 'sentry/views/issueDetails/groupPriority';
+import {COLUMN_BREAKPOINTS} from 'sentry/views/issueList/actions/utils';
import {
+ createIssueLink,
DISCOVER_EXCLUSION_FIELDS,
getTabs,
isForReviewQuery,
@@ -91,8 +98,10 @@ function GroupCheckbox({
group: Group;
displayReprocessingLayout?: boolean;
}) {
+ const organization = useOrganization();
const {records: selectedGroupMap} = useLegacyStore(SelectedGroupStore);
const isSelected = selectedGroupMap.get(group.id) ?? false;
+ const hasNewLayout = organization.features.includes('issue-stream-table-layout');
const onChange = useCallback(
(evt: React.ChangeEvent) => {
@@ -108,7 +117,7 @@ function GroupCheckbox({
);
return (
-
+
;
+ }
+
+ return (
+
+ );
+}
+
function BaseGroupRow({
id,
organization,
@@ -141,6 +166,8 @@ function BaseGroupRow({
showLastTriggered = false,
onPriorityChange,
}: Props) {
+ const navigate = useNavigate();
+ const location = useLocation();
const groups = useLegacyStore(GroupStore);
const group = useMemo(
() => groups.find(item => item.id === id) as Group | undefined,
@@ -148,6 +175,7 @@ function BaseGroupRow({
);
const originalInboxState = useRef(group?.inbox as InboxDetails | null);
const {selection} = usePageFilters();
+ const hasNewLayout = organization.features.includes('issue-stream-table-layout');
const referrer = source ? `${source}-issue-stream` : 'issue-stream';
@@ -206,37 +234,32 @@ function BaseGroupRow({
},
});
- const wrapperToggle = useCallback(
+ const clickHasBeenHandled = useCallback(
(evt: React.MouseEvent) => {
const targetElement = evt.target as Partial;
if (!group) {
- return;
+ return true;
}
// Ignore clicks on links
if (targetElement?.tagName?.toLowerCase() === 'a') {
- return;
+ return true;
}
// Ignore clicks on the selection checkbox
if (targetElement?.tagName?.toLowerCase() === 'input') {
- return;
+ return true;
}
let e = targetElement;
while (e.parentElement) {
if (e?.tagName?.toLowerCase() === 'a') {
- return;
+ return true;
}
e = e.parentElement!;
}
- if (evt.shiftKey) {
- SelectedGroupStore.shiftToggleItems(group.id);
- window.getSelection()?.removeAllRanges();
- } else {
- SelectedGroupStore.toggleSelect(group.id);
- }
+ return false;
},
[group]
);
@@ -422,9 +445,20 @@ function BaseGroupRow({
}
>
-
- {secondaryCount !== undefined && useFilteredStats && (
-
+ {hasNewLayout ? (
+
+
+ {secondaryCount !== undefined && useFilteredStats && (
+
+ )}
+
+ ) : (
+
+
+ {secondaryCount !== undefined && useFilteredStats && (
+
+ )}
+
)}
@@ -462,9 +496,20 @@ function BaseGroupRow({
}
>
-
- {secondaryUserCount !== undefined && useFilteredStats && (
-
+ {hasNewLayout ? (
+
+
+ {secondaryUserCount !== undefined && useFilteredStats && (
+
+ )}
+
+ ) : (
+
+
+ {secondaryUserCount !== undefined && useFilteredStats && (
+
+ )}
+
)}
);
@@ -486,21 +531,68 @@ function BaseGroupRow({
);
+ const onClick = (e: React.MouseEvent) => {
+ if (displayReprocessingLayout) {
+ return;
+ }
+
+ const handled = clickHasBeenHandled(e);
+
+ if (handled) {
+ return;
+ }
+
+ if (canSelect && e.shiftKey) {
+ SelectedGroupStore.shiftToggleItems(group.id);
+ window.getSelection()?.removeAllRanges();
+ return;
+ }
+
+ if (canSelect && isCtrlKeyPressed(e)) {
+ SelectedGroupStore.toggleSelect(group.id);
+ return;
+ }
+
+ if (hasNewLayout) {
+ navigate(
+ normalizeUrl(
+ createIssueLink({
+ data: group,
+ organization,
+ referrer,
+ streamIndex: index,
+ location,
+ query,
+ })
+ )
+ );
+ return;
+ }
+
+ if (!canSelect) {
+ return;
+ }
+
+ SelectedGroupStore.toggleSelect(group.id);
+ };
+
return (
+ {hasNewLayout && }
{canSelect && (
)}
-
+
-
+
{hasGuideAnchor && issueStreamAnchor}
- {withChart && !displayReprocessingLayout && issueTypeConfig.stats.enabled && (
-
+ {withChart &&
+ !displayReprocessingLayout &&
+ issueTypeConfig.stats.enabled &&
+ hasNewLayout ? (
+
+
+
+ ) : (
+
- {withColumns.includes('event') && issueTypeConfig.stats.enabled && (
- {groupCount}
+ {withColumns.includes('firstSeen') && (
+
+
+
+ )}
+ {withColumns.includes('lastSeen') && (
+
+
+
+ )}
+ {withColumns.includes('event') &&
+ issueTypeConfig.stats.enabled &&
+ hasNewLayout ? (
+
+ {groupCount}
+
+ ) : (
+
+ {groupCount}
+
)}
- {withColumns.includes('users') && issueTypeConfig.stats.enabled && (
+ {withColumns.includes('users') &&
+ issueTypeConfig.stats.enabled &&
+ hasNewLayout ? (
+
+ {groupUsersCount}
+
+ ) : (
{groupUsersCount}
)}
{withColumns.includes('priority') ? (
-
- {group.priority ? (
-
- ) : null}
-
+ hasNewLayout ? (
+
+ {group.priority ? (
+
+ ) : null}
+
+ ) : (
+
+ {group.priority ? (
+
+ ) : null}
+
+ )
) : null}
- {withColumns.includes('assignee') && (
-
-
-
- )}
+ {withColumns.includes('assignee') &&
+ (hasNewLayout ? (
+
+
+
+ ) : (
+
+
+
+ ))}
{showLastTriggered && {lastTriggered} }
)}
@@ -565,6 +719,7 @@ export default StreamGroup;
// Position for wrapper is relative for overlay actions
const Wrapper = styled(PanelItem)<{
+ hasNewLayout: boolean;
reviewed: boolean;
useTintRow: boolean;
}>`
@@ -572,6 +727,32 @@ const Wrapper = styled(PanelItem)<{
padding: ${space(1.5)} 0;
line-height: 1.1;
+ ${p =>
+ p.hasNewLayout &&
+ css`
+ cursor: pointer;
+ padding: ${space(1)} 0;
+ min-height: 66px;
+
+ /* Adds underline to issue title when active */
+ &:hover {
+ [data-issue-title-primary] {
+ text-decoration: underline;
+ }
+ }
+
+ /* Disables the hover effect when hovering over dropdown buttons and checkboxes */
+ &:has(button:hover, input:hover, [data-overlay]:hover) {
+ [data-layer] {
+ display: none;
+ }
+
+ [data-issue-title-primary] {
+ text-decoration: none;
+ }
+ }
+ `}
+
${p =>
p.useTintRow &&
p.reviewed &&
@@ -608,51 +789,80 @@ const Wrapper = styled(PanelItem)<{
`};
`;
-const GroupSummary = styled('div')<{canSelect: boolean}>`
+const GroupSummary = styled('div')<{canSelect: boolean; hasNewLayout: boolean}>`
overflow: hidden;
margin-left: ${p => space(p.canSelect ? 1 : 2)};
- margin-right: ${space(1)};
+ margin-right: ${p => (p.hasNewLayout ? space(2) : space(1))};
flex: 1;
width: 66.66%;
+ ${p =>
+ p.hasNewLayout &&
+ css`
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ font-size: ${p.theme.fontSizeMedium};
+ `}
+
@media (min-width: ${p => p.theme.breakpoints.medium}) {
width: 50%;
}
`;
-const GroupCheckBoxWrapper = styled('div')`
+const GroupCheckBoxWrapper = styled('div')<{hasNewLayout: boolean}>`
margin-left: ${space(2)};
align-self: flex-start;
height: 15px;
display: flex;
align-items: center;
+
+ ${p =>
+ p.hasNewLayout &&
+ css`
+ padding-top: ${space(2)};
+ `}
`;
-const primaryStatStyle = (theme: Theme) => css`
- font-size: ${theme.fontSizeLarge};
- font-variant-numeric: tabular-nums;
+const CountsWrapper = styled('div')`
+ display: flex;
+ flex-direction: column;
`;
-const PrimaryCount = styled(Count)`
- ${p => primaryStatStyle(p.theme)};
+export const PrimaryCount = styled(Count)<{hasNewLayout?: boolean}>`
+ font-size: ${p => (p.hasNewLayout ? p.theme.fontSizeMedium : p.theme.fontSizeLarge)};
+ ${p =>
+ p.hasNewLayout &&
+ `
+ display: flex;
+ justify-content: right;
+ margin-bottom: ${space(0.25)};
+ `}
+ font-variant-numeric: tabular-nums;
`;
-const secondaryStatStyle = (theme: Theme) => css`
- font-size: ${theme.fontSizeLarge};
+const SecondaryCount = styled(({value, ...p}) => )<{
+ hasNewLayout?: boolean;
+}>`
+ font-size: ${p => (p.hasNewLayout ? p.theme.fontSizeSmall : p.theme.fontSizeLarge)};
+ ${p =>
+ p.hasNewLayout &&
+ css`
+ display: flex;
+ justify-content: right;
+ color: ${p.theme.subText};
+ `}
+
font-variant-numeric: tabular-nums;
:before {
content: '/';
padding-left: ${space(0.25)};
padding-right: 2px;
- color: ${theme.gray300};
+ color: ${p => p.theme.gray300};
}
`;
-const SecondaryCount = styled(({value, ...p}) => )`
- ${p => secondaryStatStyle(p.theme)}
-`;
-
const CountTooltipContent = styled('div')`
display: grid;
grid-template-columns: 1fr max-content;
@@ -670,17 +880,54 @@ const CountTooltipContent = styled('div')`
}
`;
-const ChartWrapper = styled('div')<{narrowGroups: boolean}>`
+const ChartWrapper = styled('div')<{margin: boolean; narrowGroups: boolean}>`
width: 200px;
align-self: center;
+ margin-right: ${p => (p.margin ? space(2) : 0)};
- /* prettier-ignore */
@media (max-width: ${p =>
- p.narrowGroups ? p.theme.breakpoints.xlarge : p.theme.breakpoints.large}) {
+ p.narrowGroups ? p.theme.breakpoints.xlarge : p.theme.breakpoints.large}) {
+ display: none;
+ }
+`;
+
+const NarrowChartWrapper = styled('div')<{breakpoint: string}>`
+ width: 175px;
+ align-self: center;
+ margin-right: ${space(2)};
+
+ @media (max-width: ${p => p.breakpoint}) {
+ display: none;
+ }
+`;
+
+const TimestampWrapper = styled('div')<{breakpoint: string}>`
+ display: flex;
+ align-self: center;
+ width: 75px;
+ margin-right: ${space(2)};
+
+ @media (max-width: ${p => p.breakpoint}) {
display: none;
}
`;
+const NarrowEventsOrUsersCountsWrapper = styled('div')<{breakpoint: string}>`
+ display: flex;
+ justify-content: flex-end;
+ align-self: center;
+ margin-right: ${space(2)};
+ width: 60px;
+
+ @media (max-width: ${p => p.breakpoint}) {
+ display: none;
+ }
+`;
+
+export const InnerCountsWrapper = styled('div')`
+ margin-right: ${space(2)};
+`;
+
const EventCountsWrapper = styled('div')<{leftMargin?: string}>`
display: flex;
justify-content: flex-end;
@@ -694,6 +941,18 @@ const EventCountsWrapper = styled('div')<{leftMargin?: string}>`
}
`;
+const NarrowPriorityWrapper = styled('div')<{breakpoint: string}>`
+ width: 70px;
+ margin-right: ${space(2)};
+ align-self: center;
+ display: flex;
+ justify-content: flex-start;
+
+ @media (max-width: ${p => p.theme.breakpoints.large}) {
+ display: none;
+ }
+`;
+
const PriorityWrapper = styled('div')<{narrowGroups: boolean}>`
width: 70px;
margin: 0 ${space(2)};
@@ -701,9 +960,8 @@ const PriorityWrapper = styled('div')<{narrowGroups: boolean}>`
display: flex;
justify-content: flex-end;
- /* prettier-ignore */
@media (max-width: ${p =>
- p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) {
+ p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) {
display: none;
}
`;
@@ -713,9 +971,21 @@ const AssigneeWrapper = styled('div')<{narrowGroups: boolean}>`
margin: 0 ${space(2)};
align-self: center;
- /* prettier-ignore */
@media (max-width: ${p =>
- p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) {
+ p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) {
+ display: none;
+ }
+`;
+
+const NarrowAssigneeWrapper = styled('div')<{breakpoint: string}>`
+ display: flex;
+ justify-content: flex-start;
+ text-align: right;
+ width: 60px;
+ margin-right: ${space(2)};
+ align-self: center;
+
+ @media (max-width: ${p => p.breakpoint}) {
display: none;
}
`;
diff --git a/static/app/components/tabs/tab.tsx b/static/app/components/tabs/tab.tsx
index aad8165a5525b..e3c978c6b5cd6 100644
--- a/static/app/components/tabs/tab.tsx
+++ b/static/app/components/tabs/tab.tsx
@@ -1,5 +1,5 @@
import {forwardRef, useCallback} from 'react';
-import type {Theme} from '@emotion/react';
+import {css, type Theme} from '@emotion/react';
import styled from '@emotion/styled';
import type {AriaTabProps} from '@react-aria/tabs';
import {useTab} from '@react-aria/tabs';
@@ -288,27 +288,26 @@ const innerWrapStyles = ({
}: {
orientation: Orientation;
theme: Theme;
-}) => `
+}) => css`
display: flex;
align-items: center;
position: relative;
height: calc(
- ${theme.form.sm.height}px +
- ${orientation === 'horizontal' ? space(0.75) : '0px'}
+ ${theme.form.sm.height}px + ${orientation === 'horizontal' ? space(0.75) : '0px'}
);
border-radius: ${theme.borderRadius};
transform: translateY(1px);
- ${
- orientation === 'horizontal'
- ? `
+ ${orientation === 'horizontal'
+ ? css`
/* Extra padding + negative margin trick, to expand click area */
padding: ${space(0.75)} ${space(1)} ${space(1.5)};
margin-left: -${space(1)};
margin-right: -${space(1)};
`
- : `padding: ${space(0.75)} ${space(2)};`
- };
+ : css`
+ padding: ${space(0.75)} ${space(2)};
+ `};
`;
const TabLink = styled(Link)<{orientation: Orientation}>`
diff --git a/static/app/components/tabs/tabList.tsx b/static/app/components/tabs/tabList.tsx
index 68cdb69b877a3..eca812f1d53a0 100644
--- a/static/app/components/tabs/tabList.tsx
+++ b/static/app/components/tabs/tabList.tsx
@@ -14,7 +14,7 @@ import DropdownButton from 'sentry/components/dropdownButton';
import {IconEllipsis} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
-import {browserHistory} from 'sentry/utils/browserHistory';
+import {useNavigate} from 'sentry/utils/useNavigate';
import {TabsContext} from './index';
import type {TabListItemProps} from './item';
@@ -132,6 +132,7 @@ function BaseTabList({
variant = 'flat',
...props
}: BaseTabListProps) {
+ const navigate = useNavigate();
const tabListRef = useRef(null);
const {rootProps, setTabListState} = useContext(TabsContext);
const {
@@ -156,7 +157,7 @@ function BaseTabList({
if (!linkTo) {
return;
}
- browserHistory.push(linkTo);
+ navigate(linkTo);
},
isDisabled: disabled,
keyboardActivation,
diff --git a/static/app/components/timeSince.tsx b/static/app/components/timeSince.tsx
index 6b224e9d3eb2e..925ec8346c383 100644
--- a/static/app/components/timeSince.tsx
+++ b/static/app/components/timeSince.tsx
@@ -5,10 +5,10 @@ import moment from 'moment-timezone';
import type {TooltipProps} from 'sentry/components/tooltip';
import {Tooltip} from 'sentry/components/tooltip';
import {t} from 'sentry/locale';
-import ConfigStore from 'sentry/stores/configStore';
import getDuration from 'sentry/utils/duration/getDuration';
import getDynamicText from 'sentry/utils/getDynamicText';
import type {ColorOrAlias} from 'sentry/utils/theme';
+import {useUser} from 'sentry/utils/useUser';
function getDateObj(date: RelaxedDateType): Date {
return typeof date === 'string' || isNumber(date) ? new Date(date) : date;
@@ -120,6 +120,7 @@ function TimeSince({
liveUpdateInterval = 'minute',
...props
}: Props) {
+ const user = useUser();
const tickerRef = useRef();
const computeRelativeDate = useCallback(
@@ -150,7 +151,6 @@ function TimeSince({
}, [liveUpdateInterval, computeRelativeDate]);
const dateObj = getDateObj(date);
- const user = ConfigStore.get('user');
const options = user ? user.options : null;
// Use short months when showing seconds, because "September" causes the
diff --git a/static/app/components/versionHoverCard.tsx b/static/app/components/versionHoverCard.tsx
index 59cf657794089..a7139fe17f182 100644
--- a/static/app/components/versionHoverCard.tsx
+++ b/static/app/components/versionHoverCard.tsx
@@ -1,4 +1,3 @@
-import {Component} from 'react';
import styled from '@emotion/styled';
import type {Client} from 'sentry/api';
@@ -39,23 +38,24 @@ interface Props extends React.ComponentProps {
repositoriesLoading?: boolean;
}
-type State = {
- visible: boolean;
-};
-
-class VersionHoverCard extends Component {
- state: State = {
- visible: false,
- };
-
- toggleHovercard() {
- this.setState({
- visible: true,
- });
- }
-
- getRepoLink() {
- const {organization} = this.props;
+function VersionHoverCard({
+ api: _api,
+ projectSlug: _projectSlug,
+ deploysLoading,
+ deploysError,
+ release,
+ releaseLoading,
+ releaseError,
+ repositories,
+ repositoriesLoading,
+ repositoriesError,
+ organization,
+ deploys,
+ releaseVersion,
+ children,
+ ...hovercardProps
+}: Props) {
+ function getRepoLink() {
const orgSlug = organization.slug;
return {
header: null,
@@ -75,8 +75,7 @@ class VersionHoverCard extends Component {
};
}
- getBody() {
- const {releaseVersion, release, deploys} = this.props;
+ function getBody() {
if (release === undefined || !defined(deploys)) {
return {header: null, body: null};
}
@@ -142,68 +141,52 @@ class VersionHoverCard extends Component {
};
}
- render() {
- const {
- deploysLoading,
- deploysError,
- release,
- releaseLoading,
- releaseError,
- repositories,
- repositoriesLoading,
- repositoriesError,
- } = this.props;
- let header: React.ReactNode = null;
- let body: React.ReactNode = null;
-
- const loading = !!(deploysLoading || releaseLoading || repositoriesLoading);
- const error = deploysError ?? releaseError ?? repositoriesError;
- const hasRepos = repositories && repositories.length > 0;
-
- if (loading) {
- body = ;
- } else if (error) {
- body = ;
- } else {
- const renderObj: {[key: string]: React.ReactNode} =
- hasRepos && release ? this.getBody() : this.getRepoLink();
- header = renderObj.header;
- body = renderObj.body;
- }
-
- return (
-
- {this.props.children}
-
- );
+ let header: React.ReactNode = null;
+ let body: React.ReactNode = null;
+
+ const loading = !!(deploysLoading || releaseLoading || repositoriesLoading);
+ const error = deploysError ?? releaseError ?? repositoriesError;
+ const hasRepos = repositories && repositories.length > 0;
+
+ if (loading) {
+ body = ;
+ } else if (error) {
+ body = ;
+ } else {
+ const renderObj: {[key: string]: React.ReactNode} =
+ hasRepos && release ? getBody() : getRepoLink();
+ header = renderObj.header;
+ body = renderObj.body;
}
+
+ return (
+
+ {children}
+
+ );
}
interface VersionHoverHeaderProps {
releaseVersion: string;
}
-export class VersionHoverHeader extends Component {
- render() {
- return (
-
- {t('Release')}
-
-
-
-
-
-
- );
- }
+function VersionHoverHeader({releaseVersion}: VersionHoverHeaderProps) {
+ return (
+
+ {t('Release')}
+
+
+
+
+
+ );
}
-export {VersionHoverCard};
export default withApi(withRelease(withRepositories(VersionHoverCard)));
const ConnectRepo = styled('div')`
diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx
index 41acbd9907034..74b39e0ba69ee 100644
--- a/static/app/constants/index.tsx
+++ b/static/app/constants/index.tsx
@@ -254,6 +254,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'errors',
displayName: 'error',
titleName: t('Errors'),
+ productName: t('Error Monitoring'),
uid: 1,
},
[DataCategoryExact.TRANSACTION]: {
@@ -262,6 +263,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'transactions',
displayName: 'transaction',
titleName: t('Transactions'),
+ productName: t('Performance Monitoring'),
uid: 2,
},
[DataCategoryExact.ATTACHMENT]: {
@@ -270,6 +272,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'attachments',
displayName: 'attachment',
titleName: t('Attachments'),
+ productName: t('Attachments'),
uid: 4,
},
[DataCategoryExact.PROFILE]: {
@@ -278,6 +281,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'profiles',
displayName: 'profile',
titleName: t('Profiles'),
+ productName: t('Continuous Profiling'),
uid: 6,
},
[DataCategoryExact.REPLAY]: {
@@ -286,6 +290,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'replays',
displayName: 'replay',
titleName: t('Session Replays'),
+ productName: t('Session Replay'),
uid: 7,
},
[DataCategoryExact.TRANSACTION_PROCESSED]: {
@@ -294,6 +299,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'transactions',
displayName: 'transaction',
titleName: t('Transactions'),
+ productName: t('Performance Monitoring'),
uid: 8,
},
[DataCategoryExact.TRANSACTION_INDEXED]: {
@@ -302,6 +308,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'indexed transactions',
displayName: 'indexed transaction',
titleName: t('Indexed Transactions'),
+ productName: t('Performance Monitoring'),
uid: 9,
},
[DataCategoryExact.MONITOR]: {
@@ -310,6 +317,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'monitor check-ins',
displayName: 'monitor check-in',
titleName: t('Monitor Check-Ins'),
+ productName: t('Cron Monitoring'),
uid: 10,
},
[DataCategoryExact.SPAN]: {
@@ -318,6 +326,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'spans',
displayName: 'spans',
titleName: t('Spans'),
+ productName: t('Tracing'),
uid: 12,
},
[DataCategoryExact.MONITOR_SEAT]: {
@@ -326,6 +335,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'monitorSeats',
displayName: 'cron monitors',
titleName: t('Cron Monitors'),
+ productName: t('Cron Monitoring'),
uid: 13,
},
[DataCategoryExact.PROFILE_DURATION]: {
@@ -334,6 +344,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'profileDuration',
displayName: 'profile hours',
titleName: t('Profile Hours'),
+ productName: t('Continuous Profiling'),
uid: 17,
},
[DataCategoryExact.METRIC_SECOND]: {
@@ -342,6 +353,7 @@ export const DATA_CATEGORY_INFO = {
plural: 'metricSeconds',
displayName: 'metric hours',
titleName: t('Metrics Hours'),
+ productName: t('Metrics'),
uid: 19,
},
} as const satisfies Record;
diff --git a/static/app/data/controlsiloUrlPatterns.ts b/static/app/data/controlsiloUrlPatterns.ts
index 268c70b0c786f..f0e1959803be8 100644
--- a/static/app/data/controlsiloUrlPatterns.ts
+++ b/static/app/data/controlsiloUrlPatterns.ts
@@ -43,6 +43,7 @@ const patterns: RegExp[] = [
new RegExp('^api/0/_admin/instance-level-oauth/$'),
new RegExp('^api/0/_admin/instance-level-oauth/[^/]+/$'),
new RegExp('^_admin/'),
+ new RegExp('^_warmup/$'),
new RegExp('^api/0/organizations/[^/]+/api-keys/$'),
new RegExp('^api/0/organizations/[^/]+/api-keys/[^/]+/$'),
new RegExp('^api/0/organizations/[^/]+/audit-logs/$'),
@@ -135,6 +136,7 @@ const patterns: RegExp[] = [
new RegExp('^api/0/internal/integration-proxy/$'),
new RegExp('^api/0/internal/rpc/[^/]+/[^/]+/$'),
new RegExp('^api/0/internal/feature-flags/$'),
+ new RegExp('^api/0/secret-scanning/github/$'),
new RegExp('^api/hooks/mailgun/inbound/'),
new RegExp('^oauth/authorize/$'),
new RegExp('^oauth/token/$'),
diff --git a/static/app/data/forms/cspReports.tsx b/static/app/data/forms/cspReports.tsx
index 3c64e72402c8d..eea9235aa343b 100644
--- a/static/app/data/forms/cspReports.tsx
+++ b/static/app/data/forms/cspReports.tsx
@@ -2,7 +2,7 @@
import type {JsonFormObject} from 'sentry/components/forms/types';
import {t} from 'sentry/locale';
-export const route = '/settings/:orgId/projects/:projectId/csp/';
+export const route = '/settings/:orgId/projects/:projectId/security-headers/csp';
const formGroups: JsonFormObject[] = [
{
diff --git a/static/app/data/forms/projectIssueGrouping.tsx b/static/app/data/forms/projectIssueGrouping.tsx
index 00dfb2c473723..30b24809d7ede 100644
--- a/static/app/data/forms/projectIssueGrouping.tsx
+++ b/static/app/data/forms/projectIssueGrouping.tsx
@@ -102,10 +102,8 @@ stack.function:mylibrary_* +app`}
const RuleDescription = styled('div')`
margin-bottom: ${space(1)};
margin-top: -${space(1)};
- margin-right: 36px;
`;
const RuleExample = styled('pre')`
margin-bottom: ${space(1)};
- margin-right: 36px;
`;
diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx
index b30c69a502dfa..79c76885db50c 100644
--- a/static/app/data/platformCategories.tsx
+++ b/static/app/data/platformCategories.tsx
@@ -21,6 +21,7 @@ export const frontend: PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
@@ -228,6 +229,7 @@ export const platformsWithNestedInstrumentationGuides: PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
@@ -289,6 +291,7 @@ export const profiling: PlatformKey[] = [
'node-koa',
'node-connect',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-remix',
'javascript-solidstart',
'javascript-sveltekit',
@@ -340,6 +343,7 @@ export const releaseHealth: PlatformKey[] = [
'javascript-gatsby',
'javascript-vue',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-remix',
'javascript-solid',
'javascript-solidstart',
@@ -445,6 +449,7 @@ export const replayFrontendPlatforms: readonly PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
@@ -631,6 +636,7 @@ const customMetricFrontendPlatforms: readonly PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
diff --git a/static/app/data/platformPickerCategories.tsx b/static/app/data/platformPickerCategories.tsx
index e22d5b470b0fc..2a8e73c4111a7 100644
--- a/static/app/data/platformPickerCategories.tsx
+++ b/static/app/data/platformPickerCategories.tsx
@@ -45,6 +45,7 @@ const browser: Set = new Set([
'javascript-svelte',
'javascript-sveltekit',
'javascript-vue',
+ 'javascript-nuxt',
'unity',
]);
diff --git a/static/app/data/platforms.tsx b/static/app/data/platforms.tsx
index e6aa1559bcae4..3fd8d5b6880ed 100644
--- a/static/app/data/platforms.tsx
+++ b/static/app/data/platforms.tsx
@@ -354,6 +354,13 @@ export const platforms: PlatformIntegration[] = [
language: 'javascript',
link: 'https://docs.sentry.io/platforms/javascript/guides/vue/',
},
+ {
+ id: 'javascript-nuxt',
+ name: 'Nuxt',
+ type: 'framework',
+ language: 'javascript',
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/',
+ },
{
id: 'kotlin',
name: 'Kotlin',
diff --git a/static/app/gettingStartedDocs/android/android.tsx b/static/app/gettingStartedDocs/android/android.tsx
index 3422e25514960..a59faeddb9bab 100644
--- a/static/app/gettingStartedDocs/android/android.tsx
+++ b/static/app/gettingStartedDocs/android/android.tsx
@@ -104,12 +104,12 @@ SentryAndroid.init(context) { options ->
options.isDebug = true
// Currently under experimental options:
- options.experimental.sessionReplay.errorSampleRate = 1.0
+ options.experimental.sessionReplay.onErrorSampleRate = 1.0
options.experimental.sessionReplay.sessionSampleRate = 1.0
}`;
const getReplaySetupSnippetXml = () => `
-
+
`;
const getReplayConfigurationSnippet = () => `
@@ -138,7 +138,9 @@ const onboarding: OnboardingConfig = {
{
description: (
- {t('The Sentry wizard will automatically patch your application:')}
+
+ {t('The Sentry wizard will automatically patch your application:')}
+
{tct(
@@ -158,10 +160,9 @@ const onboarding: OnboardingConfig = {
{tct(
- 'Create [sentryProperties: sentry.properties] with an auth token to upload proguard mappings (this file is automatically added to [gitignore: .gitignore])',
+ 'Create [code: sentry.properties] with an auth token to upload proguard mappings (this file is automatically added to [code: .gitignore])',
{
- sentryProperties:
,
- gitignore:
,
+ code:
,
}
)}
@@ -171,18 +172,16 @@ const onboarding: OnboardingConfig = {
)}
-
- {tct(
- 'Alternatively, you can also [manualSetupLink:set up the SDK manually].',
- {
- manualSetupLink: (
-
- ),
- }
- )}
-
),
+ additionalInfo: tct(
+ 'Alternatively, you can also [manualSetupLink:set up the SDK manually].',
+ {
+ manualSetupLink: (
+
+ ),
+ }
+ ),
},
],
},
@@ -218,10 +217,9 @@ const onboarding: OnboardingConfig = {
{tct(
- 'Configuration is done via the application [manifest: AndroidManifest.xml]. Under the hood Sentry uses a [provider:ContentProvider] to initialize the SDK based on the values provided below. This way the SDK can capture important crashes and metrics right from the app start.',
+ 'Configuration is done via the application [code: AndroidManifest.xml]. Under the hood Sentry uses a [code:ContentProvider] to initialize the SDK based on the values provided below. This way the SDK can capture important crashes and metrics right from the app start.',
{
- manifest:
,
- provider:
,
+ code:
,
}
)}
@@ -436,7 +434,7 @@ const replayOnboarding: OnboardingConfig = {
],
verify: getReplayVerifyStep({
replayOnErrorSampleRateName:
- 'options\u200b.experimental\u200b.sessionReplay\u200b.errorSampleRate',
+ 'options\u200b.experimental\u200b.sessionReplay\u200b.onErrorSampleRate',
replaySessionSampleRateName:
'options\u200b.experimental\u200b.sessionReplay\u200b.sessionSampleRate',
}),
diff --git a/static/app/gettingStartedDocs/apple/ios.tsx b/static/app/gettingStartedDocs/apple/ios.tsx
index 67d873ae583a8..670a318b64880 100644
--- a/static/app/gettingStartedDocs/apple/ios.tsx
+++ b/static/app/gettingStartedDocs/apple/ios.tsx
@@ -349,19 +349,17 @@ const onboarding: OnboardingConfig = {
{tct(
- 'Add a new [phase: Upload Debug Symbols] phase to your [xcodebuild: xcodebuild] build script',
+ 'Add a new [code: Upload Debug Symbols] phase to your [code: xcodebuild] build script',
{
- phase:
,
- xcodebuild:
,
+ code:
,
}
)}
{tct(
- 'Create [sentryclirc: .sentryclirc] with an auth token to upload debug symbols (this file is automatically added to [gitignore: .gitignore])',
+ 'Create [code: .sentryclirc] with an auth token to upload debug symbols (this file is automatically added to [code: .gitignore])',
{
- sentryclirc:
,
- gitignore:
,
+ code:
,
}
)}
@@ -523,14 +521,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with SwiftUI.'),
link: 'https://docs.sentry.io/platforms/apple/tracing/instrumentation/swiftui-instrumentation/',
},
- {
- id: 'profiling',
- name: t('Profiling'),
- description: t(
- 'Collect and analyze performance profiles from real user devices in production.'
- ),
- link: 'https://docs.sentry.io/platforms/apple/profiling/',
- },
],
};
@@ -584,13 +574,9 @@ const metricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges]. These are available under the [codeNamespace:SentrySDK.metrics()] namespace.",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges]. These are available under the [code:SentrySDK.metrics()] namespace.",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/apple/macos.tsx b/static/app/gettingStartedDocs/apple/macos.tsx
index 1cfcc4871a346..399ee10e391a2 100644
--- a/static/app/gettingStartedDocs/apple/macos.tsx
+++ b/static/app/gettingStartedDocs/apple/macos.tsx
@@ -215,14 +215,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with SwiftUI.'),
link: 'https://docs.sentry.io/platforms/apple/tracing/instrumentation/swiftui-instrumentation/',
},
- {
- id: 'profiling',
- name: t('Profiling'),
- description: t(
- 'Collect and analyze performance profiles from real user devices in production.'
- ),
- link: 'https://docs.sentry.io/platforms/apple/profiling/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/bun/bun.spec.tsx b/static/app/gettingStartedDocs/bun/bun.spec.tsx
index b7293806881ad..9b4b615690575 100644
--- a/static/app/gettingStartedDocs/bun/bun.spec.tsx
+++ b/static/app/gettingStartedDocs/bun/bun.spec.tsx
@@ -28,8 +28,5 @@ describe('bun onboarding docs', function () {
expect(
screen.queryByText(textWithMarkupMatcher(/tracesSampleRate: 1\.0,/))
).not.toBeInTheDocument();
-
- // Renders next steps
- expect(screen.getByRole('link', {name: 'Tracing'})).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/bun/bun.tsx b/static/app/gettingStartedDocs/bun/bun.tsx
index b350d8e2e7df7..57e8a22dc10e1 100644
--- a/static/app/gettingStartedDocs/bun/bun.tsx
+++ b/static/app/gettingStartedDocs/bun/bun.tsx
@@ -92,19 +92,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params =>
- params.isPerformanceSelected
- ? []
- : [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/bun/tracing/',
- },
- ],
+ nextSteps: () => [],
};
const customMetricsOnboarding: OnboardingConfig = {
@@ -112,10 +100,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need a minimum version [codeVersion:7.91.0] of [codePackage:@sentry/bun].',
+ 'You need a minimum version [code:7.91.0] of [code:@sentry/bun].',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -139,13 +126,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges]. These are available under the [codeNamespace:Sentry.metrics] namespace. This API is available in both renderer and main processes.",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges]. These are available under the [code:Sentry.metrics] namespace. This API is available in both renderer and main processes.",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/capacitor/capacitor.tsx b/static/app/gettingStartedDocs/capacitor/capacitor.tsx
index d6aa0511909ff..b268bb760ab73 100644
--- a/static/app/gettingStartedDocs/capacitor/capacitor.tsx
+++ b/static/app/gettingStartedDocs/capacitor/capacitor.tsx
@@ -171,10 +171,9 @@ const onboarding: OnboardingConfig = {
description: (
{tct(
- `Install the Sentry Capacitor SDK as a dependency using [codeNpm:npm] or [codeYarn:yarn], alongside the Sentry [siblingName:] SDK:`,
+ `Install the Sentry Capacitor SDK as a dependency using [code:npm] or [code:yarn], alongside the Sentry [siblingName:] SDK:`,
{
- codeYarn:
,
- codeNpm:
,
+ code:
,
siblingName: getSiblingName(params.platformOptions.siblingOption),
}
)}
@@ -243,7 +242,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params => [
+ nextSteps: () => [
{
id: 'capacitor-android-setup',
name: t('Capacitor 2 Setup'),
@@ -252,26 +251,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/?#capacitor-2---android-specifics',
},
- params.isPerformanceSelected
- ? null
- : {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/tracing/',
- },
- params.isReplaySelected
- ? null
- : {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/cordova/cordova.tsx b/static/app/gettingStartedDocs/cordova/cordova.tsx
index dd4520df6ff7d..4a7a2684e67ba 100644
--- a/static/app/gettingStartedDocs/cordova/cordova.tsx
+++ b/static/app/gettingStartedDocs/cordova/cordova.tsx
@@ -39,10 +39,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'You should [initCode:init] the SDK in the [deviceReadyCode:deviceReady] function, to make sure the native integrations runs. For more details about Cordova [link:click here]',
+ 'You should [code:init] the SDK in the [code:deviceReady] function, to make sure the native integrations runs. For more details about Cordova [link:click here]',
{
- initCode:
,
- deviceReadyCode:
,
+ code:
,
link: (
),
diff --git a/static/app/gettingStartedDocs/dart/dart.tsx b/static/app/gettingStartedDocs/dart/dart.tsx
index 92436fc0aca67..6ebb1ef207f62 100644
--- a/static/app/gettingStartedDocs/dart/dart.tsx
+++ b/static/app/gettingStartedDocs/dart/dart.tsx
@@ -91,10 +91,9 @@ const metricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need Sentry Dart SDK version [codeVersion:7.19.0] or higher. Learn more about installation methods in our [docsLink:full documentation].',
+ 'You need Sentry Dart SDK version [code:7.19.0] or higher. Learn more about installation methods in our [docsLink:full documentation].',
{
- package:
,
- codeVersion:
,
+ code:
,
docsLink: ,
}
),
@@ -131,13 +130,9 @@ const metricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges]. These are available under the [codeNamespace:Sentry.metrics()] namespace.",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges]. These are available under the [code:Sentry.metrics()] namespace.",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
@@ -223,13 +218,9 @@ const onboarding: OnboardingConfig = {
language: 'dart',
code: getConfigureSnippet(params),
additionalInfo: tct(
- 'You can configure the [sentryDsn: SENTRY_DSN], [sentryRelease: SENTRY_RELEASE], [sentryDist: SENTRY_DIST], and [sentryEnv: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [dartDefine: --dart-define] flag to the compiler, as noted in the code sample.',
+ 'You can configure the [code: SENTRY_DSN], [code: SENTRY_RELEASE], [code: SENTRY_DIST], and [code: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [code: --dart-define] flag to the compiler, as noted in the code sample.',
{
- sentryDsn:
,
- sentryRelease:
,
- sentryDist:
,
- sentryEnv:
,
- dartDefine:
,
+ code:
,
}
),
},
diff --git a/static/app/gettingStartedDocs/deno/deno.spec.tsx b/static/app/gettingStartedDocs/deno/deno.spec.tsx
index f1ef4b757143e..7da88a5de9945 100644
--- a/static/app/gettingStartedDocs/deno/deno.spec.tsx
+++ b/static/app/gettingStartedDocs/deno/deno.spec.tsx
@@ -28,8 +28,5 @@ describe('deno onboarding docs', function () {
expect(
screen.queryByText(textWithMarkupMatcher(/tracesSampleRate: 1\.0,/))
).not.toBeInTheDocument();
-
- // Renders next steps
- expect(screen.getByRole('link', {name: 'Tracing'})).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/deno/deno.tsx b/static/app/gettingStartedDocs/deno/deno.tsx
index 700f25d5575ee..2303cf885e5a7 100644
--- a/static/app/gettingStartedDocs/deno/deno.tsx
+++ b/static/app/gettingStartedDocs/deno/deno.tsx
@@ -101,19 +101,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params =>
- params.isPerformanceSelected
- ? []
- : [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/bun/tracing/',
- },
- ],
+ nextSteps: () => [],
};
const customMetricsOnboarding: OnboardingConfig = {
@@ -121,10 +109,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need a minimum version [codeVersion:7.91.0] of [codePackage:@sentry/deno].',
+ 'You need a minimum version [code:7.91.0] of [code:@sentry/deno].',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -148,13 +135,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges]. These are available under the [codeNamespace:Sentry.metrics] namespace. This API is available in both renderer and main processes. Try out this example:",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges]. These are available under the [code:Sentry.metrics] namespace. This API is available in both renderer and main processes. Try out this example:",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/dotnet/aspnet.tsx b/static/app/gettingStartedDocs/dotnet/aspnet.tsx
index 726718e1f089d..394a6781c2be1 100644
--- a/static/app/gettingStartedDocs/dotnet/aspnet.tsx
+++ b/static/app/gettingStartedDocs/dotnet/aspnet.tsx
@@ -123,10 +123,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'You should [initCode:init] the Sentry SDK as soon as possible during your application load by adding Sentry to [globalCode:Global.asax.cs]:',
+ 'You should [code:init] the Sentry SDK as soon as possible during your application load by adding Sentry to [code:Global.asax.cs]:',
{
- initCode:
,
- globalCode:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx b/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
index f6d5186be5696..963e25db4e37c 100644
--- a/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
+++ b/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
@@ -169,10 +169,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Add Sentry to [programCode:Program.cs] through the [webHostCode:WebHostBuilder]:',
+ 'Add Sentry to [code:Program.cs] through the [code:WebHostBuilder]:',
{
- webHostCode:
,
- programCode:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/dotnet/dotnet.tsx b/static/app/gettingStartedDocs/dotnet/dotnet.tsx
index 8f773e4e7b481..084f27e5ac48b 100644
--- a/static/app/gettingStartedDocs/dotnet/dotnet.tsx
+++ b/static/app/gettingStartedDocs/dotnet/dotnet.tsx
@@ -201,10 +201,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Initialize the SDK as early as possible. For example, call [sentrySdkCode:SentrySdk.Init] in your [programCode:Program.cs] file:',
+ 'Initialize the SDK as early as possible. For example, call [code:SentrySdk.Init] in your [code:Program.cs] file:',
{
- sentrySdkCode:
,
- programCode:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/dotnet/gcpfunctions.tsx b/static/app/gettingStartedDocs/dotnet/gcpfunctions.tsx
index 65c9739e072ae..e1eda418ff183 100644
--- a/static/app/gettingStartedDocs/dotnet/gcpfunctions.tsx
+++ b/static/app/gettingStartedDocs/dotnet/gcpfunctions.tsx
@@ -127,10 +127,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Then, add Sentry to the [functionCode:Function] class through [functionStartupCode:FunctionsStartup]:',
+ 'Then, add Sentry to the [code:Function] class through [code:FunctionsStartup]:',
{
- functionCode:
,
- functionStartupCode:
,
+ code:
,
}
),
configurations: [
@@ -143,8 +142,8 @@ const onboarding: OnboardingConfig = {
description: (
{tct(
- "Additionally, you'll need to set up your [sentryCode:Sentry] settings on [appsettingsCode:appsettings.json]:",
- {sentryCode:
, appsettingsCode:
}
+ "Additionally, you'll need to set up your [code:Sentry] settings on [code:appsettings.json]:",
+ {code:
}
)}
),
diff --git a/static/app/gettingStartedDocs/dotnet/maui.tsx b/static/app/gettingStartedDocs/dotnet/maui.tsx
index 043f7732baa3a..cd8f27272f9e4 100644
--- a/static/app/gettingStartedDocs/dotnet/maui.tsx
+++ b/static/app/gettingStartedDocs/dotnet/maui.tsx
@@ -199,10 +199,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Then add Sentry to [mauiProgram:MauiProgram.cs] through the [mauiAppBuilderCode:MauiAppBuilder]:',
+ 'Then add Sentry to [code:MauiProgram.cs] through the [code:MauiAppBuilder]:',
{
- mauiAppBuilderCode:
,
- mauiProgram:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/dotnet/xamarin.tsx b/static/app/gettingStartedDocs/dotnet/xamarin.tsx
index dffb8cf01de86..362d09e3c43d8 100644
--- a/static/app/gettingStartedDocs/dotnet/xamarin.tsx
+++ b/static/app/gettingStartedDocs/dotnet/xamarin.tsx
@@ -131,11 +131,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Initialize the SDK as early as possible, like in the constructor of the [appCode:App], and Add [sentryXamarinFormsIntegrationCode:SentryXamarinFormsIntegration] as a new Integration to [sentryXamarinOptionsCode:SentryXamarinOptions] if you are going to run your app with Xamarin Forms:',
+ 'Initialize the SDK as early as possible, like in the constructor of the [code:App], and Add [code:SentryXamarinFormsIntegration] as a new Integration to [code:SentryXamarinOptions] if you are going to run your app with Xamarin Forms:',
{
- appCode:
,
- sentryXamarinFormsIntegrationCode:
,
- sentryXamarinOptionsCode:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/electron/electron.tsx b/static/app/gettingStartedDocs/electron/electron.tsx
index a2a10c7f3c0a7..70b1e3d35ab0a 100644
--- a/static/app/gettingStartedDocs/electron/electron.tsx
+++ b/static/app/gettingStartedDocs/electron/electron.tsx
@@ -77,12 +77,10 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- `You need to call [codeInit:Sentry.init] in the [codeMain:main] process and in every [codeRenderer:renderer] process you spawn.
+ `You need to call [code:Sentry.init] in the [code:main] process and in every [code:renderer] process you spawn.
For more details about configuring the Electron SDK [docsLink:click here].`,
{
- codeInit:
,
- codeMain:
,
- codeRenderer:
,
+ code:
,
docsLink: (
),
@@ -178,10 +176,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need a minimum version [codeVersion:4.17.0] of [codePackage:@sentry/electron].',
+ 'You need a minimum version [code:4.17.0] of [code:@sentry/electron].',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -205,13 +202,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges]. These are available under the [codeNamespace:Sentry.metrics] namespace. This API is available in both renderer and main processes.",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges]. These are available under the [code:Sentry.metrics] namespace. This API is available in both renderer and main processes.",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/elixir/elixir.tsx b/static/app/gettingStartedDocs/elixir/elixir.tsx
index 85228efd8e100..689819e8801ca 100644
--- a/static/app/gettingStartedDocs/elixir/elixir.tsx
+++ b/static/app/gettingStartedDocs/elixir/elixir.tsx
@@ -70,8 +70,8 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Edit your [mixCode:mix.exs] file to add it as a dependency and add the [sentryCode::sentry] package to your applications:',
- {sentryCode:
, mixCode:
}
+ 'Edit your [code:mix.exs] file to add it as a dependency and add the [code::sentry] package to your applications:',
+ {code:
}
),
configurations: [
{
@@ -111,10 +111,9 @@ const onboarding: OnboardingConfig = {
{tct(
- 'You can capture errors in Plug (and Phoenix) applications with [plugContext:Sentry.PlugContext] and [plugCapture:Sentry.PlugCapture]:',
+ 'You can capture errors in Plug (and Phoenix) applications with [code:Sentry.PlugContext] and [code:Sentry.PlugCapture]:',
{
- plugContext:
,
- plugCapture:
,
+ code:
,
}
)}
@@ -127,10 +126,9 @@ const onboarding: OnboardingConfig = {
},
],
additionalInfo: tct(
- '[sentryPlugContextCode:Sentry.PlugContext] gathers the contextual information for errors, and [sentryPlugCaptureCode:Sentry.PlugCapture] captures and sends any errors that occur in the Plug stack.',
+ '[code:Sentry.PlugContext] gathers the contextual information for errors, and [code:Sentry.PlugCapture] captures and sends any errors that occur in the Plug stack.',
{
- sentryPlugCaptureCode:
,
- sentryPlugContextCode:
,
+ code:
,
}
),
},
diff --git a/static/app/gettingStartedDocs/flutter/flutter.tsx b/static/app/gettingStartedDocs/flutter/flutter.tsx
index 2a3441b31e808..28defb7312558 100644
--- a/static/app/gettingStartedDocs/flutter/flutter.tsx
+++ b/static/app/gettingStartedDocs/flutter/flutter.tsx
@@ -1,3 +1,7 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import {Alert} from 'sentry/components/alert';
import ExternalLink from 'sentry/components/links/externalLink';
import Link from 'sentry/components/links/link';
import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step';
@@ -19,7 +23,6 @@ dependencies:
sentry_flutter: ^${getPackageVersion(params, 'sentry.dart.flutter', '7.8.0')}`;
const getConfigureSnippet = (params: Params) => `
-import 'package:flutter/widgets.dart';
import 'package:sentry_flutter/sentry_flutter.dart';
Future main() async {
@@ -41,37 +44,42 @@ Future main() async {
: ''
}
},
- appRunner: () => runApp(MyApp()),
+ appRunner: () => runApp(const MyApp()),
);
// or define SENTRY_DSN via Dart environment variable (--dart-define)
}`;
-const getVerifySnippet = () => `
-import 'package:sentry/sentry.dart';
+const configureAdditionalInfo = tct(
+ 'You can configure the [code: SENTRY_DSN], [code: SENTRY_RELEASE], [code: SENTRY_DIST], and [code: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [code: --dart-define] flag to the compiler, as noted in the code sample.',
+ {
+ code:
,
+ }
+);
-try {
- aMethodThatMightFail();
-} catch (exception, stackTrace) {
- await Sentry.captureException(
- exception,
- stackTrace: stackTrace,
- );
-}`;
+const getVerifySnippet = () => `
+child: ElevatedButton(
+ onPressed: () {
+ throw Exception('This is test exception');
+ },
+ child: const Text('Verify Sentry Setup'),
+)
+`;
const getPerformanceSnippet = () => `
import 'package:sentry/sentry.dart';
-import { getPackageVersion } from 'sentry/utils/gettingStartedDocs/getPackageVersion';
-final transaction = Sentry.startTransaction('processOrderBatch()', 'task');
+void execute() async {
+ final transaction = Sentry.startTransaction('processOrderBatch()', 'task');
-try {
- await processOrderBatch(transaction);
-} catch (exception) {
- transaction.throwable = exception;
- transaction.status = SpanStatus.internalError();
-} finally {
- await transaction.finish();
+ try {
+ await processOrderBatch(transaction);
+ } catch (exception) {
+ transaction.throwable = exception;
+ transaction.status = const SpanStatus.internalError();
+ } finally {
+ await transaction.finish();
+ }
}
Future processOrderBatch(ISentrySpan span) async {
@@ -82,7 +90,7 @@ Future processOrderBatch(ISentrySpan span) async {
// omitted code
} catch (exception) {
innerSpan.throwable = exception;
- innerSpan.status = SpanStatus.notFound();
+ innerSpan.status = const SpanStatus.notFound();
} finally {
await innerSpan.finish();
}
@@ -106,10 +114,9 @@ const metricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need Sentry Flutter SDK version [codeVersion:7.19.0] or higher. Learn more about installation methods in our [docsLink:full documentation].',
+ 'You need Sentry Flutter SDK version [code:7.19.0] or higher. Learn more about installation methods in our [docsLink:full documentation].',
{
- package:
,
- codeVersion:
,
+ code:
,
docsLink: ,
}
),
@@ -146,13 +153,9 @@ const metricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges]. These are available under the [codeNamespace:Sentry.metrics()] namespace.",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges]. These are available under the [code:Sentry.metrics()] namespace.",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
@@ -220,9 +223,16 @@ const onboarding: OnboardingConfig = {
),
configurations: [
{
- language: 'yml',
- partialLoading: params.sourcePackageRegistries?.isLoading,
- code: getInstallSnippet(params),
+ code: [
+ {
+ label: 'YAML',
+ value: 'yaml',
+ language: 'yaml',
+ filename: 'pubspec.yaml',
+ partialLoading: params.sourcePackageRegistries?.isLoading,
+ code: getInstallSnippet(params),
+ },
+ ],
},
],
},
@@ -244,17 +254,26 @@ const onboarding: OnboardingConfig = {
]
: []),
{
- language: 'dart',
- code: getConfigureSnippet(params),
- additionalInfo: tct(
- 'You can configure the [sentryDsn: SENTRY_DSN], [sentryRelease: SENTRY_RELEASE], [sentryDist: SENTRY_DIST], and [sentryEnv: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [dartDefine: --dart-define] flag to the compiler, as noted in the code sample.',
+ code: [
{
- sentryDsn:
,
- sentryRelease:
,
- sentryDist:
,
- sentryEnv:
,
- dartDefine:
,
- }
+ label: 'Dart',
+ value: 'dart',
+ language: 'dart',
+ filename: 'main.dart',
+ code: getConfigureSnippet(params),
+ },
+ ],
+ additionalInfo: params.isPerformanceSelected ? (
+
+ {configureAdditionalInfo}
+
+ {t(
+ 'To monitor performance, you need to add extra instrumentation as described in the Tracing section below.'
+ )}
+
+
+ ) : (
+ configureAdditionalInfo
),
},
],
@@ -264,18 +283,18 @@ const onboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: t(
- 'Create an intentional error, so you can test that everything is working:'
+ 'Create an intentional error, so you can test that everything is working. In the example below, pressing the button will throw an exception:'
),
configurations: [
{
- language: 'dart',
- code: getVerifySnippet(),
- additionalInfo: tct(
- "If you're new to Sentry, use the email alert to access your account and complete a product tour.[break] If you're an existing user and have disabled alerts, you won't receive this email.",
+ code: [
{
- break: ,
- }
- ),
+ label: 'Dart',
+ value: 'dart',
+ language: 'dart',
+ code: getVerifySnippet(),
+ },
+ ],
},
],
},
@@ -288,10 +307,16 @@ const onboarding: OnboardingConfig = {
),
configurations: [
{
- language: 'dart',
- code: getPerformanceSnippet(),
+ code: [
+ {
+ label: 'Dart',
+ value: 'dart',
+ language: 'dart',
+ code: getPerformanceSnippet(),
+ },
+ ],
additionalInfo: tct(
- 'To learn more about the API and automatic instrumentations, check out the [perfDocs: performance documentation].',
+ 'To learn more about the API and automatic instrumentations, check out the [perfDocs: tracing documentation].',
{
perfDocs: (
@@ -330,3 +355,7 @@ const docs: Docs = {
};
export default docs;
+
+const AlertWithoutMarginBottom = styled(Alert)`
+ margin-bottom: 0;
+`;
diff --git a/static/app/gettingStartedDocs/go/echo.tsx b/static/app/gettingStartedDocs/go/echo.tsx
index 301e905d8febe..173b604def975 100644
--- a/static/app/gettingStartedDocs/go/echo.tsx
+++ b/static/app/gettingStartedDocs/go/echo.tsx
@@ -156,8 +156,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryEchoCode:sentryecho] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryEchoCode:
, optionsCode:
}
+ '[code:sentryecho] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -174,23 +174,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryEchoCode:sentryecho] attaches an instance of [sentryHubLink:*sentry.Hub] to the [echoContextCode:echo.Context], which makes it available throughout the rest of the request's lifetime. You can access it by using the [getHubFromContextCode:sentryecho.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException] or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentryecho] attaches an instance of [sentryHubLink:*sentry.Hub] to the [code:echo.Context], which makes it available throughout the rest of the request's lifetime. You can access it by using the [code:sentryecho.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException] or any other calls, as it keeps the separation of data between the requests.",
{
- sentryEchoCode:
,
+ code:
,
sentryHubLink: (
),
- echoContextCode:
,
- getHubFromContextCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryEchoCode:sentryecho]!",
- {sentryEchoCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentryecho]!",
+ {code:
}
)}
diff --git a/static/app/gettingStartedDocs/go/fasthttp.tsx b/static/app/gettingStartedDocs/go/fasthttp.tsx
index b95cf03c68b3d..1c66d8fa9e4cb 100644
--- a/static/app/gettingStartedDocs/go/fasthttp.tsx
+++ b/static/app/gettingStartedDocs/go/fasthttp.tsx
@@ -161,8 +161,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryfasthttpCode:sentryfasthttp] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryfasthttpCode:
, optionsCode:
}
+ '[code:sentryfasthttp] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -179,22 +179,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryfasthttpCode:sentryfasthttp] attaches an instance of [sentryHubLink:*sentry.Hub] to the request's context, which makes it available throughout the rest of the request's lifetime. You can access it by using the [getHubFromContextCode:sentryfasthttp.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentryfasthttp] attaches an instance of [sentryHubLink:*sentry.Hub] to the request's context, which makes it available throughout the rest of the request's lifetime. You can access it by using the [code:sentryfasthttp.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
{
- sentryfasthttpCode:
,
sentryHubLink: (
),
- getHubFromContextCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
+ code:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryfasthttpCode:sentryfasthttp]!",
- {sentryfasthttpCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentryfasthttp]!",
+ {code:
}
)}
diff --git a/static/app/gettingStartedDocs/go/fiber.tsx b/static/app/gettingStartedDocs/go/fiber.tsx
index 7701136e3f0e3..e2a48887e3700 100644
--- a/static/app/gettingStartedDocs/go/fiber.tsx
+++ b/static/app/gettingStartedDocs/go/fiber.tsx
@@ -166,8 +166,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryFiberCode:sentryfiber] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryFiberCode:
, optionsCode:
}
+ '[code:sentryfiber] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -184,23 +184,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryFiberCode:sentryfiber] attaches an instance of [sentryHubLink:*sentry.Hub] to the [fiberContextCode:*fiber.Ctx], which makes it available throughout the rest of the request's lifetime. You can access it by using the [getHubFromContextCode:sentryfiber.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException] or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentryfiber] attaches an instance of [sentryHubLink:*sentry.Hub] to the [code:*fiber.Ctx], which makes it available throughout the rest of the request's lifetime. You can access it by using the [code:sentryfiber.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException] or any other calls, as it keeps the separation of data between the requests.",
{
- sentryFiberCode:
,
+ code:
,
sentryHubLink: (
),
- fiberContextCode:
,
- getHubFromContextCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryFiberCode:sentryfiber]!",
- {sentryFiberCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentryfiber]!",
+ {code:
}
)}
diff --git a/static/app/gettingStartedDocs/go/gin.tsx b/static/app/gettingStartedDocs/go/gin.tsx
index 66c6ce3ab574d..3fc2d7edbfc01 100644
--- a/static/app/gettingStartedDocs/go/gin.tsx
+++ b/static/app/gettingStartedDocs/go/gin.tsx
@@ -148,8 +148,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryGinCode:sentrygin] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryGinCode:
, optionsCode:
}
+ '[code:sentrygin] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -166,23 +166,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryGinCode:sentrygin] attaches an instance of [sentryHubLink:*sentry.Hub] to the [ginContextCode:*gin.Context], which makes it available throughout the rest of the request's lifetime. You can access it by using the [getHubFromContextCode:sentrygin.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentrygin] attaches an instance of [sentryHubLink:*sentry.Hub] to the [code:*gin.Context], which makes it available throughout the rest of the request's lifetime. You can access it by using the [code:sentrygin.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
{
- sentryGinCode:
,
+ code:
,
sentryHubLink: (
),
- ginContextCode:
,
- getHubFromContextCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryGinCode:sentrygin]!",
- {sentryGinCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentrygin]!",
+ {code:
}
)}
diff --git a/static/app/gettingStartedDocs/go/http.tsx b/static/app/gettingStartedDocs/go/http.tsx
index 89d939a31e352..dba8624e4517e 100644
--- a/static/app/gettingStartedDocs/go/http.tsx
+++ b/static/app/gettingStartedDocs/go/http.tsx
@@ -156,8 +156,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryHttpCode:sentryhttp] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryHttpCode:
, optionsCode:
}
+ '[code:sentryhttp] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -174,22 +174,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryHttpCode:sentryhttp] attaches an instance of [sentryHubLink:*sentry.Hub] to the request's context, which makes it available throughout the rest of the request's lifetime. You can access it by using the [getHubFromContextCode:sentry.GetHubFromContext()] method on the request itself in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentryhttp] attaches an instance of [sentryHubLink:*sentry.Hub] to the request's context, which makes it available throughout the rest of the request's lifetime. You can access it by using the [code:sentry.GetHubFromContext()] method on the request itself in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
{
- sentryHttpCode:
,
+ code:
,
sentryHubLink: (
),
- getHubFromContextCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryHttpCode:sentryhttp]!",
- {sentryHttpCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentryhttp]!",
+ {code:
}
)}
diff --git a/static/app/gettingStartedDocs/go/iris.tsx b/static/app/gettingStartedDocs/go/iris.tsx
index 13e09682cb1a5..0e6a34e74b9fe 100644
--- a/static/app/gettingStartedDocs/go/iris.tsx
+++ b/static/app/gettingStartedDocs/go/iris.tsx
@@ -145,8 +145,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryirisCode:sentryiris] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryirisCode:
, optionsCode:
}
+ '[code:sentryiris] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -163,23 +163,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryirisCode:sentryiris] attaches an instance of [sentryHubLink:*sentry.Hub] to the [irisContextCode:iris.Context], which makes it available throughout the rest of the request's lifetime. You can access it by using the [getHubFromContextCode:sentryiris.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentryiris] attaches an instance of [sentryHubLink:*sentry.Hub] to the [code:iris.Context], which makes it available throughout the rest of the request's lifetime. You can access it by using the [code:sentryiris.GetHubFromContext()] method on the context itself in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
{
- sentryirisCode:
,
+ code:
,
sentryHubLink: (
),
- irisContextCode:
,
- getHubFromContextCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryirisCode:sentryiris]!",
- {sentryirisCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentryiris]!",
+ {code:
}
)}
diff --git a/static/app/gettingStartedDocs/go/martini.tsx b/static/app/gettingStartedDocs/go/martini.tsx
index 590ab9c785ff3..63b00ea361dd7 100644
--- a/static/app/gettingStartedDocs/go/martini.tsx
+++ b/static/app/gettingStartedDocs/go/martini.tsx
@@ -143,8 +143,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryMartiniCode:sentrymartini] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryMartiniCode:
, optionsCode:
}
+ '[code:sentrymartini] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -161,22 +161,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryMartiniCode:sentrymartini] maps an instance of [sentryHubLink:*sentry.Hub] as one of the services available throughout the rest of the request's lifetime. You can access it by providing a hub [sentryHubCode:*sentry.Hub] parameter in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentrymartini] maps an instance of [sentryHubLink:*sentry.Hub] as one of the services available throughout the rest of the request's lifetime. You can access it by providing a hub [code:*sentry.Hub] parameter in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
{
- sentryMartiniCode:
,
+ code:
,
sentryHubLink: (
),
- sentryHubCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryMartiniCode:sentrymartini]!",
- {sentryMartiniCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentrymartini]!",
+ {code:
}
)}
diff --git a/static/app/gettingStartedDocs/go/negroni.tsx b/static/app/gettingStartedDocs/go/negroni.tsx
index beec048926688..ac3c384214311 100644
--- a/static/app/gettingStartedDocs/go/negroni.tsx
+++ b/static/app/gettingStartedDocs/go/negroni.tsx
@@ -170,8 +170,8 @@ const onboarding: OnboardingConfig = {
{t('Options')}
{tct(
- '[sentryNegroniCode:sentrynegroni] accepts a struct of [optionsCode:Options] that allows you to configure how the handler will behave.',
- {sentryNegroniCode:
, optionsCode:
}
+ '[code:sentrynegroni] accepts a struct of [code:Options] that allows you to configure how the handler will behave.',
+ {code:
}
)}
{t('Currently it respects 3 options:')}
@@ -188,22 +188,19 @@ const onboarding: OnboardingConfig = {
{tct(
- "[sentryNegroniCode:sentrynegroni] attaches an instance of [sentryHubLink:*sentry.Hub] to the request's context, which makes it available throughout the rest of the request's lifetime. You can access it by using the [getHubFromContextCode:sentry.GetHubFromContext()] method on the request itself in any of your proceeding middleware and routes. And it should be used instead of the global [captureMessageCode:sentry.CaptureMessage], [captureExceptionCode:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
+ "[code:sentrynegroni] attaches an instance of [sentryHubLink:*sentry.Hub] to the request's context, which makes it available throughout the rest of the request's lifetime. You can access it by using the [code:sentry.GetHubFromContext()] method on the request itself in any of your proceeding middleware and routes. And it should be used instead of the global [code:sentry.CaptureMessage], [code:sentry.CaptureException], or any other calls, as it keeps the separation of data between the requests.",
{
- sentryNegroniCode:
,
+ code:
,
sentryHubLink: (
),
- getHubFromContextCode:
,
- captureMessageCode:
,
- captureExceptionCode:
,
}
)}
{tct(
- "Keep in mind that [sentryHubCode:*sentry.Hub] won't be available in middleware attached before [sentryNegroniCode:sentrynegroni]!",
- {sentryNegroniCode:
, sentryHubCode:
}
+ "Keep in mind that [code:*sentry.Hub] won't be available in middleware attached before [code:sentrynegroni]!",
+ {code:
}
)}
@@ -232,10 +229,9 @@ const onboarding: OnboardingConfig = {
{tct(
- "Negroni provides an option called [panicHandlerFuncCode:PanicHandlerFunc], which lets you 'plug-in' to its default [recoveryCode:Recovery] middleware.",
+ "Negroni provides an option called [code:PanicHandlerFunc], which lets you 'plug-in' to its default [code:Recovery] middleware.",
{
- panicHandlerFuncCode:
,
- recoveryCode:
,
+ code:
,
}
)}
diff --git a/static/app/gettingStartedDocs/java/java.tsx b/static/app/gettingStartedDocs/java/java.tsx
index 8b78d6cadc16d..f236144f27b65 100644
--- a/static/app/gettingStartedDocs/java/java.tsx
+++ b/static/app/gettingStartedDocs/java/java.tsx
@@ -286,14 +286,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/tracing/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/java/log4j2.tsx b/static/app/gettingStartedDocs/java/log4j2.tsx
index 5b870971febcb..61b6c97ab2b87 100644
--- a/static/app/gettingStartedDocs/java/log4j2.tsx
+++ b/static/app/gettingStartedDocs/java/log4j2.tsx
@@ -243,11 +243,9 @@ const onboarding: OnboardingConfig = {
{
language: 'xml',
description: tct(
- 'The following example using the [log4j2Code:log4j2.xml] format to configure a [sentryConsoleAppenderCode:ConsoleAppender] that logs to standard out at the INFO level, and a [sentryAppenderCode:SentryAppender] that logs to the Sentry server at the ERROR level.',
+ 'The following example using the [code:log4j2.xml] format to configure a [code:ConsoleAppender] that logs to standard out at the INFO level, and a [code:SentryAppender] that logs to the Sentry server at the ERROR level.',
{
- log4j2Code:
,
- sentryConsoleAppenderCode:
,
- sentryAppenderCode:
,
+ code:
,
}
),
code: getConsoleAppenderSnippet(params),
diff --git a/static/app/gettingStartedDocs/java/spring-boot.tsx b/static/app/gettingStartedDocs/java/spring-boot.tsx
index 867223524b57c..7acd0784ea000 100644
--- a/static/app/gettingStartedDocs/java/spring-boot.tsx
+++ b/static/app/gettingStartedDocs/java/spring-boot.tsx
@@ -216,10 +216,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Open up [applicationPropertiesCode:src/main/application.properties] (or [applicationYmlCode:src/main/application.yml]) and configure the DSN, and any other settings you need:',
+ 'Open up [code:src/main/application.properties] (or [code:src/main/application.yml]) and configure the DSN, and any other settings you need:',
{
- applicationPropertiesCode:
,
- applicationYmlCode:
,
+ code:
,
}
),
configurations: [
@@ -289,14 +288,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/guides/spring-boot/tracing/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/java/spring.tsx b/static/app/gettingStartedDocs/java/spring.tsx
index ef8641dd48633..024aab936db9d 100644
--- a/static/app/gettingStartedDocs/java/spring.tsx
+++ b/static/app/gettingStartedDocs/java/spring.tsx
@@ -361,14 +361,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/guides/spring/tracing/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/angular.tsx b/static/app/gettingStartedDocs/javascript/angular.tsx
index 9745e2d4b671d..a79d23c9743cd 100644
--- a/static/app/gettingStartedDocs/javascript/angular.tsx
+++ b/static/app/gettingStartedDocs/javascript/angular.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage';
@@ -29,7 +31,6 @@ import {
getReplayConfigureDescription,
getReplayVerifyStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding';
-import {ProductSolution} from 'sentry/components/onboarding/productSelection';
import {t, tct} from 'sentry/locale';
export enum AngularConfigType {
@@ -246,30 +247,6 @@ function getVerifyConfiguration(): Configuration {
};
}
-const getNextStep = (
- params: Params
-): {
- description: string;
- id: string;
- link: string;
- name: string;
-}[] => {
- let nextStepDocs = [...nextSteps];
-
- if (params.isPerformanceSelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.PERFORMANCE_MONITORING
- );
- }
-
- if (params.isReplaySelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.SESSION_REPLAY
- );
- }
- return nextStepDocs;
-};
-
const getInstallConfig = () => [
{
language: 'bash',
@@ -297,7 +274,19 @@ const getInstallConfig = () => [
];
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct(
+ 'In this quick guide you’ll use [strong:npm], [strong:yarn] or [strong:pnpm] to set up:',
+ {
+ strong: ,
+ }
+ )}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
@@ -360,43 +349,26 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: (params: Params) => getNextStep(params),
+ nextSteps: () => [
+ {
+ id: 'angular-features',
+ name: t('Angular Features'),
+ description: t(
+ 'Learn about our first class integration with the Angular framework.'
+ ),
+ link: 'https://docs.sentry.io/platforms/javascript/guides/angular/features/',
+ },
+ ],
};
-export const nextSteps = [
- {
- id: 'angular-features',
- name: t('Angular Features'),
- description: t('Learn about our first class integration with the Angular framework.'),
- link: 'https://docs.sentry.io/platforms/javascript/guides/angular/features/',
- },
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/angular/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/angular/session-replay/',
- },
-];
-
const replayOnboarding: OnboardingConfig = {
install: () => [
{
type: StepType.INSTALL,
description: tct(
- 'In order to use Session Replay, you will need version 7.27.0 of [codeAngular:@sentry/angular] at minimum. You do not need to install any additional packages.',
+ 'In order to use Session Replay, you will need version 7.27.0 of [code:@sentry/angular] at minimum. You do not need to install any additional packages.',
{
- codeAngular:
,
- codeIvy:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -432,10 +404,9 @@ const feedbackOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'For the User Feedback integration to work, you must have the Sentry browser SDK package, or an equivalent framework SDK (e.g. [codeAngular:@sentry/angular]) installed, minimum version 7.85.0.',
+ 'For the User Feedback integration to work, you must have the Sentry browser SDK package, or an equivalent framework SDK (e.g. [code:@sentry/angular]) installed, minimum version 7.85.0.',
{
- codeAngular:
,
- codeIvy:
,
+ code:
,
}
),
configurations: getInstallConfig(),
diff --git a/static/app/gettingStartedDocs/javascript/astro.tsx b/static/app/gettingStartedDocs/javascript/astro.tsx
index 1861a7360c990..1c20b807619d0 100644
--- a/static/app/gettingStartedDocs/javascript/astro.tsx
+++ b/static/app/gettingStartedDocs/javascript/astro.tsx
@@ -72,10 +72,9 @@ const getInstallConfig = () => [
{
type: StepType.INSTALL,
description: tct(
- 'Install the [sentryAstroPkg:@sentry/astro] package with the [astroCli:astro] CLI:',
+ 'Install the [code:@sentry/astro] package with the [code:astro] CLI:',
{
- sentryAstroPkg:
,
- astroCli:
,
+ code:
,
}
),
configurations: [
@@ -206,22 +205,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/astro/manual-setup/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/astro/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/astro/session-replay/',
- },
],
};
@@ -235,7 +218,7 @@ const replayOnboarding: OnboardingConfig = {
],
configure: (params: Params) => [
{
- type: StepType.CONFIGURE,
+ title: 'Configure Session Replay (Optional)',
description: tct(
'There are several privacy and sampling options available. Learn more about configuring Session Replay by reading the [link:configuration docs].',
{
@@ -314,7 +297,7 @@ import * as Sentry from "@sentry/astro";`,
},
],
additionalInfo: ,
- isOptional: true,
+ collapsible: true,
},
],
verify: getReplayVerifyStep(),
diff --git a/static/app/gettingStartedDocs/javascript/ember.tsx b/static/app/gettingStartedDocs/javascript/ember.tsx
index f290d7a901d42..61174ee899e45 100644
--- a/static/app/gettingStartedDocs/javascript/ember.tsx
+++ b/static/app/gettingStartedDocs/javascript/ember.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage';
@@ -107,7 +109,16 @@ const getVerifyEmberSnippet = () => `
myUndefinedFunction();`;
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ })}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
@@ -121,10 +132,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'You should [initCode:init] the Sentry SDK as soon as possible during your application load up in [appCode:app.js], before initializing Ember:',
+ 'You should [code:init] the Sentry SDK as soon as possible during your application load up in [code:app.js], before initializing Ember:',
{
- initCode:
,
- appCode:
,
+ code:
,
}
),
configurations: [
@@ -168,24 +178,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/ember/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/ember/session-replay/',
- },
- ],
+ nextSteps: () => [],
};
const replayOnboarding: OnboardingConfig = {
diff --git a/static/app/gettingStartedDocs/javascript/gatsby.tsx b/static/app/gettingStartedDocs/javascript/gatsby.tsx
index 469fbf0f224eb..30643c39fb6a1 100644
--- a/static/app/gettingStartedDocs/javascript/gatsby.tsx
+++ b/static/app/gettingStartedDocs/javascript/gatsby.tsx
@@ -102,8 +102,8 @@ const getConfigureStep = (params: Params) => {
configurations: [
{
description: tct(
- 'Register the [codeSentry@sentry/gatsby] plugin in your Gatsby configuration file (typically [codeGatsby:gatsby-config.js]).',
- {codeSentry:
, codeGatsby:
}
+ 'Register the [code:Sentry@sentry/gatsby] plugin in your Gatsby configuration file (typically [code:gatsby-config.js]).',
+ {code:
}
),
code: [
{
@@ -159,15 +159,23 @@ const getInstallConfig = () => [
];
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ })}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
description: tct(
- 'Add the Sentry SDK as a dependency using [codeNpm:npm] or [codeYarn:yarn]:',
+ 'Add the Sentry SDK as a dependency using [code:npm] or [code:yarn]:',
{
- codeYarn:
,
- codeNpm:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -200,24 +208,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/gatsby/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/gatsby/session-replay/',
- },
- ],
+ nextSteps: () => [],
};
const replayOnboarding: OnboardingConfig = {
@@ -244,12 +235,9 @@ const replayOnboarding: OnboardingConfig = {
{tct(
- 'Note: If [codeGatsby:gatsby-config.js] has any settings for the [codeSentry:@sentry/gatsby] plugin, they need to be moved into [codeConfig:sentry.config.js]. The [codeGatsby:gatsby-config.js] file does not support non-serializable options, like [codeNew:new Replay()].',
+ 'Note: If [code:gatsby-config.js] has any settings for the [code:@sentry/gatsby] plugin, they need to be moved into [code:sentry.config.js]. The [code:gatsby-config.js] file does not support non-serializable options, like [code:new Replay()].',
{
- codeGatsby:
,
- codeSentry:
,
- codeConfig:
,
- codeNew:
,
+ code:
,
}
)}
diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx
index 4b3b2bc5a6d5c..baa5e8b01805f 100644
--- a/static/app/gettingStartedDocs/javascript/javascript.tsx
+++ b/static/app/gettingStartedDocs/javascript/javascript.tsx
@@ -197,11 +197,11 @@ const loaderScriptOnboarding: OnboardingConfig = {
],
configure: params => [
{
- type: StepType.CONFIGURE,
+ title: t('Configure SDK (Optional)'),
description: t(
"Initialize Sentry as early as possible in your application's lifecycle."
),
- isOptional: true,
+ collapsible: true,
configurations: [
{
language: 'html',
@@ -252,36 +252,12 @@ const loaderScriptOnboarding: OnboardingConfig = {
],
verify: getVerifyConfig,
nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/session-replay/',
- },
{
id: 'source-maps',
name: t('Source Maps'),
description: t('Learn how to enable readable stack traces in your Sentry errors.'),
link: 'https://docs.sentry.io/platforms/javascript/sourcemaps/',
},
- {
- id: 'sdk-configuration',
- name: t('SDK Configuration'),
- description: t(
- 'Learn about additional configuration options for the Javascript SDK.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/configuration/',
- },
],
onPageLoad: params => {
return () => {
@@ -367,24 +343,7 @@ const packageManagerOnboarding: OnboardingConfig = {
}),
],
verify: getVerifyConfig,
- nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/session-replay/',
- },
- ],
+ nextSteps: () => [],
onPageLoad: params => {
return () => {
trackAnalytics('onboarding.js_loader_npm_docs_shown', {
diff --git a/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx b/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
index dcfdac8d8671d..02259ed121e7b 100644
--- a/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
+++ b/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
@@ -64,7 +64,7 @@ const replayOnboardingJsLoader: OnboardingConfig = {
install: (params: Params) => getInstallConfig(params),
configure: (params: Params) => [
{
- type: StepType.CONFIGURE,
+ title: t('Configure Session Replay (Optional)'),
description: getReplayConfigureDescription({
link: 'https://docs.sentry.io/platforms/javascript/session-replay/',
}),
@@ -74,7 +74,7 @@ const replayOnboardingJsLoader: OnboardingConfig = {
code: getReplayJsLoaderSdkSetupSnippet(params),
},
],
- isOptional: true,
+ collapsible: true,
additionalInfo: ,
},
],
diff --git a/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx b/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
index f216a2682d729..9f24cae07b750 100644
--- a/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
@@ -11,7 +11,9 @@ describe('javascript-nextjs onboarding docs', function () {
renderWithOnboardingLayout(docs);
// Renders main headings
- expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
+ expect(
+ screen.getByRole('heading', {name: 'Automatic Configuration (Recommended)'})
+ ).toBeInTheDocument();
// Includes configure statement
expect(
@@ -19,7 +21,7 @@ describe('javascript-nextjs onboarding docs', function () {
).toBeInTheDocument();
});
- it('displays the configure instructions', () => {
+ it('displays the verify instructions', () => {
renderWithOnboardingLayout(docs, {
selectedProducts: [
ProductSolution.ERROR_MONITORING,
@@ -29,14 +31,7 @@ describe('javascript-nextjs onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry.client.config.js/))
- ).toBeInTheDocument();
- expect(screen.queryByText(textWithMarkupMatcher(/Sentry.init/))).toBeInTheDocument();
- expect(
- screen.queryByText(textWithMarkupMatcher(/.env.sentry-build-plugin/))
- ).toBeInTheDocument();
- expect(
- screen.queryByText(textWithMarkupMatcher(/instrumentation.ts/))
+ screen.queryByText(textWithMarkupMatcher(/sentry-example-page/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/javascript/nextjs.tsx b/static/app/gettingStartedDocs/javascript/nextjs.tsx
index ec8a517ec0b62..00e24eb868a25 100644
--- a/static/app/gettingStartedDocs/javascript/nextjs.tsx
+++ b/static/app/gettingStartedDocs/javascript/nextjs.tsx
@@ -33,13 +33,8 @@ import {trackAnalytics} from 'sentry/utils/analytics';
type Params = DocsParams;
-const getInstallSnippet = ({
- isSelfHosted,
- urlPrefix,
- organization,
- projectSlug,
-}: Params) => {
- const urlParam = !isSelfHosted && urlPrefix ? `--url ${urlPrefix}` : '';
+const getInstallSnippet = ({isSelfHosted, organization, projectSlug}: Params) => {
+ const urlParam = isSelfHosted ? '' : '--saas';
return `npx @sentry/wizard@latest -i nextjs ${urlParam} --org ${organization.slug} --project ${projectSlug}`;
};
@@ -47,7 +42,7 @@ const getInstallConfig = (params: Params) => {
return [
{
description: tct(
- 'Configure your app automatically with the [wizardLink:Sentry wizard].',
+ 'Configure your app automatically by running the [wizardLink:Sentry wizard] in the root of your project.',
{
wizardLink: (
@@ -83,63 +78,111 @@ const getManualInstallConfig = () => [
const onboarding: OnboardingConfig = {
install: (params: Params) => [
{
- type: StepType.INSTALL,
+ title: t('Automatic Configuration (Recommended)'),
configurations: getInstallConfig(params),
- additionalInfo: (
+ },
+ ],
+ configure: () => [
+ {
+ title: t('Manual Configuration'),
+ collapsible: true,
+ configurations: [
+ {
+ description: (
+
+
+ {tct(
+ 'Alternatively, you can also [manualSetupLink:set up the SDK manually], by following these steps:',
+ {
+ manualSetupLink: (
+
+ ),
+ }
+ )}
+
+
+
+ {tct(
+ 'Create [code:sentry.server.config.js], [code:sentry.client.config.js] and [code:sentry.edge.config.js] with the default [code:Sentry.init].',
+ {
+ code:
,
+ }
+ )}
+
+
+ {tct(
+ 'Create or update the Next.js instrumentation file [instrumentationCode:instrumentation.ts] to initialize the SDK with the configuration files added in the previous step.',
+ {
+ instrumentationCode:
,
+ }
+ )}
+
+
+ {tct(
+ 'Create or update your Next.js config [nextConfig:next.config.js] with the default Sentry configuration.',
+ {
+ nextConfig:
,
+ }
+ )}
+
+
+ {tct(
+ 'Create a [bundlerPluginsEnv:.env.sentry-build-plugin] with an auth token (which is used to upload source maps when building the application).',
+ {
+ bundlerPluginsEnv:
,
+ }
+ )}
+
+
+ {t('Add an example page to your app to verify your Sentry setup.')}
+
+
+
+ ),
+ },
+ ],
+ },
+ ],
+ verify: (params: Params) => [
+ {
+ type: StepType.VERIFY,
+ description: (
- {t(
- 'The Sentry wizard will automatically patch your application to configure the Sentry SDK:'
- )}
-
-
- {tct(
- 'Create [code:sentry.server.config.js], [code:sentry.client.config.js] and [code:sentry.edge.config.js] with the default [code:Sentry.init].',
- {
- code:
,
- }
- )}
-
-
- {tct(
- 'Create or update the Next.js instrumentation file [instrumentationCode:instrumentation.ts] to initialize the SDK with the configuration files added in the previous step.',
- {
- instrumentationCode:
,
- }
- )}
-
-
- {tct(
- 'Create or update your Next.js config [nextConfig:next.config.js] with the default Sentry configuration.',
- {
- nextConfig:
,
- }
- )}
-
-
- {tct(
- 'Create a [bundlerPluginsEnv:.env.sentry-build-plugin] with an auth token (which is used to upload source maps when building the application).',
- {
- bundlerPluginsEnv:
,
- }
- )}
-
-
- {t('Add an example page to your app to verify your Sentry setup.')}
-
-
-
- {t('Manual Setup')}
{tct(
- 'Alternatively, you can also [manualSetupLink:set up the SDK manually].',
+ 'Start your development server and visit [code:/sentry-example-page] if you have set it up. Click the button to trigger a test error.',
{
- manualSetupLink: (
-
- ),
+ code:
,
}
)}
-
+
+ {t(
+ 'Or, trigger a sample error by calling a function that does not exist somewhere in your application.'
+ )}
+
+
+ ),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'Javascript',
+ value: 'javascript',
+ language: 'javascript',
+ code: `myUndefinedFunction();`,
+ },
+ ],
+ },
+ ],
+ additionalInfo: (
+
+
+ {t(
+ 'If you see an issue in your Sentry dashboard, you have successfully set up Sentry with Next.js.'
+ )}
+
+
{tct(
@@ -165,8 +208,6 @@ const onboarding: OnboardingConfig = {
),
},
],
- configure: () => [],
- verify: () => [],
};
const replayOnboarding: OnboardingConfig = {
@@ -391,13 +432,17 @@ const DSNText = styled('div')`
margin-bottom: ${space(0.5)};
`;
-const ManualSetupTitle = styled('p')`
- font-size: ${p => p.theme.fontSizeLarge};
- font-weight: ${p => p.theme.fontWeightBold};
-`;
-
const AdditionalInfoWrapper = styled('div')`
display: flex;
flex-direction: column;
gap: ${space(2)};
`;
+
+const Divider = styled('hr')`
+ height: 1px;
+ width: 100%;
+ background: ${p => p.theme.border};
+ border: none;
+ margin-top: ${space(1)};
+ margin-bottom: ${space(2)};
+`;
diff --git a/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
new file mode 100644
index 0000000000000..ab3a3f8578e52
--- /dev/null
+++ b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
@@ -0,0 +1,85 @@
+import {renderWithOnboardingLayout} from 'sentry-test/onboarding/renderWithOnboardingLayout';
+import {screen} from 'sentry-test/reactTestingLibrary';
+import {textWithMarkupMatcher} from 'sentry-test/utils';
+
+import {ProductSolution} from 'sentry/components/onboarding/productSelection';
+
+import docs from './nuxt';
+
+describe('javascript-nuxt onboarding docs', function () {
+ it('renders onboarding docs correctly', () => {
+ renderWithOnboardingLayout(docs);
+
+ // Renders main headings
+ expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Upload Source Maps'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Verify'})).toBeInTheDocument();
+
+ // Includes 2 import statements
+ expect(
+ screen.getAllByText(
+ textWithMarkupMatcher(/import \* as Sentry from "@sentry\/nuxt"/)
+ )
+ ).toHaveLength(2);
+ });
+
+ it('displays sample rates by default', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [
+ ProductSolution.ERROR_MONITORING,
+ ProductSolution.PERFORMANCE_MONITORING,
+ ProductSolution.SESSION_REPLAY,
+ ],
+ });
+
+ expect(screen.getAllByText(textWithMarkupMatcher(/tracesSampleRate/))).toHaveLength(
+ 2
+ ); // client and server
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ ).toBeInTheDocument(); // only client
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ ).toBeInTheDocument(); // only client
+ });
+
+ it('enables performance setting the tracesSampleRate to 1', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [
+ ProductSolution.ERROR_MONITORING,
+ ProductSolution.PERFORMANCE_MONITORING,
+ ],
+ });
+
+ expect(
+ screen.getAllByText(textWithMarkupMatcher(/tracesSampleRate: 1\.0/))
+ ).toHaveLength(2);
+ });
+
+ it('enables replay by setting replay samplerates', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [
+ ProductSolution.ERROR_MONITORING,
+ ProductSolution.SESSION_REPLAY,
+ ],
+ });
+
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate: 0\.1/))
+ ).toBeInTheDocument();
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate: 1\.0/))
+ ).toBeInTheDocument();
+ });
+
+ it('enables profiling by setting profiling sample rates', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [ProductSolution.ERROR_MONITORING, ProductSolution.PROFILING],
+ });
+
+ expect(
+ screen.getAllByText(textWithMarkupMatcher(/profilesSampleRate: 1\.0/))
+ ).toHaveLength(2);
+ });
+});
diff --git a/static/app/gettingStartedDocs/javascript/nuxt.tsx b/static/app/gettingStartedDocs/javascript/nuxt.tsx
new file mode 100644
index 0000000000000..6f164453c3dae
--- /dev/null
+++ b/static/app/gettingStartedDocs/javascript/nuxt.tsx
@@ -0,0 +1,384 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import Alert from 'sentry/components/alert';
+import ExternalLink from 'sentry/components/links/externalLink';
+import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
+import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
+import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage';
+import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step';
+import type {
+ Docs,
+ DocsParams,
+ OnboardingConfig,
+} from 'sentry/components/onboarding/gettingStartedDoc/types';
+import {
+ getCrashReportJavaScriptInstallStep,
+ getCrashReportModalConfigDescription,
+ getCrashReportModalIntroduction,
+ getFeedbackConfigureDescription,
+} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
+import {getJSMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
+import {MaybeBrowserProfilingBetaWarning} from 'sentry/components/onboarding/gettingStartedDoc/utils/profilingOnboarding';
+import {
+ getReplayConfigOptions,
+ getReplayConfigureDescription,
+} from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding';
+import {t, tct} from 'sentry/locale';
+
+type Params = DocsParams;
+
+const getNuxtModuleSnippet = () => `
+export default defineNuxtConfig({
+ modules: ["@sentry/nuxt/module"],
+});
+`;
+
+const getSdkClientSetupSnippet = (params: Params) => `
+import * as Sentry from "@sentry/nuxt";
+
+Sentry.init({
+ // If set up, you can use your runtime config here
+ // dsn: useRuntimeConfig().public.sentry.dsn,
+ dsn: "${params.dsn.public}",${
+ params.isReplaySelected
+ ? `
+ integrations: [Sentry.replayIntegration(${getReplayConfigOptions(params.replayOptions)})],`
+ : ''
+ }${
+ params.isPerformanceSelected
+ ? `
+ // Tracing
+ // We recommend adjusting this value in production, or using a tracesSampler for finer control.
+ tracesSampleRate: 1.0, // Capture 100% of the transactions
+ // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
+ tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],`
+ : ''
+ }${
+ params.isReplaySelected
+ ? `
+ // Session Replay
+ replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
+ replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.`
+ : ''
+ }${
+ params.isProfilingSelected
+ ? `
+ // Set profilesSampleRate to 1.0 to profile every transaction.
+ // Since profilesSampleRate is relative to tracesSampleRate,
+ // the final profiling rate can be computed as tracesSampleRate * profilesSampleRate
+ // For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would
+ // results in 25% of transactions being profiled (0.5*0.5=0.25)
+ profilesSampleRate: 1.0,`
+ : ''
+ }
+});
+`;
+
+const getSdkServerSetupSnippet = (params: Params) => `
+import * as Sentry from "@sentry/nuxt";
+
+Sentry.init({
+ dsn: "${params.dsn.public}",${
+ params.isPerformanceSelected
+ ? `
+ // Tracing
+ // We recommend adjusting this value in production, or using a tracesSampler for finer control.
+ tracesSampleRate: 1.0, // Capture 100% of the transactions`
+ : ''
+ }${
+ params.isProfilingSelected
+ ? `
+ // Set profilesSampleRate to 1.0 to profile every transaction.
+ // Since profilesSampleRate is relative to tracesSampleRate,
+ // the final profiling rate can be computed as tracesSampleRate * profilesSampleRate
+ // For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would
+ // results in 25% of transactions being profiled (0.5*0.5=0.25)
+ profilesSampleRate: 1.0,`
+ : ''
+ }
+});
+`;
+
+const getVerifyNuxtSnippet = () => `
+
+
+
+ Trigger Error
+ `;
+
+const getInstallConfig = () => [
+ {
+ language: 'bash',
+ code: [
+ {
+ label: 'npm',
+ value: 'npm',
+ language: 'bash',
+ code: 'npm install --save @sentry/nuxt',
+ },
+ {
+ label: 'yarn',
+ value: 'yarn',
+ language: 'bash',
+ code: 'yarn add @sentry/nuxt',
+ },
+ {
+ label: 'pnpm',
+ value: 'pnpm',
+ language: 'bash',
+ code: `pnpm add @sentry/nuxt`,
+ },
+ ],
+ },
+];
+
+const onboarding: OnboardingConfig = {
+ introduction: params => (
+
+
+
+ {tct(
+ 'In this quick guide you’ll use [strong:npm], [strong:yarn] or [strong:pnpm] to set up:',
+ {
+ strong: ,
+ }
+ )}
+
+
+ ),
+ install: () => [
+ {
+ type: StepType.INSTALL,
+ description: t(
+ 'Add the Sentry Nuxt SDK as a dependency using your preferred package manager:'
+ ),
+ configurations: getInstallConfig(),
+ },
+ ],
+ configure: (params: Params) => [
+ {
+ type: StepType.CONFIGURE,
+ configurations: [
+ {
+ description: tct(
+ 'Add the Sentry Nuxt module in your [code:nuxt.config.ts] file:',
+ {code:
}
+ ),
+ code: [
+ {
+ label: 'TypeScript',
+ value: 'typescript',
+ language: 'typescript',
+ filename: 'nuxt.config.ts',
+ code: getNuxtModuleSnippet(),
+ },
+ ],
+ },
+ {
+ description: tct(
+ 'For the client, create a [codeFile:sentry.client.config.ts] file in your project root and initialize the Sentry SDK:',
+ {codeFile:
}
+ ),
+ code: [
+ {
+ label: 'TypeScript',
+ value: 'typescript',
+ language: 'typescript',
+ filename: 'sentry.client.config.ts',
+ code: getSdkClientSetupSnippet(params),
+ },
+ ],
+ },
+ {
+ description: (
+
+
+ {tct(
+ 'For the server, create a [codeFile:sentry.server.config.ts] file in your project root and initialize the Sentry SDK:',
+ {codeFile:
}
+ )}
+
+
+
+ {tct(
+ 'To complete the server-side setup, follow the [link:Sentry Nuxt docs] for guidance. Nuxt compiles your code in ESM on the server side as well, so the deployment setup can get tricky depending on where you deploy your application.',
+ {
+ link: (
+
+ ),
+ }
+ )}
+
+
+ ),
+ code: [
+ {
+ label: 'TypeScript',
+ value: 'typescript',
+ language: 'typescript',
+ filename: 'sentry.server.config.ts',
+ code: getSdkServerSetupSnippet(params),
+ },
+ ],
+ },
+ ],
+ },
+ {
+ title: t('Upload Source Maps'),
+ description: tct(
+ 'To upload source maps to Sentry, follow the [link:instructions in our documentation].',
+ {
+ link: (
+
+ ),
+ }
+ ),
+ },
+ ],
+ verify: () => [
+ {
+ type: StepType.VERIFY,
+ description: t(
+ "This snippet contains an intentional error and can be used as a test to make sure that everything's working as expected."
+ ),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'Vue',
+ value: 'vue',
+ language: 'html',
+ code: getVerifyNuxtSnippet(),
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ nextSteps: () => [
+ {
+ id: 'nuxt-features',
+ name: t('Nuxt Features'),
+ description: t('Learn about our first class integration with the Nuxt framework.'),
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/features/',
+ },
+ ],
+};
+
+const replayOnboarding: OnboardingConfig = {
+ install: () => [
+ {
+ type: StepType.INSTALL,
+ description: tct(
+ 'You need a minimum version 8.9.1 of [code:@sentry/nuxt] in order to use Session Replay. You do not need to install any additional packages.',
+ {
+ code:
,
+ }
+ ),
+ configurations: getInstallConfig(),
+ },
+ ],
+ configure: (params: Params) => [
+ {
+ type: StepType.CONFIGURE,
+ description: getReplayConfigureDescription({
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/session-replay/',
+ }),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'JavaScript',
+ value: 'javascript',
+ language: 'javascript',
+ code: getSdkClientSetupSnippet(params),
+ },
+ ],
+ additionalInfo: ,
+ },
+ ],
+ },
+ ],
+ verify: () => [],
+ nextSteps: () => [],
+};
+
+const feedbackOnboarding: OnboardingConfig = {
+ install: () => [
+ {
+ type: StepType.INSTALL,
+ description: tct(
+ 'For the User Feedback integration to work, you must have the Sentry browser SDK package, or an equivalent framework SDK (e.g. [code:@sentry/nuxt]) installed, minimum version 7.85.0.',
+ {
+ code:
,
+ }
+ ),
+ configurations: getInstallConfig(),
+ },
+ ],
+ configure: (params: Params) => [
+ {
+ type: StepType.CONFIGURE,
+ description: getFeedbackConfigureDescription({
+ linkConfig:
+ 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/configuration/',
+ linkButton:
+ 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/configuration/#bring-your-own-button',
+ }),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'JavaScript',
+ value: 'javascript',
+ language: 'javascript',
+ code: getSdkClientSetupSnippet(params),
+ },
+ ],
+ },
+ ],
+ additionalInfo: crashReportCallout({
+ link: 'https://docs.sentry.io/platforms/nuxt/guides/nuxt/user-feedback/#crash-report-modal',
+ }),
+ },
+ ],
+ verify: () => [],
+ nextSteps: () => [],
+};
+
+const crashReportOnboarding: OnboardingConfig = {
+ introduction: () => getCrashReportModalIntroduction(),
+ install: (params: Params) => getCrashReportJavaScriptInstallStep(params),
+ configure: () => [
+ {
+ type: StepType.CONFIGURE,
+ description: getCrashReportModalConfigDescription({
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/configuration/#crash-report-modal',
+ }),
+ additionalInfo: widgetCallout({
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/#user-feedback-widget',
+ }),
+ },
+ ],
+ verify: () => [],
+ nextSteps: () => [],
+};
+
+const docs: Docs = {
+ onboarding,
+ feedbackOnboardingNpm: feedbackOnboarding,
+ replayOnboarding,
+ customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
+ crashReportOnboarding,
+};
+
+const StyledAlert = styled(Alert)`
+ margin-bottom: 0;
+`;
+
+export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/react.tsx b/static/app/gettingStartedDocs/javascript/react.tsx
index 259274bcee858..782ae725d48da 100644
--- a/static/app/gettingStartedDocs/javascript/react.tsx
+++ b/static/app/gettingStartedDocs/javascript/react.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import ExternalLink from 'sentry/components/links/externalLink';
import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
@@ -118,7 +120,16 @@ const getInstallConfig = () => [
];
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ })}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
@@ -191,22 +202,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/react/configuration/integrations/react-router/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/react/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/react/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/remix.spec.tsx b/static/app/gettingStartedDocs/javascript/remix.spec.tsx
index 6112ddb268397..58c7e0a303dad 100644
--- a/static/app/gettingStartedDocs/javascript/remix.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/remix.spec.tsx
@@ -9,13 +9,11 @@ describe('javascript-remix onboarding docs', function () {
renderWithOnboardingLayout(docs);
// Renders main headings
- expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
- expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument();
-
- // Includes minimum required Astro version
- expect(screen.getByText(textWithMarkupMatcher(/Remix 1.0.0/))).toBeInTheDocument();
+ expect(
+ screen.getByRole('heading', {name: 'Automatic Configuration (Recommended)'})
+ ).toBeInTheDocument();
- // Includes wizard command statement
+ // Includes configure statement
expect(
screen.getByText(textWithMarkupMatcher(/npx @sentry\/wizard@latest -i remix/))
).toBeInTheDocument();
diff --git a/static/app/gettingStartedDocs/javascript/remix.tsx b/static/app/gettingStartedDocs/javascript/remix.tsx
index 581972cb055e5..0aa14e7fd2867 100644
--- a/static/app/gettingStartedDocs/javascript/remix.tsx
+++ b/static/app/gettingStartedDocs/javascript/remix.tsx
@@ -29,12 +29,13 @@ import {t, tct} from 'sentry/locale';
type Params = DocsParams;
-const getConfigStep = ({isSelfHosted, urlPrefix, organization, projectSlug}: Params) => {
- const urlParam = !isSelfHosted && urlPrefix ? `--url ${urlPrefix}` : '';
+const getConfigStep = ({isSelfHosted, organization, projectSlug}: Params) => {
+ const urlParam = isSelfHosted ? '' : '--saas';
+
return [
{
description: tct(
- 'Configure your app automatically with the [wizardLink:Sentry wizard].',
+ 'Configure your app automatically by running the [wizardLink:Sentry wizard] in the root of your project.',
{
wizardLink: (
@@ -59,12 +60,23 @@ const onboarding: OnboardingConfig = {
tct("Sentry's integration with [remixLink:Remix] supports Remix 1.0.0 and above.", {
remixLink: ,
}),
- install: (params: Params) => getInstallConfig(params),
+ install: (params: Params) => [
+ {
+ title: t('Automatic Configuration (Recommended)'),
+ configurations: getConfigStep(params),
+ },
+ ],
configure: () => [
{
- type: StepType.CONFIGURE,
- description: t(
- 'The Sentry wizard will automatically add code to your project to inialize and configure the Sentry SDK:'
+ collapsible: true,
+ title: t('Manual Configuration'),
+ description: tct(
+ 'Alternatively, you can also [manualSetupLink:set up the SDK manually], by following these steps:',
+ {
+ manualSetupLink: (
+
+ ),
+ }
),
configurations: [
{
@@ -72,10 +84,9 @@ const onboarding: OnboardingConfig = {
{tct(
- "Create two files in the root directory of your project, [clientEntry:entry.client.tsx] and [serverEntry:entry.server.tsx] (if they don't already exist).",
+ "Create two files in the root directory of your project, [code:entry.client.tsx] and [code:entry.server.tsx] (if they don't already exist).",
{
- clientEntry:
,
- serverEntry:
,
+ code:
,
}
)}
@@ -89,42 +100,63 @@ const onboarding: OnboardingConfig = {
{tct(
- 'Create a [cliRc:.sentryclirc] with an auth token to upload source maps (this file is automatically added to your [gitignore:.gitignore]).',
+ 'Create a [code:.sentryclirc] with an auth token to upload source maps (this file is automatically added to your [code:.gitignore]).',
{
- cliRc:
,
- gitignore:
,
+ code:
,
}
)}
{tct(
- 'Adjust your [buildscript:build] script in your [pkgJson:package.json] to automatically upload source maps to Sentry when you build your application.',
+ 'Adjust your [code:build] script in your [code:package.json] to automatically upload source maps to Sentry when you build your application.',
{
- buildscript:
,
- pkgJson:
,
+ code:
,
}
)}
),
},
+ ],
+ },
+ ],
+ verify: () => [
+ {
+ type: StepType.VERIFY,
+ description: (
+
+
+ {tct(
+ 'Start your development server and visit [code:/sentry-example-page] if you have set it up. Click the button to trigger a test error.',
+ {
+ code:
,
+ }
+ )}
+
+
+ {t(
+ 'Or, trigger a sample error by calling a function that does not exist somewhere in your application.'
+ )}
+
+
+ ),
+ configurations: [
{
- description: tct(
- 'You can also further [manualConfigure:configure your SDK] or [manualSetupLink:set it up manually], without the wizard.',
+ code: [
{
- manualConfigure: (
-
- ),
- manualSetupLink: (
-
- ),
- }
- ),
+ label: 'Javascript',
+ value: 'javascript',
+ language: 'javascript',
+ code: `myUndefinedFunction();`,
+ },
+ ],
},
],
+ additionalInfo: t(
+ 'If you see an issue in your Sentry dashboard, you have successfully set up Sentry.'
+ ),
},
],
- verify: () => [],
nextSteps: () => [],
};
@@ -157,8 +189,8 @@ const replayOnboarding: OnboardingConfig = {
{tct(
- 'Note: The Replay integration only needs to be added to your [entryClient:entry.client.tsx] file. It will not run if it is added into [sentryServer:sentry.server.config.js].',
- {entryClient:
, sentryServer:
}
+ 'Note: The Replay integration only needs to be added to your [code:entry.client.tsx] file. It will not run if it is added into [code:sentry.server.config.js].',
+ {code:
}
)}
),
diff --git a/static/app/gettingStartedDocs/javascript/solid.tsx b/static/app/gettingStartedDocs/javascript/solid.tsx
index c0eb8c9ab921d..9b00730166e3e 100644
--- a/static/app/gettingStartedDocs/javascript/solid.tsx
+++ b/static/app/gettingStartedDocs/javascript/solid.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage';
@@ -130,15 +132,23 @@ const getInstallConfig = () => [
];
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ })}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
description: tct(
- 'Add the Sentry SDK as a dependency using [codeNpm:npm] or [codeYarn:yarn]:',
+ 'Add the Sentry SDK as a dependency using [code:npm] or [code:yarn]:',
{
- codeYarn:
,
- codeNpm:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -199,22 +209,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with the Solid framework.'),
link: 'https://docs.sentry.io/platforms/javascript/guides/solid/features/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/solidstart.tsx b/static/app/gettingStartedDocs/javascript/solidstart.tsx
index 30a5cc41925b7..643afe3747330 100644
--- a/static/app/gettingStartedDocs/javascript/solidstart.tsx
+++ b/static/app/gettingStartedDocs/javascript/solidstart.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import ExternalLink from 'sentry/components/links/externalLink';
import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
@@ -201,15 +203,26 @@ const getInstallConfig = () => [
];
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct(
+ 'In this quick guide you’ll use [strong:npm], [strong:yarn] or [strong:pnpm] to set up:',
+ {
+ strong: ,
+ }
+ )}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
description: tct(
- 'Add the Sentry SDK as a dependency using [codeNpm:npm] or [codeYarn:yarn]:',
+ 'Add the Sentry SDK as a dependency using [code:npm], [code:yarn] or [code:pnpm]:',
{
- codeYarn:
,
- codeNpm:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -241,8 +254,8 @@ const onboarding: OnboardingConfig = {
},
{
description: tct(
- 'For the server, create an instrument file [codeFile:instrument.server.mjs], initialize the Sentry SDK and deploy it alongside your application. For example by placing it in the [codeFolder:public] folder.',
- {codeFile:
, codeFolder:
}
+ 'For the server, create an instrument file [code:instrument.server.mjs], initialize the Sentry SDK and deploy it alongside your application. For example by placing it in the [code:public] folder.',
+ {code:
}
),
code: [
{
@@ -253,8 +266,8 @@ const onboarding: OnboardingConfig = {
},
],
additionalInfo: tct(
- 'Note: Placing [codeFile:instrument.server.mjs] inside the [codeFolder:public] folder makes it accessible to the outside world. Consider blocking requests to this file or finding a more appropriate location which your backend can access.',
- {codeFile:
, codeFolder:
}
+ 'Note: Placing [code:instrument.server.mjs] inside the [code:public] folder makes it accessible to the outside world. Consider blocking requests to this file or finding a more appropriate location which your backend can access.',
+ {code:
}
),
},
...(params.isPerformanceSelected
@@ -296,10 +309,9 @@ const onboarding: OnboardingConfig = {
},
{
description: tct(
- "If you're using [solidRouterLink:Solid Router], wrap your [codeRouter:Router] with [codeRouterWrapping:withSentryRouterRouting]. This creates a higher order component, which will enable Sentry to collect navigation spans.",
+ "If you're using [solidRouterLink:Solid Router], wrap your [code:Router] with [code:withSentryRouterRouting]. This creates a higher order component, which will enable Sentry to collect navigation spans.",
{
- codeRouter:
,
- codeRouterWrapping:
,
+ code:
,
solidRouterLink: (
),
@@ -321,13 +333,9 @@ const onboarding: OnboardingConfig = {
: []),
{
description: tct(
- 'Add an [codeFlag:--import] flag to the [codeNodeOptions:NODE_OPTIONS] environment variable wherever you run your application to import [codeInstrument:public/instrument.server.mjs]. For example, update your [codeScripts:scripts] entry in [codePackageJson:package.json]',
+ 'Add an [code:--import] flag to the [code:NODE_OPTIONS] environment variable wherever you run your application to import [code:public/instrument.server.mjs]. For example, update your [code:scripts] entry in [code:package.json]',
{
- codeFlag:
,
- codeNodeOptions:
,
- codeInstrument:
,
- codeScripts:
,
- codePackageJson:
,
+ code:
,
}
),
code: [
@@ -380,22 +388,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with the Solid framework.'),
link: 'https://docs.sentry.io/platforms/javascript/guides/solid/features/',
},
- {
- id: 'performance-monitoring',
- name: t('Performance Monitoring'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/svelte.tsx b/static/app/gettingStartedDocs/javascript/svelte.tsx
index 85c030bf1443b..55b1f1545575c 100644
--- a/static/app/gettingStartedDocs/javascript/svelte.tsx
+++ b/static/app/gettingStartedDocs/javascript/svelte.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage';
@@ -123,15 +125,23 @@ const getInstallConfig = () => [
];
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ })}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
description: tct(
- 'Add the Sentry SDK as a dependency using [codeNpm:npm] or [codeYarn:yarn]:',
+ 'Add the Sentry SDK as a dependency using [code:npm] or [code:yarn]:',
{
- codeYarn:
,
- codeNpm:
,
+ code:
,
}
),
configurations: getInstallConfig(),
@@ -194,22 +204,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/features/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
index a88d5c93e60f7..8157e2dead41c 100644
--- a/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
@@ -11,8 +11,9 @@ describe('javascript-sveltekit onboarding docs', function () {
renderWithOnboardingLayout(docs);
// Renders main headings
- expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
- expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument();
+ expect(
+ screen.getByRole('heading', {name: 'Automatic Configuration (Recommended)'})
+ ).toBeInTheDocument();
// Includes configure statement
expect(
@@ -20,7 +21,7 @@ describe('javascript-sveltekit onboarding docs', function () {
).toBeInTheDocument();
});
- it('displays the configure instructions', () => {
+ it('displays the verify instructions', () => {
renderWithOnboardingLayout(docs, {
selectedProducts: [
ProductSolution.ERROR_MONITORING,
@@ -30,11 +31,7 @@ describe('javascript-sveltekit onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/vite.config.js/))
- ).toBeInTheDocument();
- expect(
- screen.queryByText(textWithMarkupMatcher(/src\/hooks.server.js/))
+ screen.queryByText(textWithMarkupMatcher(/sentry-example-page/))
).toBeInTheDocument();
- expect(screen.queryByText(textWithMarkupMatcher(/.sentryclirc/))).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.tsx
index 36be073f12154..42c6e4e4e2c77 100644
--- a/static/app/gettingStartedDocs/javascript/sveltekit.tsx
+++ b/static/app/gettingStartedDocs/javascript/sveltekit.tsx
@@ -29,19 +29,14 @@ import {t, tct} from 'sentry/locale';
type Params = DocsParams;
-const getInstallConfig = ({
- isSelfHosted,
- urlPrefix,
- organization,
- projectSlug,
-}: Params) => {
- const urlParam = !isSelfHosted && urlPrefix ? `--url ${urlPrefix}` : '';
+const getConfigStep = ({isSelfHosted, organization, projectSlug}: Params) => {
+ const urlParam = isSelfHosted ? '' : '--saas';
return [
{
type: StepType.INSTALL,
description: tct(
- 'Configure your app automatically with the [wizardLink:Sentry wizard].',
+ 'Configure your app automatically by running the [wizardLink:Sentry wizard] in the root of your project.',
{
wizardLink: (
@@ -58,64 +53,103 @@ const getInstallConfig = ({
];
};
+const getInstallConfig = (params: Params) => [
+ {
+ type: StepType.INSTALL,
+ configurations: getConfigStep(params),
+ },
+];
+
const onboarding: OnboardingConfig = {
- install: (params: Params) => getInstallConfig(params),
+ install: (params: Params) => [
+ {
+ title: t('Automatic Configuration (Recommended)'),
+ configurations: getConfigStep(params),
+ },
+ ],
configure: () => [
{
- type: StepType.CONFIGURE,
+ title: t('Manual Configuration'),
+ collapsible: true,
+ description: tct(
+ 'Alternatively, you can also [manualSetupLink:set up the SDK manually], by following these steps:',
+ {
+ manualSetupLink: (
+
+ ),
+ }
+ ),
configurations: [
{
description: (
-
- {t(
- 'The Sentry wizard will automatically patch your application to configure the Sentry SDK:'
- )}
-
-
- {tct(
- 'Create or update [hookClientCode:src/hooks.client.js] and [hookServerCode:src/hooks.server.js] with the default [sentryInitCode:Sentry.init] call and SvelteKit hooks handlers.',
- {
- hookClientCode:
,
- hookServerCode:
,
- sentryInitCode:
,
- }
- )}
-
-
- {tct(
- 'Update [code:vite.config.js] to add source maps upload and auto-instrumentation via Vite plugins.',
- {
- code:
,
- }
- )}
-
-
- {tct(
- 'Create [sentryClircCode:.sentryclirc] and [sentryPropertiesCode:sentry.properties] files with configuration for sentry-cli (which is used when automatically uploading source maps).',
- {
- sentryClircCode:
,
- sentryPropertiesCode:
,
- }
- )}
-
-
-
+
+
+ {tct(
+ 'Create or update [code:src/hooks.client.js] and [code:src/hooks.server.js] with the default [code:Sentry.init] call and SvelteKit hooks handlers.',
+ {
+ code:
,
+ }
+ )}
+
+
+ {tct(
+ 'Update [code:vite.config.js] to add source maps upload and auto-instrumentation via Vite plugins.',
+ {
+ code:
,
+ }
+ )}
+
+
{tct(
- 'Alternatively, you can also [manualSetupLink:set up the SDK manually].',
+ 'Create [code:.sentryclirc] and [code:sentry.properties] files with configuration for sentry-cli (which is used when automatically uploading source maps).',
{
- manualSetupLink: (
-
- ),
+ code:
,
}
)}
-
-
+
+
),
},
],
},
],
- verify: () => [],
+ verify: () => [
+ {
+ type: StepType.VERIFY,
+ description: (
+
+
+ {tct(
+ 'Start your development server and visit [code:/sentry-example-page] if you have set it up. Click the button to trigger a test error.',
+ {
+ code:
,
+ }
+ )}
+
+
+ {t(
+ 'Or, trigger a sample error by calling a function that does not exist somewhere in your application.'
+ )}
+
+
+ ),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'Javascript',
+ value: 'javascript',
+ language: 'javascript',
+ code: `myUndefinedFunction();`,
+ },
+ ],
+ },
+ ],
+ additionalInfo: t(
+ 'If you see an issue in your Sentry dashboard, you have successfully set up Sentry.'
+ ),
+ },
+ ],
};
const replayOnboarding: OnboardingConfig = {
diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx
index fc6e0f2ef7227..95eb396a45ce9 100644
--- a/static/app/gettingStartedDocs/javascript/vue.tsx
+++ b/static/app/gettingStartedDocs/javascript/vue.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage';
@@ -26,7 +28,6 @@ import {
getReplayConfigureDescription,
getReplayVerifyStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding';
-import {ProductSolution} from 'sentry/components/onboarding/productSelection';
import {t, tct} from 'sentry/locale';
export enum VueVersion {
@@ -120,42 +121,26 @@ const getInstallConfig = () => [
},
];
-const getNextStep = (
- params: Params
-): {
- description: string;
- id: string;
- link: string;
- name: string;
-}[] => {
- let nextStepDocs = [...nextSteps];
-
- if (params.isPerformanceSelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.PERFORMANCE_MONITORING
- );
- }
-
- if (params.isReplaySelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.SESSION_REPLAY
- );
- }
- return nextStepDocs;
-};
-
const onboarding: OnboardingConfig = {
- introduction: MaybeBrowserProfilingBetaWarning,
+ introduction: params => (
+
+
+
+ {tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ })}
+
+
+ ),
install: () => [
{
type: StepType.INSTALL,
description: (
{tct(
- `Install the Sentry Vue SDK as a dependency using [codeNpm:npm] or [codeYarn:yarn], alongside the Sentry Vue SDK:`,
+ `Install the Sentry Vue SDK as a dependency using [code:npm] or [code:yarn], alongside the Sentry Vue SDK:`,
{
- codeYarn:
,
- codeNpm:
,
+ code:
,
}
)}
@@ -191,34 +176,16 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params => getNextStep(params),
+ nextSteps: () => [
+ {
+ id: 'vue-features',
+ name: t('Vue Features'),
+ description: t('Learn about our first class integration with the Vue framework.'),
+ link: 'https://docs.sentry.io/platforms/javascript/guides/vue/features/',
+ },
+ ],
};
-export const nextSteps = [
- {
- id: 'vue-features',
- name: t('Vue Features'),
- description: t('Learn about our first class integration with the Vue framework.'),
- link: 'https://docs.sentry.io/platforms/javascript/guides/vue/features/',
- },
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/vue/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/vue/session-replay/',
- },
-];
-
function getSiblingImportsSetupConfiguration(siblingOption: string): string {
switch (siblingOption) {
case VueVersion.VUE3:
diff --git a/static/app/gettingStartedDocs/kotlin/kotlin.tsx b/static/app/gettingStartedDocs/kotlin/kotlin.tsx
index c31048f246a66..94fafbfeb1351 100644
--- a/static/app/gettingStartedDocs/kotlin/kotlin.tsx
+++ b/static/app/gettingStartedDocs/kotlin/kotlin.tsx
@@ -139,7 +139,7 @@ try {
const onboarding: OnboardingConfig = {
introduction: () =>
tct(
- "Sentry supports Kotlin for both JVM and Android. This wizard guides you through set up in the JVM scenario. If you're interested in [strong:Android], head over to the [gettingStartedWithAndroidLink:Getting Started] for that SDK instead. At its core, Sentry for Java provides a raw client for sending events to Sentry. If you use [strong2:Spring Boot, Spring, Logback, JUL, or Log4j2], head over to our [gettingStartedWithJavaLink:Getting Started for Sentry Java].",
+ "Sentry supports Kotlin for both JVM and Android. This wizard guides you through set up in the JVM scenario. If you're interested in [strong:Android], head over to the [gettingStartedWithAndroidLink:Getting Started] for that SDK instead. At its core, Sentry for Java provides a raw client for sending events to Sentry. If you use [strong:Spring Boot, Spring, Logback, JUL, or Log4j2], head over to our [gettingStartedWithJavaLink:Getting Started for Sentry Java].",
{
gettingStartedWithAndroidLink: (
@@ -148,7 +148,6 @@ const onboarding: OnboardingConfig = {
),
strong: ,
- strong2: ,
}
),
install: params => [
@@ -264,14 +263,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/tracing/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/minidump/minidump.tsx b/static/app/gettingStartedDocs/minidump/minidump.tsx
index 0d6268b7e2d87..00140f5dc4753 100644
--- a/static/app/gettingStartedDocs/minidump/minidump.tsx
+++ b/static/app/gettingStartedDocs/minidump/minidump.tsx
@@ -49,12 +49,10 @@ const onboarding: OnboardingConfig = {
configurations: [
{
description: tct(
- 'If you have already integrated a library that generates minidumps and would just like to upload them to Sentry, you need to configure the [minidumpEndpointUrlItalic:Minidump Endpoint URL], which can be found at [projectSettingsItalic:Project Settings > Client Keys (DSN)]. This endpoint expects a [postCode:POST] request with the minidump in the [uploadFileMinidumpCode:upload_file_minidump] field:',
+ 'If you have already integrated a library that generates minidumps and would just like to upload them to Sentry, you need to configure the [italic:Minidump Endpoint URL], which can be found at [italic:Project Settings > Client Keys (DSN)]. This endpoint expects a [code:POST] request with the minidump in the [code:upload_file_minidump] field:',
{
- postCode:
,
- uploadFileMinidumpCode:
,
- minidumpEndpointUrlItalic: ,
- projectSettingsItalic: ,
+ code:
,
+ italic: ,
}
),
language: 'bash',
diff --git a/static/app/gettingStartedDocs/node/awslambda.tsx b/static/app/gettingStartedDocs/node/awslambda.tsx
index 4bf3f653ed6ed..5aa82f99df7f9 100644
--- a/static/app/gettingStartedDocs/node/awslambda.tsx
+++ b/static/app/gettingStartedDocs/node/awslambda.tsx
@@ -46,6 +46,10 @@ Sentry.init({
});`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: params => [
{
type: StepType.INSTALL,
@@ -59,10 +63,8 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- "Ensure that Sentry is imported and initialized at the beginning of your file, prior to any other [require:require] or [import:import] statements. Then, wrap your lambda handler with Sentry's [code:wraphandler] function:",
+ "Ensure that Sentry is imported and initialized at the beginning of your file, prior to any other [code:require] or [code:import] statements. Then, wrap your lambda handler with Sentry's [code:wraphandler] function:",
{
- import:
,
- require:
,
code:
,
}
),
@@ -112,10 +114,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need a minimum version [codeVersion:8.0.0] of [codePackage:@sentry/aws-serverless]:',
+ 'You need a minimum version [code:8.0.0] of [code:@sentry/aws-serverless]:',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
),
configurations: getInstallConfig(params, {
diff --git a/static/app/gettingStartedDocs/node/azurefunctions.tsx b/static/app/gettingStartedDocs/node/azurefunctions.tsx
index e751566f08c74..102fc94012ba9 100644
--- a/static/app/gettingStartedDocs/node/azurefunctions.tsx
+++ b/static/app/gettingStartedDocs/node/azurefunctions.tsx
@@ -38,6 +38,10 @@ module.exports = async function (context, req) {
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: params => [
{
type: StepType.INSTALL,
@@ -49,15 +53,15 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Ensure that Sentry is imported and initialized at the beginning of your file, prior to any other [require:require] or [import:import] statements.',
- {import:
, require:
}
+ 'Ensure that Sentry is imported and initialized at the beginning of your file, prior to any other [code:require] or [code:import] statements.',
+ {code:
}
),
configurations: [
{
language: 'javascript',
description: tct(
- 'Note: You need to call both [captureExceptionCode:captureException] and [flushCode:flush] for captured events to be successfully delivered to Sentry.',
- {captureExceptionCode:
, flushCode:
}
+ 'Note: You need to call both [code:captureException] and [code:flush] for captured events to be successfully delivered to Sentry.',
+ {code:
}
),
},
{
diff --git a/static/app/gettingStartedDocs/node/connect.tsx b/static/app/gettingStartedDocs/node/connect.tsx
index d417f4b303393..0c021c40167db 100644
--- a/static/app/gettingStartedDocs/node/connect.tsx
+++ b/static/app/gettingStartedDocs/node/connect.tsx
@@ -39,6 +39,10 @@ app.listen(3000);
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: params => [
{
type: StepType.INSTALL,
diff --git a/static/app/gettingStartedDocs/node/express.tsx b/static/app/gettingStartedDocs/node/express.tsx
index 31b41073f8d15..e3a9c10cf926d 100644
--- a/static/app/gettingStartedDocs/node/express.tsx
+++ b/static/app/gettingStartedDocs/node/express.tsx
@@ -53,6 +53,13 @@ app.listen(3000);
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct(
+ 'In this quick guide you’ll use [strong:npm], [strong:yarn] or [strong:pnpm] to set up:',
+ {
+ strong: ,
+ }
+ ),
install: (params: Params) => [
{
type: StepType.INSTALL,
diff --git a/static/app/gettingStartedDocs/node/fastify.tsx b/static/app/gettingStartedDocs/node/fastify.tsx
index 7d20e49c0738e..088042b1e0ebb 100644
--- a/static/app/gettingStartedDocs/node/fastify.tsx
+++ b/static/app/gettingStartedDocs/node/fastify.tsx
@@ -42,6 +42,10 @@ app.listen(3000);
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: (params: Params) => [
{
type: StepType.INSTALL,
diff --git a/static/app/gettingStartedDocs/node/gcpfunctions.tsx b/static/app/gettingStartedDocs/node/gcpfunctions.tsx
index eee860f477560..ba91f6c2996bd 100644
--- a/static/app/gettingStartedDocs/node/gcpfunctions.tsx
+++ b/static/app/gettingStartedDocs/node/gcpfunctions.tsx
@@ -55,6 +55,10 @@ Sentry.init({
});`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: params => [
{
type: StepType.INSTALL,
@@ -76,10 +80,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Ensure that Sentry is imported and initialized at the beginning of your file, prior to any other [require:require] or [import:import] statements. Then, use the Sentry SDK to wrap your functions:',
+ 'Ensure that Sentry is imported and initialized at the beginning of your file, prior to any other [code:require] or [code:import] statements. Then, use the Sentry SDK to wrap your functions:',
{
- import:
,
- require:
,
+ code:
,
}
),
configurations: [
@@ -116,10 +119,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need a minimum version [codeVersion:8.0.0] of [codePackage:@sentry/google-cloud-serverless]:',
+ 'You need a minimum version [code:8.0.0] of [code:@sentry/google-cloud-serverless]:',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
),
configurations: getInstallConfig(params, {
diff --git a/static/app/gettingStartedDocs/node/hapi.tsx b/static/app/gettingStartedDocs/node/hapi.tsx
index 0fe73f422a065..9272dce653bda 100644
--- a/static/app/gettingStartedDocs/node/hapi.tsx
+++ b/static/app/gettingStartedDocs/node/hapi.tsx
@@ -53,6 +53,10 @@ app.use(async function () {
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: params => [
{
type: StepType.INSTALL,
diff --git a/static/app/gettingStartedDocs/node/koa.tsx b/static/app/gettingStartedDocs/node/koa.tsx
index 2771e0f58a3fe..9d8587af3cf19 100644
--- a/static/app/gettingStartedDocs/node/koa.tsx
+++ b/static/app/gettingStartedDocs/node/koa.tsx
@@ -46,6 +46,10 @@ app.use(async function () {
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: params => [
{
type: StepType.INSTALL,
diff --git a/static/app/gettingStartedDocs/node/nestjs.tsx b/static/app/gettingStartedDocs/node/nestjs.tsx
index 6f90400ae8e3a..02e4b31c133b1 100644
--- a/static/app/gettingStartedDocs/node/nestjs.tsx
+++ b/static/app/gettingStartedDocs/node/nestjs.tsx
@@ -93,6 +93,10 @@ export class AppModule {}
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: params => [
{
type: StepType.INSTALL,
diff --git a/static/app/gettingStartedDocs/node/node.tsx b/static/app/gettingStartedDocs/node/node.tsx
index 0e475838d8a62..9cbe022a35352 100644
--- a/static/app/gettingStartedDocs/node/node.tsx
+++ b/static/app/gettingStartedDocs/node/node.tsx
@@ -36,6 +36,10 @@ server.listen(3000, "127.0.0.1");
`;
const onboarding: OnboardingConfig = {
+ introduction: () =>
+ tct('In this quick guide you’ll use [strong:npm] or [strong:yarn] to set up:', {
+ strong: ,
+ }),
install: (params: Params) => [
{
type: StepType.INSTALL,
diff --git a/static/app/gettingStartedDocs/php/laravel.tsx b/static/app/gettingStartedDocs/php/laravel.tsx
index bad31a6291e89..86ff3cdfd4083 100644
--- a/static/app/gettingStartedDocs/php/laravel.tsx
+++ b/static/app/gettingStartedDocs/php/laravel.tsx
@@ -126,8 +126,8 @@ const onboarding: OnboardingConfig = {
},
{
description: tct(
- 'It creates the config file ([sentryPHPCode:config/sentry.php]) and adds the [dsnCode:DSN] to your [envCode:.env] file where you can add further configuration options:',
- {sentryPHPCode:
, dsnCode:
, envCode:
}
+ 'It creates the config file ([code:config/sentry.php]) and adds the [code:DSN] to your [code:.env] file where you can add further configuration options:',
+ {code:
}
),
language: 'shell',
code: getConfigureSnippet(params),
@@ -172,10 +172,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need a minimum version [codeVersionLaravel:4.2.0] of the Laravel SDK and a minimum version [codeVersion:4.3.0] of the PHP SDK installed',
+ 'You need a minimum version [code:4.2.0] of the Laravel SDK and a minimum version [code:4.3.0] of the PHP SDK installed',
{
- codeVersionLaravel:
,
- codeVersion:
,
+ code:
,
}
),
configurations: [
@@ -213,13 +212,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges].",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges].",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/php/php.tsx b/static/app/gettingStartedDocs/php/php.tsx
index 2063ef5d4e7d7..a4ebd8dd35cf4 100644
--- a/static/app/gettingStartedDocs/php/php.tsx
+++ b/static/app/gettingStartedDocs/php/php.tsx
@@ -182,13 +182,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges].",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges].",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/python/aiohttp.tsx b/static/app/gettingStartedDocs/python/aiohttp.tsx
index 6e179f53719b2..784072eb4b85a 100644
--- a/static/app/gettingStartedDocs/python/aiohttp.tsx
+++ b/static/app/gettingStartedDocs/python/aiohttp.tsx
@@ -59,10 +59,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/asgi.tsx b/static/app/gettingStartedDocs/python/asgi.tsx
index 1c254dd8ba569..e09f3b2be1172 100644
--- a/static/app/gettingStartedDocs/python/asgi.tsx
+++ b/static/app/gettingStartedDocs/python/asgi.tsx
@@ -85,10 +85,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/awslambda.tsx b/static/app/gettingStartedDocs/python/awslambda.tsx
index 478a8cfac0256..f9d8e511d4628 100644
--- a/static/app/gettingStartedDocs/python/awslambda.tsx
+++ b/static/app/gettingStartedDocs/python/awslambda.tsx
@@ -78,10 +78,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
@@ -125,8 +124,8 @@ const onboarding: OnboardingConfig = {
configurations: [
{
description: tct(
- 'To enable the warning, update the SDK initialization to set [codeTimeout:timeout_warning] to [codeStatus:true]:',
- {codeTimeout:
, codeStatus:
}
+ 'To enable the warning, update the SDK initialization to set [code:timeout_warning] to [code:true]:',
+ {code:
}
),
language: 'python',
code: getTimeoutWarningSnippet(params),
diff --git a/static/app/gettingStartedDocs/python/bottle.tsx b/static/app/gettingStartedDocs/python/bottle.tsx
index ea33a62c8b227..23b80f9f3b6b3 100644
--- a/static/app/gettingStartedDocs/python/bottle.tsx
+++ b/static/app/gettingStartedDocs/python/bottle.tsx
@@ -47,10 +47,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryBotteCode:bottle] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:bottle] extra:',
{
- sentrySdkCode:
,
- sentryBotteCode:
,
+ code:
,
}
),
configurations: [
@@ -58,10 +57,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/celery.tsx b/static/app/gettingStartedDocs/python/celery.tsx
index 834376d8be413..2f933f7b9b648 100644
--- a/static/app/gettingStartedDocs/python/celery.tsx
+++ b/static/app/gettingStartedDocs/python/celery.tsx
@@ -61,10 +61,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/chalice.tsx b/static/app/gettingStartedDocs/python/chalice.tsx
index f448c484b9d2a..4ed37f0fb9676 100644
--- a/static/app/gettingStartedDocs/python/chalice.tsx
+++ b/static/app/gettingStartedDocs/python/chalice.tsx
@@ -56,10 +56,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryBotteCode:chalice] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:chalice] extra:',
{
- sentrySdkCode:
,
- sentryBotteCode:
,
+ code:
,
}
),
configurations: [
@@ -67,10 +66,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/django.tsx b/static/app/gettingStartedDocs/python/django.tsx
index 0a3335737059d..d8eec2733b9a5 100644
--- a/static/app/gettingStartedDocs/python/django.tsx
+++ b/static/app/gettingStartedDocs/python/django.tsx
@@ -53,10 +53,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
@@ -70,10 +69,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Initialize the Sentry SDK in your Django [codeSettings:settings.py] file:',
+ 'Initialize the Sentry SDK in your Django [code:settings.py] file:',
{
- codeDjango:
,
- codeSettings:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/python/falcon.tsx b/static/app/gettingStartedDocs/python/falcon.tsx
index e25fb4df76479..4f5d8c563f205 100644
--- a/static/app/gettingStartedDocs/python/falcon.tsx
+++ b/static/app/gettingStartedDocs/python/falcon.tsx
@@ -48,10 +48,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryFalconCode:falcon] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:falcon] extra:',
{
- sentrySdkCode:
,
- sentryFalconCode:
,
+ code:
,
}
),
configurations: [
@@ -59,10 +58,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/fastapi.tsx b/static/app/gettingStartedDocs/python/fastapi.tsx
index e1143836d08fe..6878128b9c2b1 100644
--- a/static/app/gettingStartedDocs/python/fastapi.tsx
+++ b/static/app/gettingStartedDocs/python/fastapi.tsx
@@ -48,10 +48,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryFastApiCode:fastapi] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:fastapi] extra:',
{
- sentrySdkCode:
,
- sentryFastApiCode:
,
+ code:
,
}
),
configurations: [
@@ -59,10 +58,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/flask.tsx b/static/app/gettingStartedDocs/python/flask.tsx
index 4a386877aaf55..a85411e847383 100644
--- a/static/app/gettingStartedDocs/python/flask.tsx
+++ b/static/app/gettingStartedDocs/python/flask.tsx
@@ -48,10 +48,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryFlaskCode:flask] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:flask] extra:',
{
- sentrySdkCode:
,
- sentryFlaskCode:
,
+ code:
,
}
),
configurations: [
@@ -59,10 +58,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/gcpfunctions.tsx b/static/app/gettingStartedDocs/python/gcpfunctions.tsx
index b3600c4717ee7..c54d2d38c95a8 100644
--- a/static/app/gettingStartedDocs/python/gcpfunctions.tsx
+++ b/static/app/gettingStartedDocs/python/gcpfunctions.tsx
@@ -65,10 +65,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
@@ -112,8 +111,8 @@ const onboarding: OnboardingConfig = {
configurations: [
{
description: tct(
- 'To enable the warning, update the SDK initialization to set [codeTimeout:timeout_warning] to [codeStatus:true]:',
- {codeTimeout:
, codeStatus:
}
+ 'To enable the warning, update the SDK initialization to set [code:timeout_warning] to [code:true]:',
+ {code:
}
),
language: 'python',
code: getTimeoutWarningSnippet(params),
diff --git a/static/app/gettingStartedDocs/python/mongo.tsx b/static/app/gettingStartedDocs/python/mongo.tsx
index c13ce27193d6d..2dfc1378e18db 100644
--- a/static/app/gettingStartedDocs/python/mongo.tsx
+++ b/static/app/gettingStartedDocs/python/mongo.tsx
@@ -43,10 +43,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [pymongoCode:pymongo] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:pymongo] extra:',
{
- sentrySdkCode:
,
- pymongoCode:
,
+ code:
,
}
),
configurations: [
@@ -54,10 +53,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/pylons.tsx b/static/app/gettingStartedDocs/python/pylons.tsx
index b64df4d14ffbc..9215de10e499f 100644
--- a/static/app/gettingStartedDocs/python/pylons.tsx
+++ b/static/app/gettingStartedDocs/python/pylons.tsx
@@ -96,10 +96,9 @@ const onboarding: OnboardingConfig = {
{
language: 'python',
description: tct(
- 'Add the following lines to your project’s [initCode:.ini] file to setup [sentryHandlerCode:SentryHandler]:',
+ 'Add the following lines to your project’s [code:.ini] file to setup [code:SentryHandler]:',
{
- initCode:
,
- sentryHandlerCode:
,
+ code:
,
}
),
code: getLoggerSnippet(),
diff --git a/static/app/gettingStartedDocs/python/python.tsx b/static/app/gettingStartedDocs/python/python.tsx
index 7c987523b8e0e..796d434dd23e0 100644
--- a/static/app/gettingStartedDocs/python/python.tsx
+++ b/static/app/gettingStartedDocs/python/python.tsx
@@ -52,10 +52,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/quart.tsx b/static/app/gettingStartedDocs/python/quart.tsx
index 8ae15630299b5..fc11210620f5f 100644
--- a/static/app/gettingStartedDocs/python/quart.tsx
+++ b/static/app/gettingStartedDocs/python/quart.tsx
@@ -50,10 +50,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryQuartCode:quart] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:quart] extra:',
{
- sentrySdkCode:
,
- sentryQuartCode:
,
+ code:
,
}
),
configurations: [
@@ -61,10 +60,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/rq.tsx b/static/app/gettingStartedDocs/python/rq.tsx
index 1921deaf3f70c..6cc8e887f6178 100644
--- a/static/app/gettingStartedDocs/python/rq.tsx
+++ b/static/app/gettingStartedDocs/python/rq.tsx
@@ -80,13 +80,9 @@ const onboarding: OnboardingConfig = {
install: () => [
{
type: StepType.INSTALL,
- description: tct(
- 'Install [code:sentry-sdk] from PyPI with the [sentryRQCode:rq] extra:',
- {
- code:
,
- sentryRQCode:
,
- }
- ),
+ description: tct('Install [code:sentry-sdk] from PyPI with the [code:rq] extra:', {
+ code:
,
+ }),
configurations: [
{
language: 'bash',
@@ -189,19 +185,17 @@ const onboarding: OnboardingConfig = {
{tct(
- 'When you run [codeMain:python main.py] a transaction named [codeTrxName:testing_sentry] in the Performance section of Sentry will be created.',
+ 'When you run [code:python main.py] a transaction named [code:testing_sentry] in the Performance section of Sentry will be created.',
{
- codeMain:
,
- codeTrxName:
,
+ code:
,
}
)}
{tct(
- 'If you run the RQ worker with [codeWorker:rq worker -c mysettings] a transaction for the execution of [codeFunction:hello()] will be created. Additionally, an error event will be sent to Sentry and will be connected to the transaction.',
+ 'If you run the RQ worker with [code:rq worker -c mysettings] a transaction for the execution of [code:hello()] will be created. Additionally, an error event will be sent to Sentry and will be connected to the transaction.',
{
- codeWorker:
,
- codeFunction:
,
+ code:
,
}
)}
diff --git a/static/app/gettingStartedDocs/python/sanic.tsx b/static/app/gettingStartedDocs/python/sanic.tsx
index 1c13cba8b5688..9a47574bc77ad 100644
--- a/static/app/gettingStartedDocs/python/sanic.tsx
+++ b/static/app/gettingStartedDocs/python/sanic.tsx
@@ -31,10 +31,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentrySanicCode:sanic] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:sanic] extra:',
{
- sentrySdkCode:
,
- sentrySanicCode:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/python/serverless.tsx b/static/app/gettingStartedDocs/python/serverless.tsx
index c7913a88343e9..e74febabe360b 100644
--- a/static/app/gettingStartedDocs/python/serverless.tsx
+++ b/static/app/gettingStartedDocs/python/serverless.tsx
@@ -82,10 +82,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
@@ -125,9 +124,8 @@ const onboarding: OnboardingConfig = {
code: getVerifySnippet(),
},
],
- additionalInfo: tct(
- 'Now deploy your function. When you now run your function an error event will be sent to Sentry.',
- {}
+ additionalInfo: t(
+ 'Now deploy your function. When you now run your function an error event will be sent to Sentry.'
),
},
],
diff --git a/static/app/gettingStartedDocs/python/starlette.tsx b/static/app/gettingStartedDocs/python/starlette.tsx
index 3ad63445c12a6..848e8ff99a33f 100644
--- a/static/app/gettingStartedDocs/python/starlette.tsx
+++ b/static/app/gettingStartedDocs/python/starlette.tsx
@@ -48,10 +48,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryStarletteCode:starlette] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:starlette] extra:',
{
- sentrySdkCode:
,
- sentryStarletteCode:
,
+ code:
,
}
),
configurations: [
@@ -59,10 +58,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/tornado.tsx b/static/app/gettingStartedDocs/python/tornado.tsx
index 8b31aaf0f5ee7..9a3ba7553cea9 100644
--- a/static/app/gettingStartedDocs/python/tornado.tsx
+++ b/static/app/gettingStartedDocs/python/tornado.tsx
@@ -46,10 +46,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'Install [sentrySdkCode:sentry-sdk] from PyPI with the [sentryTornadoCode:tornado] extra:',
+ 'Install [code:sentry-sdk] from PyPI with the [code:tornado] extra:',
{
- sentrySdkCode:
,
- sentryTornadoCode:
,
+ code:
,
}
),
configurations: [
@@ -57,10 +56,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/python/wsgi.tsx b/static/app/gettingStartedDocs/python/wsgi.tsx
index faa966b092478..5c6c358f80a8b 100644
--- a/static/app/gettingStartedDocs/python/wsgi.tsx
+++ b/static/app/gettingStartedDocs/python/wsgi.tsx
@@ -92,10 +92,9 @@ const onboarding: OnboardingConfig = {
description:
params.docsLocation === DocsPageLocation.PROFILING_PAGE
? tct(
- 'You need a minimum version [codeVersion:1.18.0] of the [codePackage:sentry-python] SDK for the profiling feature.',
+ 'You need a minimum version [code:1.18.0] of the [code:sentry-python] SDK for the profiling feature.',
{
- codeVersion:
,
- codePackage:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/react-native/react-native.spec.tsx b/static/app/gettingStartedDocs/react-native/react-native.spec.tsx
index b3c254ca8d416..84812402758e5 100644
--- a/static/app/gettingStartedDocs/react-native/react-native.spec.tsx
+++ b/static/app/gettingStartedDocs/react-native/react-native.spec.tsx
@@ -47,7 +47,7 @@ describe('getting started with react-native', function () {
).toBeInTheDocument();
expect(
await screen.findByText(
- textWithMarkupMatcher(/React Native Profiling beta is available/)
+ textWithMarkupMatcher(/React Native Profiling is available/)
)
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/react-native/react-native.tsx b/static/app/gettingStartedDocs/react-native/react-native.tsx
index 9bd387bf9be9f..d015c35a80d03 100644
--- a/static/app/gettingStartedDocs/react-native/react-native.tsx
+++ b/static/app/gettingStartedDocs/react-native/react-native.tsx
@@ -37,11 +37,9 @@ Sentry.init({
}${
params.isProfilingSelected
? `
- _experiments: {
- // profilesSampleRate is relative to tracesSampleRate.
- // Here, we'll capture profiles for 100% of transactions.
- profilesSampleRate: 1.0,
- },`
+ // profilesSampleRate is relative to tracesSampleRate.
+ // Here, we'll capture profiles for 100% of transactions.
+ profilesSampleRate: 1.0,`
: ''
}
});`;
@@ -134,11 +132,9 @@ const onboarding: OnboardingConfig = {
{tct(
- "Android Specifics: We hook into Gradle for the source map build process. When you run [gradLewCode:./gradlew] assembleRelease, source maps are automatically built and uploaded to Sentry. If you have enabled Gradle's [orgGradleCode:org.gradle.configureondemand] feature, you'll need a clean build, or you'll need to disable this feature to upload the source map on every build by setting [orgGradleCodeConfigureCode:org.gradle.configureondemand=false] or remove it.",
+ "Android Specifics: We hook into Gradle for the source map build process. When you run [code:./gradlew] assembleRelease, source maps are automatically built and uploaded to Sentry. If you have enabled Gradle's [code:org.gradle.configureondemand] feature, you'll need a clean build, or you'll need to disable this feature to upload the source map on every build by setting [code:org.gradle.configureondemand=false] or remove it.",
{
- gradLewCode:
,
- orgGradleCode:
,
- orgGradleCodeConfigureCode:
,
+ code:
,
}
)}
@@ -157,7 +153,7 @@ const onboarding: OnboardingConfig = {
? [
{
description: t(
- 'React Native Profiling beta is available since SDK version 5.8.0.'
+ 'React Native Profiling is available since SDK version 5.32.0.'
),
},
]
diff --git a/static/app/gettingStartedDocs/ruby/rack.tsx b/static/app/gettingStartedDocs/ruby/rack.tsx
index 96ddda0c258b0..9bd4280497944 100644
--- a/static/app/gettingStartedDocs/ruby/rack.tsx
+++ b/static/app/gettingStartedDocs/ruby/rack.tsx
@@ -67,13 +67,12 @@ const onboarding: OnboardingConfig = {
{
description: params.isProfilingSelected
? tct(
- 'Ruby Profiling beta is available since SDK version 5.9.0. We use the [stackprofLink:stackprof gem] to collect profiles for Ruby. Make sure [stackprofCode:stackprof] is loaded before [sentryRubyCode:sentry-ruby].',
+ 'Ruby Profiling beta is available since SDK version 5.9.0. We use the [stackprofLink:stackprof gem] to collect profiles for Ruby. Make sure [code:stackprof] is loaded before [code:sentry-ruby].',
{
stackprofLink: (
),
- stackprofCode:
,
- sentryRubyCode:
,
+ code:
,
}
)
: undefined,
@@ -92,10 +91,9 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'Add [sentryRackCode:use Sentry::Rack::CaptureExceptions] to your [sentryConfigCode:config.ru] or other rackup file (this is automatically inserted in Rails):',
+ 'Add [code:use Sentry::Rack::CaptureExceptions] to your [code:config.ru] or other rackup file (this is automatically inserted in Rails):',
{
- sentryRackCode:
,
- sentryConfigCode:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/ruby/rails.tsx b/static/app/gettingStartedDocs/ruby/rails.tsx
index 93ac25922fe8f..f88b218e40846 100644
--- a/static/app/gettingStartedDocs/ruby/rails.tsx
+++ b/static/app/gettingStartedDocs/ruby/rails.tsx
@@ -79,13 +79,12 @@ const onboarding: OnboardingConfig = {
code: getInstallSnippet(params),
additionalInfo: params.isProfilingSelected
? tct(
- 'Ruby Profiling beta is available since SDK version 5.9.0. We use the [stackprofLink:stackprof gem] to collect profiles for Ruby. Make sure [stackprofCode:stackprof] is loaded before [sentryRubyCode:sentry-ruby].',
+ 'Ruby Profiling beta is available since SDK version 5.9.0. We use the [stackprofLink:stackprof gem] to collect profiles for Ruby. Make sure [code:stackprof] is loaded before [code:sentry-ruby].',
{
stackprofLink: (
),
- stackprofCode:
,
- sentryRubyCode:
,
+ code:
,
}
)
: undefined,
diff --git a/static/app/gettingStartedDocs/ruby/ruby.tsx b/static/app/gettingStartedDocs/ruby/ruby.tsx
index 38a9ff5b9e6d1..b05193725c54f 100644
--- a/static/app/gettingStartedDocs/ruby/ruby.tsx
+++ b/static/app/gettingStartedDocs/ruby/ruby.tsx
@@ -63,13 +63,12 @@ const onboarding: OnboardingConfig = {
{
description: params.isProfilingSelected
? tct(
- 'Ruby Profiling beta is available since SDK version 5.9.0. We use the [stackprofLink:stackprof gem] to collect profiles for Ruby. Make sure [stackprofCode:stackprof] is loaded before [sentryRubyCode:sentry-ruby].',
+ 'Ruby Profiling beta is available since SDK version 5.9.0. We use the [stackprofLink:stackprof gem] to collect profiles for Ruby. Make sure [code:stackprof] is loaded before [code:sentry-ruby].',
{
stackprofLink: (
),
- stackprofCode:
,
- sentryRubyCode:
,
+ code:
,
}
)
: undefined,
@@ -88,8 +87,8 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- 'To use Sentry Ruby all you need is your DSN. Like most Sentry libraries it will honor the [sentryDSN:SENTRY_DSN] environment variable. You can find it on the project settings page under API Keys. You can either export it as environment variable or manually configure it with [sentryInit:Sentry.init]:',
- {sentryDSN:
, sentryInit:
}
+ 'To use Sentry Ruby all you need is your DSN. Like most Sentry libraries it will honor the [code:SENTRY_DSN] environment variable. You can find it on the project settings page under API Keys. You can either export it as environment variable or manually configure it with [code:Sentry.init]:',
+ {code:
}
),
configurations: [
{
diff --git a/static/app/gettingStartedDocs/rust/rust.tsx b/static/app/gettingStartedDocs/rust/rust.tsx
index 9e596cc5ca2aa..6025b294de6a0 100644
--- a/static/app/gettingStartedDocs/rust/rust.tsx
+++ b/static/app/gettingStartedDocs/rust/rust.tsx
@@ -102,11 +102,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.INSTALL,
description: tct(
- 'You need at least version 0.32.1 of the [codeSentry:sentry] or [codeSentryCore:sentry-core] crates installed. Enable the [codeFeature:UNSTABLE_metrics] feature:',
+ 'You need at least version 0.32.1 of the [code:sentry] or [code:sentry-core] crates installed. Enable the [code:UNSTABLE_metrics] feature:',
{
- codeSentry:
,
- codeSentryCore:
,
- codeSentryFeature:
,
+ code:
,
}
),
configurations: [
@@ -123,13 +121,9 @@ const customMetricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], and [codeGauge:gauges]. These are available under the [codeNamespace:Sentry.metrics] namespace. Try out this example:",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], and [code:gauges]. These are available under the [code:Sentry.metrics] namespace. Try out this example:",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeNamespace:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/unity/unity.tsx b/static/app/gettingStartedDocs/unity/unity.tsx
index e088714937371..b0a8aa3a641b9 100644
--- a/static/app/gettingStartedDocs/unity/unity.tsx
+++ b/static/app/gettingStartedDocs/unity/unity.tsx
@@ -73,8 +73,8 @@ const onboarding: OnboardingConfig = {
{
type: StepType.CONFIGURE,
description: tct(
- "Access the Sentry configuration window by going to Unity's top menu: [toolsCode:Tools] > [sentryCode:Sentry] and enter the following DSN:",
- {toolsCode:
, sentryCode:
}
+ "Access the Sentry configuration window by going to Unity's top menu: [code:Tools] > [code:Sentry] and enter the following DSN:",
+ {code:
}
),
configurations: [
{
@@ -198,13 +198,9 @@ const metricsOnboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: tct(
- "Then you'll be able to add metrics as [codeCounters:counters], [codeSets:sets], [codeDistribution:distributions], [codeGauge:gauges], and [codeTimings:timings].",
+ "Then you'll be able to add metrics as [code:counters], [code:sets], [code:distributions], [code:gauges], and [code:timings].",
{
- codeCounters:
,
- codeSets:
,
- codeDistribution:
,
- codeGauge:
,
- codeTimings:
,
+ code:
,
}
),
configurations: [
diff --git a/static/app/gettingStartedDocs/unreal/unreal.tsx b/static/app/gettingStartedDocs/unreal/unreal.tsx
index 21adc92122119..cbedb8ee52716 100644
--- a/static/app/gettingStartedDocs/unreal/unreal.tsx
+++ b/static/app/gettingStartedDocs/unreal/unreal.tsx
@@ -172,8 +172,8 @@ const onboarding: OnboardingConfig = {
additionalInfo: (
{tct(
- 'If a [crashReportCode:CrashReportClient] section already exists, simply changing the value of [dataRouterUrlCode:DataRouterUrl] is enough.',
- {crashReportCode:
, dataRouterUrlCode:
}
+ 'If a [code:CrashReportClient] section already exists, simply changing the value of [code:DataRouterUrl] is enough.',
+ {code:
}
)}
),
@@ -188,11 +188,9 @@ const onboarding: OnboardingConfig = {
{tct(
- 'To allow Sentry to fully process native crashes and provide you with symbolicated stack traces, you need to upload [debugInformationItalic:debug information files] (sometimes also referred to as [debugSymbolsItalic:debug symbols] or just [symbolsItalic:symbols]). We recommend uploading debug information during your build or release process.',
+ 'To allow Sentry to fully process native crashes and provide you with symbolicated stack traces, you need to upload [italic:debug information files] (sometimes also referred to as [italic:debug symbols] or just [italic:symbols]). We recommend uploading debug information during your build or release process.',
{
- debugInformationItalic: ,
- symbolsItalic: ,
- debugSymbolsItalic: ,
+ italic: ,
}
)}
diff --git a/static/app/icons/iconCellSignal.tsx b/static/app/icons/iconCellSignal.tsx
new file mode 100644
index 0000000000000..a2cb0d846a044
--- /dev/null
+++ b/static/app/icons/iconCellSignal.tsx
@@ -0,0 +1,26 @@
+import {forwardRef} from 'react';
+import {useTheme} from '@emotion/react';
+
+import {SvgIcon, type SVGIconProps} from 'sentry/icons/svgIcon';
+
+interface Props extends SVGIconProps {
+ bars?: 0 | 1 | 2 | 3;
+}
+const IconCellSignal = forwardRef(({bars = 3, ...props}, ref) => {
+ const theme = useTheme();
+ const firstBarColor = bars > 0 ? theme.gray300 : theme.gray100;
+ const secondBarColor = bars > 1 ? theme.gray300 : theme.gray100;
+ const thirdBarColor = bars > 2 ? theme.gray300 : theme.gray100;
+
+ return (
+
+
+
+
+
+ );
+});
+
+IconCellSignal.displayName = 'IconCellSignal';
+
+export {IconCellSignal};
diff --git a/static/app/index.tsx b/static/app/index.tsx
index 4a5dd49a969f7..680e8979672fb 100644
--- a/static/app/index.tsx
+++ b/static/app/index.tsx
@@ -67,20 +67,6 @@
//
// [1]: https://sentry.io/careers/
-// TODO(__SENTRY_USING_REACT_ROUTER_SIX): Very early on check if we're running
-// using the react-router 6 faeture flag so we can enable ths beefore the app
-// boots.
-//
-try {
- // @ts-expect-error features is an array at this point. It is unfortuantely
- // typed incorrectly
- if (window.__initialData?.features?.includes('organizations:react-router-6')) {
- window.__SENTRY_USING_REACT_ROUTER_SIX = true;
- }
-} catch {
- // XXX: Just don't crash the app for any reason
-}
-
async function app() {
// We won't need initalizeMainImport until we complete bootstrapping.
// Initaite the fetch, just don't await it until we need it.
diff --git a/static/app/main.tsx b/static/app/main.tsx
index 169605f35e992..ea1fc5a9da54f 100644
--- a/static/app/main.tsx
+++ b/static/app/main.tsx
@@ -1,6 +1,4 @@
import {useState} from 'react';
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {Router, RouterContext} from 'react-router';
import {createBrowserRouter, RouterProvider} from 'react-router-dom';
import {wrapCreateBrowserRouter} from '@sentry/react';
import {ReactQueryDevtools} from '@tanstack/react-query-devtools';
@@ -11,59 +9,34 @@ import {ThemeAndStyleProvider} from 'sentry/components/themeAndStyleProvider';
import {USE_REACT_QUERY_DEVTOOL} from 'sentry/constants';
import {routes} from 'sentry/routes';
import ConfigStore from 'sentry/stores/configStore';
-import {
- browserHistory,
- DANGEROUS_SET_REACT_ROUTER_6_HISTORY,
-} from 'sentry/utils/browserHistory';
+import {DANGEROUS_SET_REACT_ROUTER_6_HISTORY} from 'sentry/utils/browserHistory';
import {
DEFAULT_QUERY_CLIENT_CONFIG,
QueryClient,
QueryClientProvider,
} from 'sentry/utils/queryClient';
-import {RouteContext} from 'sentry/views/routeContext';
import {buildReactRouter6Routes} from './utils/reactRouter6Compat/router';
-/**
- * Renders our compatibility RouteContext.Provider. This will go away with
- * react-router v6.
- */
-function renderRouter(props: any) {
- return (
-
-
-
- );
-}
-
const queryClient = new QueryClient(DEFAULT_QUERY_CLIENT_CONFIG);
-function createReactRouter6Routes() {
+function buildRouter() {
const sentryCreateBrowserRouter = wrapCreateBrowserRouter(createBrowserRouter);
const router = sentryCreateBrowserRouter(buildReactRouter6Routes(routes()));
-
- if (window.__SENTRY_USING_REACT_ROUTER_SIX) {
- DANGEROUS_SET_REACT_ROUTER_6_HISTORY(router);
- }
+ DANGEROUS_SET_REACT_ROUTER_6_HISTORY(router);
return router;
}
function Main() {
- const [router6] = useState(createReactRouter6Routes);
+ const [router] = useState(buildRouter);
return (
{ConfigStore.get('demoMode') && }
- {window.__SENTRY_USING_REACT_ROUTER_SIX ? (
-
- ) : (
-
- {routes()}
-
- )}
+
{USE_REACT_QUERY_DEVTOOL && (
diff --git a/static/app/plugins/jira/components/issueActions.tsx b/static/app/plugins/jira/components/issueActions.tsx
index 22612f603259c..db8194d7b8ca7 100644
--- a/static/app/plugins/jira/components/issueActions.tsx
+++ b/static/app/plugins/jira/components/issueActions.tsx
@@ -30,9 +30,9 @@ class IssueActions extends DefaultIssueActions {
// Try not to change things the user might have edited
// unless they're no longer valid
const oldData = this.state.createFormData;
- const createFormData = {};
+ const createFormData: Record = {};
data?.forEach(field => {
- let val;
+ let val: any;
if (
field.choices &&
!field.choices.find(c => c[0] === oldData[field.name])
diff --git a/static/app/routes.spec.tsx b/static/app/routes.spec.tsx
index 7f847b4497781..4f8e583b4451d 100644
--- a/static/app/routes.spec.tsx
+++ b/static/app/routes.spec.tsx
@@ -1,9 +1,8 @@
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {createRoutes} from 'react-router';
+import type {RouteObject} from 'react-router-dom';
import * as constants from 'sentry/constants';
import {buildRoutes} from 'sentry/routes';
-import type {RouteComponent} from 'sentry/types/legacyReactRouter';
+import {buildReactRouter6Routes} from 'sentry/utils/reactRouter6Compat/router';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
// Setup a module mock so that we can replace
@@ -20,58 +19,51 @@ jest.mock('sentry/constants', () => {
};
});
-// Workaround react-router PlainRoute type not covering redirect routes.
-type RouteShape = {
- childRoutes?: RouteShape[];
- component?: RouteComponent;
- from?: string;
- path?: string;
-};
-
type RouteMetadata = {
leadingPath: string;
- route: RouteShape;
+ route: RouteObject;
};
-function extractRoutes(rootRoute: any): Record {
- const routeTree = createRoutes(rootRoute);
- const routeMap: Record = {};
+function extractRoutes(rootRoute: RouteObject[]): Set {
+ const routes = new Set();
// A queue of routes we need to visit
- const visitQueue: RouteMetadata[] = [{leadingPath: '', route: routeTree[0]}];
+ const visitQueue: RouteMetadata[] = [{leadingPath: '', route: rootRoute[0]}];
while (visitQueue.length > 0) {
const current = visitQueue.pop();
if (!current) {
break;
}
- let leading = current.leadingPath;
- if (current.route.path?.startsWith('/')) {
- leading = '';
- }
+ const leading = current.leadingPath;
const currentPath = `${leading}${current.route.path ?? ''}`.replace('//', '/');
- if (current.route.childRoutes) {
- for (const childRoute of current.route.childRoutes ?? []) {
+ if (current.route.children) {
+ for (const childRoute of current.route.children ?? []) {
visitQueue.push({
leadingPath: currentPath,
route: childRoute,
});
}
- } else {
- if (current.route.from) {
- // Redirect routes are not relevant to us.
- continue;
- }
+ }
- // We are on a terminal route in the tree. Add to the map of route components.
- // We are less interested in container route components.
- if (current.route.component) {
- routeMap[currentPath] = current.route.component;
- }
+ if (
+ current.route.element &&
+ (
+ current.route.element as React.ReactElement
+ ).type.displayName?.endsWith('Redirect')
+ ) {
+ // Redirect routes are not relevant to us.
+ continue;
+ }
+
+ // We are on a terminal route in the tree. Add to the map of route components.
+ // We are less interested in container route components.
+ if (current.route.element) {
+ routes.add(currentPath);
}
}
- return routeMap;
+ return routes;
}
describe('buildRoutes()', function () {
@@ -83,16 +75,16 @@ describe('buildRoutes()', function () {
// Get routes for with customer domains off.
spy.mockReturnValue(false);
- const routeMap = extractRoutes(buildRoutes());
+ const routes = extractRoutes(buildReactRouter6Routes(buildRoutes()));
// Get routes with customer domains on.
spy.mockReturnValue(true);
- const domainRoutes = extractRoutes(buildRoutes());
+ const domainRoutes = extractRoutes(buildReactRouter6Routes(buildRoutes()));
// All routes that exist under orgId path slugs should
// have a sibling under customer-domains.
const mismatch: Array<{domain: string; slug: string}> = [];
- for (const path in routeMap) {
+ for (const path of routes) {
// Normalize the URLs so that we know the path we're looking for.
const domainPath = normalizeUrl(path, {forceCustomerDomain: true});
@@ -101,7 +93,7 @@ describe('buildRoutes()', function () {
continue;
}
- if (!domainRoutes[domainPath]) {
+ if (!domainRoutes.has(domainPath)) {
mismatch.push({slug: path, domain: domainPath});
}
}
diff --git a/static/app/routes.tsx b/static/app/routes.tsx
index 04b18d40ff083..8701c79c8778b 100644
--- a/static/app/routes.tsx
+++ b/static/app/routes.tsx
@@ -1,6 +1,4 @@
import {Fragment, lazy} from 'react';
-// biome-ignore lint/nursery/noRestrictedImports: warning
-import {IndexRedirect, Redirect} from 'react-router';
import memoize from 'lodash/memoize';
import LazyLoad from 'sentry/components/lazyLoad';
@@ -15,10 +13,10 @@ import withDomainRequired from 'sentry/utils/withDomainRequired';
import App from 'sentry/views/app';
import AuthLayout from 'sentry/views/auth/layout';
import {MODULE_BASE_URLS} from 'sentry/views/insights/common/utils/useModuleURL';
-import {AI_LANDING_SUB_PATH} from 'sentry/views/insights/pages/aiLandingPage';
-import {BACKEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/backendLandingPage';
-import {FRONTEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/frontendLandingPage';
-import {MOBILE_LANDING_SUB_PATH} from 'sentry/views/insights/pages/mobileLandingPage';
+import {AI_LANDING_SUB_PATH} from 'sentry/views/insights/pages/ai/settings';
+import {BACKEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/backend/settings';
+import {FRONTEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/frontend/settings';
+import {MOBILE_LANDING_SUB_PATH} from 'sentry/views/insights/pages/mobile/settings';
import {INSIGHTS_BASE_URL} from 'sentry/views/insights/settings';
import {ModuleName} from 'sentry/views/insights/types';
import {Tab, TabPaths} from 'sentry/views/issueDetails/types';
@@ -32,7 +30,7 @@ import redirectDeprecatedProjectRoute from 'sentry/views/projects/redirectDeprec
import RouteNotFound from 'sentry/views/routeNotFound';
import SettingsWrapper from 'sentry/views/settings/components/settingsWrapper';
-import {IndexRoute, Route} from './components/route';
+import {IndexRedirect, IndexRoute, Redirect, Route} from './components/route';
const hook = (name: HookName) => HookStore.get(name).map(cb => cb());
@@ -561,35 +559,19 @@ function buildRoutes() {
import('sentry/views/settings/project/toolbar'))}
+ component={make(() => import('sentry/views/settings/project/projectToolbar'))}
/>
import('sentry/views/settings/projectSourceMaps'))}
/>
import('sentry/views/settings/projectSourceMaps'))}
- >
- import('sentry/views/settings/projectSourceMaps'))}
- />
-
- import('sentry/views/settings/projectSourceMaps'))}
- >
- import('sentry/views/settings/projectSourceMaps'))}
- />
-
-
+ />
+
+
import('sentry/views/settings/project/loaderScript'))}
/>
-
+
import('sentry/views/settings/projectSecurityHeaders'))}
@@ -737,23 +722,12 @@ function buildRoutes() {
/>
-
- import(
- 'sentry/views/settings/organizationMembers/organizationMembersWrapper'
- )
+ import('sentry/views/settings/organizationMembers/organizationMembersList')
)}
- >
-
- import(
- 'sentry/views/settings/organizationMembers/organizationMembersList'
- )
- )}
- />
-
+ />
import('sentry/views/settings/earlyFeatures'))}
/>
+ import('sentry/views/settings/dynamicSampling'))}
+ />
);
@@ -1641,22 +1620,208 @@ function buildRoutes() {
path="trends/"
component={make(() => import('sentry/views/performance/trends'))}
/>
- import('sentry/views/insights/pages/frontendLandingPage'))}
- />
- import('sentry/views/insights/pages/backendLandingPage'))}
- />
- import('sentry/views/insights/pages/mobileLandingPage'))}
- />
- import('sentry/views/insights/pages/aiLandingPage'))}
- />
+
+ import('sentry/views/insights/pages/frontend/frontendOverviewPage')
+ )}
+ />
+
+ import('sentry/views/insights/http/views/httpLandingPage')
+ )}
+ />
+ import('sentry/views/insights/http/views/httpDomainSummaryPage')
+ )}
+ />
+
+
+
+ import(
+ 'sentry/views/insights/browser/webVitals/views/webVitalsLandingPage'
+ )
+ )}
+ />
+ import('sentry/views/insights/browser/webVitals/views/pageOverview')
+ )}
+ />
+
+
+
+ import(
+ 'sentry/views/insights/browser/resources/views/resourcesLandingPage'
+ )
+ )}
+ />
+
+ import(
+ 'sentry/views/insights/browser/resources/views/resourceSummaryPage'
+ )
+ )}
+ />
+
+
+
+ import('sentry/views/insights/pages/backend/backendOverviewPage')
+ )}
+ />
+
+ import('sentry/views/insights/database/views/databaseLandingPage')
+ )}
+ />
+ import('sentry/views/insights/database/views/databaseSpanSummaryPage')
+ )}
+ />
+
+
+ import('sentry/views/insights/http/views/httpLandingPage')
+ )}
+ />
+ import('sentry/views/insights/http/views/httpDomainSummaryPage')
+ )}
+ />
+
+
+ import('sentry/views/insights/cache/views/cacheLandingPage')
+ )}
+ />
+
+
+ import('sentry/views/insights/queues/views/queuesLandingPage')
+ )}
+ />
+ import('sentry/views/insights/queues/views/destinationSummaryPage')
+ )}
+ />
+
+
+
+ import('sentry/views/insights/pages/mobile/mobileOverviewPage')
+ )}
+ />
+
+
+ import('sentry/views/insights/mobile/screens/views/screensLandingPage')
+ )}
+ />
+ import('sentry/views/insights/mobile/screens/views/screenDetailsPage')
+ )}
+ />
+
+
+
+ import(
+ 'sentry/views/insights/mobile/appStarts/views/appStartsLandingPage'
+ )
+ )}
+ />
+
+ import('sentry/views/insights/mobile/appStarts/views/screenSummaryPage')
+ )}
+ />
+
+
+ import('sentry/views/insights/mobile/ui/views/uiLandingPage')
+ )}
+ />
+ import('sentry/views/insights/mobile/ui/views/screenSummaryPage')
+ )}
+ />
+
+
+
+ import(
+ 'sentry/views/insights/mobile/screenload/views/screenloadLandingPage'
+ )
+ )}
+ />
+
+ import(
+ 'sentry/views/insights/mobile/screenload/views/screenLoadSpansPage'
+ )
+ )}
+ />
+
+
+
+ import('sentry/views/insights/pages/ai/aiOverviewPage'))}
+ />
+
+
+ import(
+ 'sentry/views/insights/llmMonitoring/views/llmMonitoringLandingPage'
+ )
+ )}
+ />
+
+ import(
+ 'sentry/views/insights/llmMonitoring/views/llmMonitoringDetailsPage'
+ )
+ )}
+ />
+
+
import('sentry/views/issueDetails/groupSimilarIssues'))
+ make(
+ () =>
+ import(
+ 'sentry/views/issueDetails/groupSimilarIssues/groupSimilarIssuesTab'
+ )
+ )
)}
/>
import('sentry/views/issueDetails/groupMerged')))}
+ component={hoc(
+ make(() => import('sentry/views/issueDetails/groupMerged/groupMergedTab'))
+ )}
/>
);
diff --git a/static/app/sentryPropTypeValidators.tsx b/static/app/sentryPropTypeValidators.tsx
index 3bcfb4ce82e47..2dbba500731bc 100644
--- a/static/app/sentryPropTypeValidators.tsx
+++ b/static/app/sentryPropTypeValidators.tsx
@@ -1,276 +1,3 @@
-import type {Avatar} from 'sentry/types/core';
-import type {Group} from 'sentry/types/group';
-import type {User, UserEmail} from 'sentry/types/user';
-
-/**
- * @deprecated
- */
-function isAvatarShape(avatar: unknown): null | Error {
- if (typeof avatar !== 'object' || avatar === null) {
- return new Error('avatar is not an object');
- }
-
- if (!('avatarType' in avatar) || typeof avatar.avatarType !== 'string') {
- return new Error(`avatarType must be string.`);
- }
-
- const maybeAvatarShape = avatar as Partial
;
- if (
- maybeAvatarShape.avatarType !== 'letter_avatar' &&
- maybeAvatarShape.avatarType !== 'upload' &&
- maybeAvatarShape.avatarType !== 'gravatar'
- ) {
- return new Error(`avatarType must be one of 'letter_avatar', 'upload', 'gravatar'.`);
- }
-
- if (!('avatarUuid' in avatar) || typeof maybeAvatarShape.avatarUuid !== 'string') {
- return new Error(`avatarUuid must be string`);
- }
-
- return null;
-}
-
-/**
- * @deprecated
- */
-function isEmailShape(email: unknown): null | Error {
- if (typeof email !== 'object' || email === null) {
- return new Error('email is not of object type');
- }
-
- const maybeEmailShape = email as Partial;
-
- if ('email' in maybeEmailShape && typeof maybeEmailShape.email !== 'string') {
- return new Error(`email must be string.`);
- }
-
- if ('id' in maybeEmailShape && typeof maybeEmailShape.id !== 'string') {
- return new Error(`id must be string.`);
- }
-
- if (
- 'is_verified' in maybeEmailShape &&
- typeof maybeEmailShape.is_verified !== 'boolean'
- ) {
- return new Error(`is_verified must be boolean.`);
- }
-
- return null;
-}
-
-/**
- * @deprecated
- */
-const USER_STRING_KEYS: (keyof User)[] = [
- 'avatarUrl',
- 'dateJoined',
- 'email',
- 'id',
- 'lastActive',
- 'lastLogin',
- 'username',
-];
-const USER_BOOLEAN_KEYS: (keyof User)[] = [
- 'has2fa',
- 'hasPasswordAuth',
- 'isActive',
- 'isManaged',
-];
-function isUserShape(user: unknown): null | Error {
- if (user === null) {
- return null;
- }
- if (typeof user !== 'object') {
- return new Error('user is not of object type');
- }
-
- const maybeUserShape = user as Partial;
-
- if ('avatar' in maybeUserShape && isAvatarShape(maybeUserShape.avatar) !== null) {
- return new Error('user.avatar is not of type Avatar');
- }
-
- if (
- 'emails' in maybeUserShape &&
- Array.isArray(maybeUserShape.emails) &&
- !maybeUserShape.emails.every(e => isEmailShape(e) === null)
- ) {
- return null;
- }
-
- for (const key of USER_BOOLEAN_KEYS) {
- if (key in maybeUserShape && typeof maybeUserShape[key] !== 'boolean') {
- return new Error(`user.${key} is not of type string`);
- }
- }
-
- if ('identities' in maybeUserShape && !Array.isArray(maybeUserShape.identities)) {
- return new Error('user.id identities not of type array');
- }
-
- for (const key of USER_STRING_KEYS) {
- if (key in user && typeof user[key] !== 'string') {
- return new Error(`user.${key} is not of type string`);
- }
- }
-
- return null;
-}
-
-/**
- * @deprecated
- */
-function isPartialProjectShape(project: unknown): null | Error {
- if (typeof project !== 'object' || project === null) {
- return new Error('project is not of object type');
- }
-
- for (const key of ['name', 'slug']) {
- if (key in project && typeof project[key] !== 'string') {
- return new Error(`${key} must be string.`);
- }
- }
-
- return null;
-}
-
-const METADATA_STRING_KEYS = ['value', 'message', 'directive', 'type', 'title', 'uri'];
-/**
- * @deprecated
- */
-function isMetaDataShape(metaData: unknown): null | Error {
- if (typeof metaData !== 'object' || metaData === null) {
- return new Error('metaData is not of object type');
- }
-
- for (const key of METADATA_STRING_KEYS) {
- if (key in metaData && typeof metaData[key] !== 'string') {
- return new Error(`value must be string.`);
- }
- }
-
- return null;
-}
-
-/**
- * @deprecated
- */
-
-/**
- * @deprecated
- */
-const GROUP_NUMBER_KEYS: (keyof Group)[] = ['userCount', 'numComments'];
-const GROUP_BOOLEAN_KEYS: (keyof Group)[] = [
- 'hasSeen',
- 'isBookmarked',
- 'isPublic',
- 'isSubscribed',
-];
-const GROUP_STRING_KEYS: (keyof Group)[] = [
- 'lastSeen',
- 'count',
- 'culprit',
- 'firstSeen',
- 'level',
- 'permalink',
- 'shareId',
- 'shortId',
- 'status',
- 'title',
-];
-
-/**
- * @deprecated
- */
-function isGroup(
- props: unknown,
- propName: string,
- _componentName: unknown
-): null | Error {
- if (typeof props !== 'object' || props === null) {
- return new Error('props is not an object');
- }
-
- if (!(propName in props) || typeof props[propName] !== 'object') {
- return null;
- }
-
- if (!props[propName]) {
- return null;
- }
-
- const group = props[propName];
-
- if (!('id' in group) || typeof group.id !== 'string') {
- return new Error(`id must be string.`);
- }
-
- for (const key of GROUP_NUMBER_KEYS) {
- if (key in group && typeof group[key] !== 'number') {
- return new Error(`${key} must be number.`);
- }
- }
-
- for (const key of GROUP_BOOLEAN_KEYS) {
- if (key in group && typeof group[key] !== 'boolean') {
- return new Error(`${key} must be boolean.`);
- }
- }
-
- if ('logger' in group) {
- if (typeof group.logger !== 'string' && group.logger !== null) {
- return new Error(`logger must be of string or null type.`);
- }
- }
-
- for (const key of GROUP_STRING_KEYS) {
- if (key in group && typeof group[key] !== 'string') {
- return new Error(`${key} must be string. got ${group[key]}`);
- }
- }
-
- if ('type' in group) {
- if (typeof group.type !== 'string') {
- return new Error(`type must be string.`);
- }
- if (
- group.type !== 'error' &&
- group.type !== 'csp' &&
- group.type !== 'hpkp' &&
- group.type !== 'expectct' &&
- group.type !== 'expectstaple' &&
- group.type !== 'default' &&
- group.type !== 'transaction'
- ) {
- return new Error(
- `type must be one of 'error', 'csp', 'hpkp', 'expectct', 'expectstaple', 'default', 'transaction'.`
- );
- }
- }
-
- if ('statusDetails' in group && typeof group.statusDetails !== 'object') {
- return new Error(`statusDetails must be object.`);
- }
-
- if ('annotations' in group && !Array.isArray(group.annotations)) {
- return new Error(`annotations must be of array type.`);
- }
-
- if ('assignedTo' in group && isUserShape(group.assignedTo) !== null) {
- return new Error(`assignedTo must be of type User.`);
- }
-
- if ('metadata' in group && isMetaDataShape(group.metadata) !== null) {
- return new Error(`metadata must be of type MetaData.`);
- }
-
- if ('project' in group && isPartialProjectShape(group.project) !== null) {
- return new Error(`project must be of type PartialProject.`);
- }
-
- return null;
-}
-
/**
* @deprecated
*/
@@ -301,6 +28,5 @@ function isObject(
* @deprecated
*/
export const SentryPropTypeValidators = {
- isGroup,
isObject,
};
diff --git a/static/app/stores/groupingStore.spec.tsx b/static/app/stores/groupingStore.spec.tsx
index db9339e1cda53..b2840d9f3ed0c 100644
--- a/static/app/stores/groupingStore.spec.tsx
+++ b/static/app/stores/groupingStore.spec.tsx
@@ -2,7 +2,7 @@ import * as GroupActionCreators from 'sentry/actionCreators/group';
import GroupingStore from 'sentry/stores/groupingStore';
describe('Grouping Store', function () {
- let trigger;
+ let trigger!: jest.SpyInstance;
beforeAll(function () {
MockApiClient.asyncDelay = 1;
diff --git a/static/app/stores/tagStore.spec.tsx b/static/app/stores/tagStore.spec.tsx
index e3042f1756293..d73f20f7a5f7a 100644
--- a/static/app/stores/tagStore.spec.tsx
+++ b/static/app/stores/tagStore.spec.tsx
@@ -1,5 +1,3 @@
-import {OrganizationFixture} from 'sentry-fixture/organization';
-
import TagStore from 'sentry/stores/tagStore';
describe('TagStore', function () {
@@ -37,79 +35,6 @@ describe('TagStore', function () {
});
});
- describe('getIssueAttributes()', function () {
- it('should populate the has tag with values', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'mytag',
- name: 'My Custom Tag',
- },
- {
- key: 'otherkey',
- name: 'My other tag',
- },
- ]);
-
- expect(TagStore.getIssueAttributes(OrganizationFixture()).has).toEqual({
- key: 'has',
- name: 'Has Tag',
- values: ['mytag', 'otherkey'],
- predefined: true,
- });
- });
-
- it('should not overwrite predefined filters', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'is',
- name: 'Custom Assigned To',
- },
- ]);
-
- const tags = TagStore.getIssueAttributes(OrganizationFixture());
- expect(tags.is).toBeTruthy();
- expect(tags.is.key).toBe('is');
- expect(tags.assigned).toBeTruthy();
- });
-
- it('should replace ignore with archive', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'is',
- name: 'Custom Assigned To',
- },
- ]);
-
- const tags = TagStore.getIssueAttributes(OrganizationFixture());
- expect(tags.is.values).toContain('archived');
- });
- });
-
- describe('getIssueTags()', function () {
- it('should have built in, state, and issue attribute tags', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'mytag',
- name: 'My Custom Tag',
- },
- ]);
-
- const tags = TagStore.getIssueTags(OrganizationFixture());
-
- // state
- expect(tags.mytag).toBeTruthy();
- expect(tags.mytag.key).toBe('mytag');
-
- // attribute
- expect(tags.has).toBeTruthy();
- expect(tags.has.key).toBe('has');
-
- // built in
- expect(tags['device.family']).toBeTruthy();
- expect(tags['device.family'].key).toBe('device.family');
- });
- });
-
it('returns a stable reference from getState', () => {
TagStore.loadTagsSuccess([
{
diff --git a/static/app/stores/tagStore.tsx b/static/app/stores/tagStore.tsx
index 190a159425116..3ab9d15c084e3 100644
--- a/static/app/stores/tagStore.tsx
+++ b/static/app/stores/tagStore.tsx
@@ -1,55 +1,10 @@
import {createStore} from 'reflux';
-import {ItemType, type SearchGroup} from 'sentry/components/smartSearchBar/types';
import type {Tag, TagCollection} from 'sentry/types/group';
-import {
- getIssueTitleFromType,
- IssueCategory,
- IssueType,
- PriorityLevel,
-} from 'sentry/types/group';
-import type {Organization} from 'sentry/types/organization';
-import {SEMVER_TAGS} from 'sentry/utils/discover/fields';
-import {
- FieldKey,
- FieldKind,
- ISSUE_EVENT_PROPERTY_FIELDS,
- ISSUE_FIELDS,
- ISSUE_PROPERTY_FIELDS,
-} from 'sentry/utils/fields';
import type {StrictStoreDefinition} from './types';
-// This list is only used on issues. Events/discover
-// have their own field list that exists elsewhere.
-const BUILTIN_TAGS = ISSUE_FIELDS.reduce((acc, tag) => {
- acc[tag] = {key: tag, name: tag};
- return acc;
-}, {});
-
-// For the new query builder, we need to differentiate between issue and event fields
-const BUILTIN_TAGS_BY_CATEGORY = {
- ...ISSUE_PROPERTY_FIELDS.reduce((acc, tag) => {
- acc[tag] = {key: tag, name: tag, predefined: true, kind: FieldKind.ISSUE_FIELD};
- return acc;
- }, {}),
- ...ISSUE_EVENT_PROPERTY_FIELDS.reduce((acc, tag) => {
- acc[tag] = {key: tag, name: tag, predefined: false, kind: FieldKind.EVENT_FIELD};
- return acc;
- }, {}),
-};
-
-export function getBuiltInTags(organization: Organization) {
- if (organization.features.includes('issue-stream-search-query-builder')) {
- return BUILTIN_TAGS_BY_CATEGORY;
- }
-
- return BUILTIN_TAGS;
-}
-
interface TagStoreDefinition extends StrictStoreDefinition {
- getIssueAttributes(organization: Organization): TagCollection;
- getIssueTags(org: Organization): TagCollection;
loadTagsSuccess(data: Tag[]): void;
reset(): void;
}
@@ -63,190 +18,6 @@ const storeConfig: TagStoreDefinition = {
this.state = {};
},
- /**
- * Gets only predefined issue attributes
- */
- getIssueAttributes(organization: Organization) {
- // TODO(mitsuhiko): what do we do with translations here?
- const isSuggestions = [
- 'resolved',
- 'unresolved',
- ...['archived', 'escalating', 'new', 'ongoing', 'regressed'],
- 'assigned',
- 'unassigned',
- 'for_review',
- 'linked',
- 'unlinked',
- ];
-
- const sortedTagKeys = Object.keys(this.state).sort((a, b) => {
- return a.toLowerCase().localeCompare(b.toLowerCase());
- });
-
- const builtinTags = getBuiltInTags(organization);
-
- const tagCollection = {
- [FieldKey.IS]: {
- ...builtinTags[FieldKey.IS],
- key: FieldKey.IS,
- name: 'Status',
- values: isSuggestions,
- maxSuggestedValues: isSuggestions.length,
- predefined: true,
- },
- [FieldKey.HAS]: {
- ...builtinTags[FieldKey.HAS],
- key: FieldKey.HAS,
- name: 'Has Tag',
- values: sortedTagKeys,
- predefined: true,
- },
- [FieldKey.ASSIGNED]: {
- ...builtinTags[FieldKey.ASSIGNED],
- key: FieldKey.ASSIGNED,
- name: 'Assigned To',
- values: [],
- predefined: true,
- },
- [FieldKey.BOOKMARKS]: {
- ...builtinTags[FieldKey.BOOKMARKS],
- name: 'Bookmarked By',
- values: [],
- predefined: true,
- },
- [FieldKey.ISSUE_CATEGORY]: {
- ...builtinTags[FieldKey.ISSUE_CATEGORY],
- name: 'Issue Category',
- values: [
- IssueCategory.ERROR,
- IssueCategory.PERFORMANCE,
- IssueCategory.REPLAY,
- IssueCategory.CRON,
- IssueCategory.UPTIME,
- ],
- predefined: true,
- },
- [FieldKey.ISSUE_TYPE]: {
- ...builtinTags[FieldKey.ISSUE_TYPE],
- name: 'Issue Type',
- values: [
- IssueType.PERFORMANCE_N_PLUS_ONE_DB_QUERIES,
- IssueType.PERFORMANCE_N_PLUS_ONE_API_CALLS,
- IssueType.PERFORMANCE_CONSECUTIVE_DB_QUERIES,
- IssueType.PERFORMANCE_SLOW_DB_QUERY,
- IssueType.PERFORMANCE_RENDER_BLOCKING_ASSET,
- IssueType.PERFORMANCE_UNCOMPRESSED_ASSET,
- IssueType.PERFORMANCE_ENDPOINT_REGRESSION,
- IssueType.PROFILE_FILE_IO_MAIN_THREAD,
- IssueType.PROFILE_IMAGE_DECODE_MAIN_THREAD,
- IssueType.PROFILE_JSON_DECODE_MAIN_THREAD,
- IssueType.PROFILE_REGEX_MAIN_THREAD,
- IssueType.PROFILE_FUNCTION_REGRESSION,
- ].map(value => ({
- icon: null,
- title: value,
- name: value,
- documentation: getIssueTitleFromType(value),
- value,
- type: ItemType.TAG_VALUE,
- children: [],
- })) as SearchGroup[],
- predefined: true,
- },
- [FieldKey.LAST_SEEN]: {
- ...builtinTags[FieldKey.LAST_SEEN],
- name: 'Last Seen',
- values: [],
- predefined: false,
- },
- [FieldKey.FIRST_SEEN]: {
- ...builtinTags[FieldKey.FIRST_SEEN],
- name: 'First Seen',
- values: [],
- predefined: false,
- },
- [FieldKey.FIRST_RELEASE]: {
- ...builtinTags[FieldKey.FIRST_RELEASE],
- name: 'First Release',
- values: ['latest'],
- predefined: true,
- },
- [FieldKey.EVENT_TIMESTAMP]: {
- ...builtinTags[FieldKey.EVENT_TIMESTAMP],
- name: 'Event Timestamp',
- values: [],
- predefined: true,
- },
- [FieldKey.TIMES_SEEN]: {
- ...builtinTags[FieldKey.TIMES_SEEN],
- name: 'Times Seen',
- isInput: true,
- // Below values are required or else SearchBar will attempt to get values
- // This is required or else SearchBar will attempt to get values
- values: [],
- predefined: true,
- },
- [FieldKey.ASSIGNED_OR_SUGGESTED]: {
- ...builtinTags[FieldKey.ASSIGNED_OR_SUGGESTED],
- name: 'Assigned or Suggested',
- isInput: true,
- values: [],
- predefined: true,
- },
- [FieldKey.ISSUE_PRIORITY]: {
- ...builtinTags[FieldKey.ISSUE_PRIORITY],
- name: 'Issue Priority',
- values: [PriorityLevel.HIGH, PriorityLevel.MEDIUM, PriorityLevel.LOW],
- predefined: true,
- },
- };
-
- // Ony include fields that that are part of the ISSUE_FIELDS. This is
- // because we may sometimes have fields that are turned off by removing
- // them from ISSUE_FIELDS
- const filteredCollection = Object.entries(tagCollection).filter(([key]) =>
- ISSUE_FIELDS.includes(key as FieldKey)
- );
-
- return Object.fromEntries(filteredCollection);
- },
-
- /**
- * Get all tags including builtin issue tags and issue attributes
- */
- getIssueTags(org: Organization) {
- const eventTags = Object.values(this.state).reduce((acc, tag) => {
- return {
- ...acc,
- [tag.key]: {
- ...tag,
- kind: FieldKind.TAG,
- },
- };
- }, {});
-
- const semverFields = Object.values(SEMVER_TAGS).reduce((acc, tag) => {
- return {
- ...acc,
- [tag.key]: {
- predefined: false,
- ...tag,
- kind: org.features.includes('issue-stream-search-query-builder')
- ? FieldKind.EVENT_FIELD
- : FieldKind.FIELD,
- },
- };
- }, {});
-
- const issueTags = {
- ...getBuiltInTags(org),
- ...semverFields,
- ...eventTags,
- ...this.getIssueAttributes(org),
- };
- return issueTags;
- },
-
getState() {
return this.state;
},
diff --git a/static/app/styles/global.tsx b/static/app/styles/global.tsx
index e0b65f130ec1b..6d283db5af7db 100644
--- a/static/app/styles/global.tsx
+++ b/static/app/styles/global.tsx
@@ -167,6 +167,10 @@ const styles = (theme: Theme, isDark: boolean) => css`
background-color: ${theme.backgroundSecondary};
white-space: pre-wrap;
overflow-x: auto;
+
+ &:focus-visible {
+ outline: ${theme.focusBorder} auto 1px;
+ }
}
code {
diff --git a/static/app/types/core.tsx b/static/app/types/core.tsx
index ea76de84c0287..fd74d12c97d6c 100644
--- a/static/app/types/core.tsx
+++ b/static/app/types/core.tsx
@@ -108,6 +108,7 @@ export interface DataCategoryInfo {
displayName: string;
name: DataCategoryExact;
plural: string;
+ productName: string;
titleName: string;
uid: number;
}
diff --git a/static/app/types/echarts.tsx b/static/app/types/echarts.tsx
index 9d762c34007de..d095eb511a81c 100644
--- a/static/app/types/echarts.tsx
+++ b/static/app/types/echarts.tsx
@@ -7,12 +7,12 @@ import type {
import type ReactEchartsCore from 'echarts-for-react/lib/core';
export type SeriesDataUnit = {
+ // number because we sometimes use timestamps
name: string | number;
value: number;
itemStyle?: {
color?: string;
};
- // number because we sometimes use timestamps
onClick?: (series: Series, instance: ECharts) => void;
};
diff --git a/static/app/types/group.tsx b/static/app/types/group.tsx
index 94a85b2750502..08125568c8afc 100644
--- a/static/app/types/group.tsx
+++ b/static/app/types/group.tsx
@@ -254,6 +254,7 @@ export type Tag = {
maxSuggestedValues?: number;
predefined?: boolean;
totalValues?: number;
+ uniqueValues?: number;
/**
* Usually values are strings, but a predefined tag can define its SearchGroups
*/
@@ -378,6 +379,7 @@ export enum GroupActivityType {
UNMERGE_SOURCE = 'unmerge_source',
UNMERGE_DESTINATION = 'unmerge_destination',
FIRST_SEEN = 'first_seen',
+ LAST_SEEN = 'last_seen',
ASSIGNED = 'assigned',
UNASSIGNED = 'unassigned',
MERGE = 'merge',
@@ -483,6 +485,11 @@ interface GroupActivityFirstSeen extends GroupActivityBase {
type: GroupActivityType.FIRST_SEEN;
}
+interface GroupActivityLastSeen extends GroupActivityBase {
+ data: Record;
+ type: GroupActivityType.LAST_SEEN;
+}
+
interface GroupActivityMarkReviewed extends GroupActivityBase {
data: Record;
type: GroupActivityType.MARK_REVIEWED;
@@ -678,7 +685,8 @@ export type GroupActivity =
| GroupActivityAutoSetOngoing
| GroupActivitySetEscalating
| GroupActivitySetPriority
- | GroupActivityDeletedAttachment;
+ | GroupActivityDeletedAttachment
+ | GroupActivityLastSeen;
export type Activity = GroupActivity;
diff --git a/static/app/types/legacyReactRouter.tsx b/static/app/types/legacyReactRouter.tsx
index f71e6eaca2410..2ea7fd801b57f 100644
--- a/static/app/types/legacyReactRouter.tsx
+++ b/static/app/types/legacyReactRouter.tsx
@@ -13,47 +13,47 @@ import type {
Query,
} from 'history';
-export interface Params {
+interface Params {
[key: string]: string;
}
-export type RoutePattern = string;
+type RoutePattern = string;
export type RouteComponent = React.ComponentClass | React.FunctionComponent;
-export interface RouteComponents {
+interface RouteComponents {
[name: string]: RouteComponent;
}
-export interface RouterState {
+interface RouterState {
components: RouteComponent[];
location: Location;
params: Params;
routes: PlainRoute[];
}
-export interface RedirectFunction {
+interface RedirectFunction {
(location: LocationDescriptor): void;
(state: LocationState, pathname: Pathname | Path, query?: Query): void;
}
type AnyFunction = (...args: any[]) => any;
-export type EnterHook = (
+type EnterHook = (
nextState: RouterState,
replace: RedirectFunction,
callback?: AnyFunction
) => any;
-export type LeaveHook = (prevState: RouterState) => any;
+type LeaveHook = (prevState: RouterState) => any;
-export type ChangeHook = (
+type ChangeHook = (
prevState: RouterState,
nextState: RouterState,
replace: RedirectFunction,
callback?: AnyFunction
) => any;
-export type RouteHook = (nextLocation?: Location) => any;
+type RouteHook = (nextLocation?: Location) => any;
type ComponentCallback = (err: any, component: RouteComponent) => any;
type ComponentsCallback = (err: any, components: RouteComponents) => any;
@@ -130,3 +130,12 @@ export interface RouteContextInterface, Q = any> {
}
export type Route = React.ComponentClass;
+
+export interface IndexRedirectProps {
+ to: RoutePattern;
+ query?: Query | undefined;
+}
+
+export interface RedirectProps extends IndexRedirectProps {
+ from: RoutePattern;
+}
diff --git a/static/app/types/onboarding.tsx b/static/app/types/onboarding.tsx
index d0d8df11eb967..3125b472b6b11 100644
--- a/static/app/types/onboarding.tsx
+++ b/static/app/types/onboarding.tsx
@@ -9,6 +9,11 @@ import type {Organization} from './organization';
import type {PlatformIntegration, PlatformKey, Project} from './project';
import type {AvatarUser} from './user';
+export enum OnboardingTaskGroup {
+ GETTING_STARTED = 'getting_started',
+ BEYOND_BASICS = 'beyond_basics',
+}
+
export enum OnboardingTaskKey {
FIRST_PROJECT = 'create_project',
FIRST_EVENT = 'send_first_event',
@@ -18,11 +23,12 @@ export enum OnboardingTaskKey {
RELEASE_TRACKING = 'setup_release_tracking',
SOURCEMAPS = 'setup_sourcemaps',
USER_REPORTS = 'setup_user_reports',
- ISSUE_TRACKER = 'setup_issue_tracker',
ALERT_RULE = 'setup_alert_rules',
FIRST_TRANSACTION = 'setup_transactions',
METRIC_ALERT = 'setup_metric_alert_rules',
USER_SELECTED_PROJECTS = 'setup_userselected_projects',
+ REAL_TIME_NOTIFICATIONS = 'setup_real_time_notifications',
+ LINK_SENTRY_TO_SOURCE_CODE = 'link_sentry_to_source_code',
/// Customized card that shows the selected integrations during onboarding
INTEGRATIONS = 'integrations',
/// Regular card that tells the user to setup integrations if no integrations were selected during onboarding
@@ -68,6 +74,10 @@ interface OnboardingTaskDescriptorBase {
* An extra component that may be rendered within the onboarding task item.
*/
SupplementComponent?: React.ComponentType;
+ /**
+ * The group that this task belongs to, e.g. basic and level up
+ */
+ group?: OnboardingTaskGroup;
/**
* If a render function was provided, it will be used to render the entire card,
* and the card will be rendered before any other cards regardless of completion status.
diff --git a/static/app/types/organization.tsx b/static/app/types/organization.tsx
index 7304e9072a162..99aeedede9a7c 100644
--- a/static/app/types/organization.tsx
+++ b/static/app/types/organization.tsx
@@ -71,6 +71,10 @@ export interface Organization extends OrganizationSummary {
isDynamicallySampled: boolean;
onboardingTasks: OnboardingTaskStatus[];
openMembership: boolean;
+ /**
+ * A list of roles that are available to the organization.
+ * eg: billing, admin, member, manager, owner
+ */
orgRoleList: OrgRole[];
pendingAccessRequests: number;
quota: {
@@ -132,7 +136,10 @@ export interface BaseRole {
export interface OrgRole extends BaseRole {
minimumTeamRole: string;
isGlobal?: boolean;
- is_global?: boolean; // Deprecated: use isGlobal
+ /**
+ * @deprecated use isGlobal
+ */
+ is_global?: boolean;
}
export interface TeamRole extends BaseRole {
isMinimumRoleFor: string;
@@ -311,6 +318,10 @@ export type MultiSeriesEventsStats = {
[seriesName: string]: EventsStats;
};
+export type GroupedMultiSeriesEventsStats = {
+ [seriesName: string]: MultiSeriesEventsStats & {order: number};
+};
+
export type EventsStatsSeries = {
data: {
axis: F;
diff --git a/static/app/types/project.tsx b/static/app/types/project.tsx
index 74e5e7051b365..d5bd5dcf76c27 100644
--- a/static/app/types/project.tsx
+++ b/static/app/types/project.tsx
@@ -217,6 +217,7 @@ export type PlatformKey =
| 'javascript-ember'
| 'javascript-gatsby'
| 'javascript-nextjs'
+ | 'javascript-nuxt'
| 'javascript-react'
| 'javascript-remix'
| 'javascript-solid'
diff --git a/static/app/types/release.tsx b/static/app/types/release.tsx
index 9ef101f5bdcc8..9e6f80cd468bd 100644
--- a/static/app/types/release.tsx
+++ b/static/app/types/release.tsx
@@ -40,7 +40,7 @@ interface RawVersion {
raw: string;
}
-export interface SemverVerison extends RawVersion {
+export interface SemverVersion extends RawVersion {
buildCode: string | null;
components: number;
major: number;
@@ -53,7 +53,7 @@ export type VersionInfo = {
buildHash: string | null;
description: string;
package: string | null;
- version: RawVersion | SemverVerison;
+ version: RawVersion | SemverVersion;
};
export interface BaseRelease {
diff --git a/static/app/types/system.tsx b/static/app/types/system.tsx
index ce59ef7a7d9c7..c63e54ad91716 100644
--- a/static/app/types/system.tsx
+++ b/static/app/types/system.tsx
@@ -95,11 +95,6 @@ declare global {
* Used by webpack-devserver + html-webpack
*/
__SENTRY_DEV_UI?: boolean;
- /**
- * Use react-router v6 in compatability mode. This exists while we migrate
- * off of react-router v3.
- */
- __SENTRY_USING_REACT_ROUTER_SIX?: boolean;
/**
* Sentrys version string
*/
diff --git a/static/app/utils/__mocks__/localStorage.tsx b/static/app/utils/__mocks__/localStorage.tsx
index cebf2eb86d345..74bbdab979dce 100644
--- a/static/app/utils/__mocks__/localStorage.tsx
+++ b/static/app/utils/__mocks__/localStorage.tsx
@@ -1,5 +1,5 @@
const localStorageMock = function () {
- let store = {};
+ let store: Record = {};
return {
getItem: jest.fn(key => store[key]),
setItem: jest.fn((key, value) => {
diff --git a/static/app/utils/analytics.tsx b/static/app/utils/analytics.tsx
index 91b489f47286c..746db6e7e0b04 100644
--- a/static/app/utils/analytics.tsx
+++ b/static/app/utils/analytics.tsx
@@ -9,6 +9,14 @@ import {
featureFlagEventMap,
type FeatureFlagEventParameters,
} from 'sentry/utils/analytics/featureFlagAnalyticsEvents';
+import {
+ quickStartEventMap,
+ type QuickStartEventParameters,
+} from 'sentry/utils/analytics/quickStartAnalyticsEvents';
+import {
+ statsEventMap,
+ type StatsEventParameters,
+} from 'sentry/utils/analytics/statsAnalyticsEvents';
import type {AiSuggestedSolutionEventParameters} from './analytics/aiSuggestedSolutionAnalyticsEvents';
import {aiSuggestedSolutionEventMap} from './analytics/aiSuggestedSolutionAnalyticsEvents';
@@ -87,6 +95,8 @@ interface EventParameters
ProjectCreationEventParameters,
SignupAnalyticsParameters,
TracingEventParameters,
+ StatsEventParameters,
+ QuickStartEventParameters,
Record> {}
const allEventMap: Record = {
@@ -117,6 +127,8 @@ const allEventMap: Record = {
...projectCreationEventMap,
...starfishEventMap,
...signupEventMap,
+ ...statsEventMap,
+ ...quickStartEventMap,
};
/**
diff --git a/static/app/utils/analytics/dashboardsAnalyticsEvents.tsx b/static/app/utils/analytics/dashboardsAnalyticsEvents.tsx
index 38c7e29759a28..529f1a601716a 100644
--- a/static/app/utils/analytics/dashboardsAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/dashboardsAnalyticsEvents.tsx
@@ -33,6 +33,7 @@ export type DashboardsEventParameters = {
'dashboards2.edit.complete': {};
'dashboards2.edit.start': {};
'dashboards2.filter.cancel': {};
+ 'dashboards2.filter.change': {filter_type: string};
'dashboards2.filter.save': {};
'dashboards_manage.change_sort': {
sort: string;
@@ -122,6 +123,7 @@ export const dashboardsEventMap: Record = {
'dashboards2.edit.start': 'Dashboards2: Edit start',
'dashboards2.filter.save': 'Dashboards2: Filter bar save',
'dashboards2.filter.cancel': 'Dashboards2: Filter bar cancel',
+ 'dashboards2.filter.change': 'Dashboards2: Filter bar changed',
'dashboards_views.query_selector.opened':
'Dashboards2: Query Selector opened for Widget',
'dashboards_views.query_selector.selected':
diff --git a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx
index ed0f5821edf40..0e397cc496c97 100644
--- a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx
@@ -2,6 +2,7 @@ export type FeedbackEventParameters = {
'feedback.details-integration-issue-clicked': {
integration_key: string;
};
+ 'feedback.feedback-item-not-found': {feedbackId: string};
'feedback.feedback-item-rendered': {};
'feedback.index-setup-viewed': {};
'feedback.list-item-selected': {};
@@ -17,6 +18,7 @@ export type FeedbackEventParameters = {
export type FeedbackEventKey = keyof FeedbackEventParameters;
export const feedbackEventMap: Record = {
+ 'feedback.feedback-item-not-found': 'Feedback item not found',
'feedback.feedback-item-rendered': 'Loaded and rendered a feedback item',
'feedback.index-setup-viewed': 'Viewed Feedback Onboarding Setup',
'feedback.list-item-selected': 'Selected Item in Feedback List',
diff --git a/static/app/utils/analytics/growthAnalyticsEvents.tsx b/static/app/utils/analytics/growthAnalyticsEvents.tsx
index 570de8e01cfc4..ba1aed00feaf7 100644
--- a/static/app/utils/analytics/growthAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/growthAnalyticsEvents.tsx
@@ -119,7 +119,11 @@ export type GrowthEventParameters = {
'growth.submitted_mobile_prompt_ask_teammate': MobilePromptBannerParams;
'invite_modal.add_more': InviteModal;
'invite_modal.closed': InviteModal;
- 'invite_modal.invites_sent': InviteModal;
+ 'invite_modal.invites_sent': InviteModal & {
+ failed_invites: number;
+ is_new_modal: boolean;
+ sent_invites: number;
+ };
'invite_modal.opened': InviteModal & {
can_invite: boolean;
source?: string;
diff --git a/static/app/utils/analytics/issueAnalyticsEvents.tsx b/static/app/utils/analytics/issueAnalyticsEvents.tsx
index ef04c6cae21ca..6f1c01fbb12c5 100644
--- a/static/app/utils/analytics/issueAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/issueAnalyticsEvents.tsx
@@ -221,6 +221,8 @@ export type IssueEventParameters = {
search_source: string;
search_type: string;
};
+ 'issue_views.add_view.all_saved_searches_saved': {};
+ 'issue_views.add_view.banner_dismissed': {};
'issue_views.add_view.clicked': {};
'issue_views.add_view.custom_query_saved': {
query: string;
@@ -385,7 +387,10 @@ export const issueEventMap: Record = {
'issue_views.add_view.custom_query_saved':
'Issue Views: Custom Query Saved From Add View',
'issue_views.add_view.saved_search_saved': 'Issue Views: Saved Search Saved',
+ 'issue_views.add_view.all_saved_searches_saved':
+ 'Issue Views: All Saved Searches Saved',
'issue_views.add_view.recommended_view_saved': 'Issue Views: Recommended View Saved',
+ 'issue_views.add_view.banner_dismissed': 'Issue Views: Add View Banner Dismissed',
'issue_views.shared_view_opened': 'Issue Views: Shared View Opened',
'issue_views.temp_view_discarded': 'Issue Views: Temporary View Discarded',
'issue_views.temp_view_saved': 'Issue Views: Temporary View Saved',
diff --git a/static/app/utils/analytics/onboardingAnalyticsEvents.tsx b/static/app/utils/analytics/onboardingAnalyticsEvents.tsx
index 10eabb198a78e..c4f2521f92379 100644
--- a/static/app/utils/analytics/onboardingAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/onboardingAnalyticsEvents.tsx
@@ -31,9 +31,7 @@ export type OnboardingEventParameters = {
'onboarding.messaging_integration_external_install_clicked': {
provider_key: string;
};
- 'onboarding.messaging_integration_modal_rendered': {
- project_id: string;
- };
+ 'onboarding.messaging_integration_modal_rendered': {};
'onboarding.messaging_integration_steps_refreshed': {};
'onboarding.next_step_clicked': {
newOrg: boolean;
diff --git a/static/app/utils/analytics/quickStartAnalyticsEvents.tsx b/static/app/utils/analytics/quickStartAnalyticsEvents.tsx
new file mode 100644
index 0000000000000..a7a051f4c94c8
--- /dev/null
+++ b/static/app/utils/analytics/quickStartAnalyticsEvents.tsx
@@ -0,0 +1,17 @@
+export type QuickStartEventParameters = {
+ 'quick_start.completed': {
+ referrer: string;
+ };
+ 'quick_start.opened': {};
+ 'quick_start.task_card_clicked': {
+ action: string;
+ todo_id: string;
+ todo_title: string;
+ };
+};
+
+export const quickStartEventMap: Record = {
+ 'quick_start.opened': 'Quick Start: Opened',
+ 'quick_start.task_card_clicked': 'Quick Start: Task Card Clicked',
+ 'quick_start.completed': 'Quick Start: Completed',
+};
diff --git a/static/app/utils/analytics/replayAnalyticsEvents.tsx b/static/app/utils/analytics/replayAnalyticsEvents.tsx
index 49b6864e628ca..5656b41779f06 100644
--- a/static/app/utils/analytics/replayAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/replayAnalyticsEvents.tsx
@@ -3,10 +3,6 @@ import type {Output} from 'sentry/views/replays/detail/network/details/getOutput
import type {ReferrerTableType} from 'sentry/views/replays/replayTable/tableCell';
export type ReplayEventParameters = {
- 'replay.accessibility-issue-clicked': {
- issue_description: string;
- issue_impact: string | undefined;
- };
'replay.canvas-detected-banner-clicked': {
sdk_needs_update?: boolean;
};
@@ -132,7 +128,6 @@ export type ReplayEventParameters = {
export type ReplayEventKey = keyof ReplayEventParameters;
export const replayEventMap: Record = {
- 'replay.accessibility-issue-clicked': 'Clicked Replay Accessibility Issue',
'replay.canvas-detected-banner-clicked': 'Clicked Canvas Detected in Replay Banner',
'replay.details-data-loaded': 'Replay Details Data Loaded',
'replay.details-has-hydration-error': 'Replay Details Has Hydration Error',
diff --git a/static/app/utils/analytics/statsAnalyticsEvents.tsx b/static/app/utils/analytics/statsAnalyticsEvents.tsx
new file mode 100644
index 0000000000000..867c8d1a93c3a
--- /dev/null
+++ b/static/app/utils/analytics/statsAnalyticsEvents.tsx
@@ -0,0 +1,15 @@
+export type StatsEventParameters = {
+ 'stats.docs_clicked': {
+ dataCategory: string;
+ source:
+ | 'card-accepted'
+ | 'card-filtered'
+ | 'card-rate-limited'
+ | 'card-invalid'
+ | 'chart-title';
+ };
+};
+
+export const statsEventMap: Record = {
+ 'stats.docs_clicked': 'Stats: Docs Clicked',
+};
diff --git a/static/app/utils/array/lastOfArray.tsx b/static/app/utils/array/lastOfArray.tsx
deleted file mode 100644
index 00136f0806877..0000000000000
--- a/static/app/utils/array/lastOfArray.tsx
+++ /dev/null
@@ -1,5 +0,0 @@
-export function lastOfArray | ReadonlyArray>(
- t: T
-): T[number] {
- return t[t.length - 1];
-}
diff --git a/static/app/utils/browserHistory.tsx b/static/app/utils/browserHistory.tsx
index aa8427457eda0..430b6d2432b64 100644
--- a/static/app/utils/browserHistory.tsx
+++ b/static/app/utils/browserHistory.tsx
@@ -1,6 +1,5 @@
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {browserHistory as react3BrowserHistory} from 'react-router';
import type {Router} from '@remix-run/router/dist/router';
+import * as Sentry from '@sentry/react';
import type {History} from 'history';
import {
@@ -8,6 +7,43 @@ import {
locationDescriptorToTo,
} from './reactRouter6Compat/location';
+const historyMethods: Array<keyof History> = [
+ 'listenBefore',
+ 'listen',
+ 'transitionTo',
+ 'push',
+ 'replace',
+ 'go',
+ 'goBack',
+ 'goForward',
+ 'createKey',
+ 'createPath',
+ 'createHref',
+ 'createLocation',
+ 'getCurrentLocation',
+];
+
+/**
+ * Configures a proxy object for the default value of browserHistory. This
+ * should NOT be called before the DANGEROUS_SET_REACT_ROUTER_6_HISTORY
+ * function is called. But let's be sure it isn't by adding some logging.
+ */
+const proxyLegacyBrowserHistory: ProxyHandler = {
+ get(_target, prop, _receiver) {
+ if (historyMethods.includes(prop.toString() as keyof History)) {
+ // eslint-disable-next-line no-console
+ console.warn('Legacy browserHistory called before patched!');
+ Sentry.captureException(new Error('legacy browserHistory called!'), {
+ level: 'info',
+ extra: {prop},
+ });
+
+ return () => {};
+ }
+ return undefined;
+ },
+};
+
/**
* @deprecated Prefer using useNavigate
*
@@ -22,7 +58,7 @@ import {
* browserHistory.push({...location, query: {someKey: 1}})
* navigate({...location, query: {someKey: 1}})
*/
-export let browserHistory = react3BrowserHistory;
+export let browserHistory = new Proxy({} as History, proxyLegacyBrowserHistory);
/**
* This shim sets the global `browserHistory` to a shim object that matches
@@ -84,3 +120,7 @@ export function DANGEROUS_SET_REACT_ROUTER_6_HISTORY(router: Router) {
browserHistory = compat6BrowserHistory;
}
+
+export function DANGEROUS_SET_TEST_HISTORY(router: any) {
+ browserHistory = router;
+}
diff --git a/static/app/utils/discover/discoverQuery.tsx b/static/app/utils/discover/discoverQuery.tsx
index 672c4c7b9f2c0..ea13f2e8094dd 100644
--- a/static/app/utils/discover/discoverQuery.tsx
+++ b/static/app/utils/discover/discoverQuery.tsx
@@ -56,7 +56,7 @@ function DiscoverQuery(props: DiscoverQueryComponentProps) {
const {fields, ...otherMeta} = data.meta ?? {};
return {
...data,
- meta: {...fields, ...otherMeta},
+ meta: {...fields, ...otherMeta, fields},
};
};
return (
@@ -74,7 +74,7 @@ export function useDiscoverQuery(props: Omit {
+ const field = 'apdex()';
+
+ return (
+
+ {typeof data[field] === 'number' ? formatApdex(data[field]) : emptyValue}
+
+ );
+ },
+ },
attachments: {
sortField: null,
renderFunc: (data, {organization, projectSlug}) => {
@@ -990,7 +1004,7 @@ export function getFieldRenderer(
}
const fieldName = isAlias ? getAggregateAlias(field) : field;
- const fieldType = meta[fieldName];
+ const fieldType = meta[fieldName] || meta.fields?.[fieldName];
for (const alias in SPECIAL_FUNCTIONS) {
if (fieldName.startsWith(alias)) {
@@ -1024,7 +1038,7 @@ export function getFieldFormatter(
isAlias: boolean = true
): FieldTypeFormatterRenderFunctionPartial {
const fieldName = isAlias ? getAggregateAlias(field) : field;
- const fieldType = meta[fieldName];
+ const fieldType = meta[fieldName] || meta.fields?.[fieldName];
if (FIELD_FORMATTERS.hasOwnProperty(fieldType)) {
return partial(FIELD_FORMATTERS[fieldType].renderFunc, fieldName);
diff --git a/static/app/utils/discover/fields.spec.tsx b/static/app/utils/discover/fields.spec.tsx
index bd6def863b063..86d76b1c1e142 100644
--- a/static/app/utils/discover/fields.spec.tsx
+++ b/static/app/utils/discover/fields.spec.tsx
@@ -84,6 +84,13 @@ describe('parseFunction', function () {
arguments: ['release', '"0.81,123,152,()"', 'others', 'current'],
});
});
+
+ it('handles functions with numeric tag arguments', function () {
+ expect(parseFunction('count(tags[foo,number])')).toEqual({
+ name: 'count',
+ arguments: ['tags[foo,number]'],
+ });
+ });
});
describe('getAggregateAlias', function () {
diff --git a/static/app/utils/discover/fields.tsx b/static/app/utils/discover/fields.tsx
index 846fb04976e22..5593c3c873a0b 100644
--- a/static/app/utils/discover/fields.tsx
+++ b/static/app/utils/discover/fields.tsx
@@ -763,6 +763,7 @@ export const ERRORS_AGGREGATION_FUNCTIONS = [
AggregationKey.COUNT_UNIQUE,
AggregationKey.EPS,
AggregationKey.EPM,
+ AggregationKey.LAST_SEEN,
];
// This list contains fields/functions that are available with profiling feature.
@@ -811,39 +812,40 @@ export function parseFunction(field: string): ParsedFunction | null {
if (results && results.length === 3) {
return {
name: results[1],
- arguments: parseArguments(results[1], results[2]),
+ arguments: parseArguments(results[2]),
};
}
return null;
}
-export function parseArguments(functionText: string, columnText: string): string[] {
- // Some functions take a quoted string for their arguments that may contain commas
- // This function attempts to be identical with the similarly named parse_arguments
- // found in src/sentry/search/events/fields.py
- if (
- (functionText !== 'to_other' &&
- functionText !== 'count_if' &&
- functionText !== 'spans_histogram') ||
- columnText?.length === 0
- ) {
- return columnText ? columnText.split(',').map(result => result.trim()) : [];
+function _lookback(columnText: string, j: number, str: string) {
+  // For parse_arguments, check that the current character is preceded by the given string
+ if (j < str.length) {
+ return false;
}
+ return columnText.substring(j - str.length, j) === str;
+}
+export function parseArguments(columnText: string): string[] {
const args: string[] = [];
let quoted = false;
+ let inTag = false;
let escaped = false;
let i: number = 0;
let j: number = 0;
while (j < columnText?.length) {
- if (i === j && columnText[j] === '"') {
+ if (!inTag && i === j && columnText[j] === '"') {
// when we see a quote at the beginning of
// an argument, then this is a quoted string
quoted = true;
+ } else if (!quoted && columnText[j] === '[' && _lookback(columnText, j, 'tags')) {
+ // when the argument begins with tags[,
+ // then this is the beginning of the tag that may contain commas
+ inTag = true;
} else if (i === j && columnText[j] === ' ') {
// argument has leading spaces, skip over them
i += 1;
@@ -855,12 +857,16 @@ export function parseArguments(functionText: string, columnText: string): string
// when we see a non-escaped quote while inside
// of a quoted string, we should end it
quoted = false;
+ } else if (inTag && !escaped && columnText[j] === ']') {
+      // when we see a non-escaped closing bracket while
+      // inside of a tag, we should end it
+ inTag = false;
} else if (quoted && escaped) {
// when we are inside a quoted string and have
// begun an escape character, we should end it
escaped = false;
- } else if (quoted && columnText[j] === ',') {
- // when we are inside a quoted string and see
+ } else if ((quoted || inTag) && columnText[j] === ',') {
+ // when we are inside a quoted string or tag and see
// a comma, it should not be considered an
// argument separator
} else if (columnText[j] === ',') {
@@ -1615,3 +1621,13 @@ export const COMBINED_DATASET_FILTER_KEY_SECTIONS: FilterKeySection[] = [
// export const PLATFORM_KEY_TO_FILTER_SECTIONS
// will take in a project platform key, and output only the relevant filter key sections.
// This way, users will not be suggested mobile fields for a backend transaction, for example.
+
+export const TYPED_TAG_KEY_RE = /tags\[(.*),(.*)\]/;
+
+export function formatParsedFunction(func: ParsedFunction) {
+ const args = func.arguments.map(arg => {
+ const result = arg.match(TYPED_TAG_KEY_RE);
+ return result?.[1] ?? arg;
+ });
+ return `${func.name}(${args.join(',')})`;
+}
diff --git a/static/app/utils/discover/urls.tsx b/static/app/utils/discover/urls.tsx
index e37d2770882c9..13d0c0e04114c 100644
--- a/static/app/utils/discover/urls.tsx
+++ b/static/app/utils/discover/urls.tsx
@@ -52,11 +52,12 @@ export function generateLinkToEventInTraceView({
eventId,
transactionName,
eventView,
+ targetId,
demo,
source,
type = 'performance',
}: {
- eventId: string;
+ eventId: string | undefined;
location: Location;
organization: Organization;
projectSlug: string;
@@ -67,6 +68,9 @@ export function generateLinkToEventInTraceView({
isHomepage?: boolean;
source?: string;
spanId?: string;
+ // targetId represents the span id of the transaction. It will replace eventId once all links
+  // to trace view are updated to use span ids of transactions instead of event ids.
+ targetId?: string;
transactionName?: string;
type?: 'performance' | 'discover';
}) {
@@ -90,6 +94,7 @@ export function generateLinkToEventInTraceView({
dateSelection,
timestamp: normalizedTimestamp,
eventId,
+ targetId,
spanId,
demo,
location,
diff --git a/static/app/utils/displayReprocessEventAction.spec.tsx b/static/app/utils/displayReprocessEventAction.spec.tsx
index 390254c52a89a..c719ab7f9832c 100644
--- a/static/app/utils/displayReprocessEventAction.spec.tsx
+++ b/static/app/utils/displayReprocessEventAction.spec.tsx
@@ -7,7 +7,7 @@ import {displayReprocessEventAction} from 'sentry/utils/displayReprocessEventAct
describe('DisplayReprocessEventAction', function () {
it('returns false in case of no event', function () {
- expect(displayReprocessEventAction()).toBe(false);
+ expect(displayReprocessEventAction(null)).toBe(false);
});
it('returns false if no exception entry is found', function () {
diff --git a/static/app/utils/displayReprocessEventAction.tsx b/static/app/utils/displayReprocessEventAction.tsx
index e271c05097312..3b192a463c7ce 100644
--- a/static/app/utils/displayReprocessEventAction.tsx
+++ b/static/app/utils/displayReprocessEventAction.tsx
@@ -21,7 +21,7 @@ const MAYBE_DEBUG_FILE_PLATFORMS: Set = new Set(['csharp', 'java'])
* Debug Files for proper processing, as those Debug Files could have been uploaded *after*
* the Event was ingested.
*/
-export function displayReprocessEventAction(event?: Event): boolean {
+export function displayReprocessEventAction(event: Event | null): boolean {
if (!event) {
return false;
}
diff --git a/static/app/utils/dynamicSampling/features.tsx b/static/app/utils/dynamicSampling/features.tsx
new file mode 100644
index 0000000000000..278da74e2a54a
--- /dev/null
+++ b/static/app/utils/dynamicSampling/features.tsx
@@ -0,0 +1,8 @@
+import type {Organization} from 'sentry/types/organization';
+
+export function hasDynamicSamplingCustomFeature(organization: Organization) {
+ return (
+ organization.features.includes('dynamic-sampling') &&
+ organization.features.includes('dynamic-sampling-custom')
+ );
+}
diff --git a/static/app/utils/events.tsx b/static/app/utils/events.tsx
index 3e9256edc2b23..855ddd284ce1d 100644
--- a/static/app/utils/events.tsx
+++ b/static/app/utils/events.tsx
@@ -24,6 +24,20 @@ import {getDaysSinceDatePrecise} from 'sentry/utils/getDaysSinceDate';
import {isMobilePlatform, isNativePlatform} from 'sentry/utils/platform';
import {getReplayIdFromEvent} from 'sentry/utils/replays/getReplayIdFromEvent';
+const EVENT_TYPES_WITH_LOG_LEVEL = new Set([
+ EventOrGroupType.ERROR,
+ EventOrGroupType.CSP,
+ EventOrGroupType.EXPECTCT,
+ EventOrGroupType.DEFAULT,
+ EventOrGroupType.EXPECTSTAPLE,
+ EventOrGroupType.HPKP,
+ EventOrGroupType.NEL,
+]);
+
+export function eventTypeHasLogLevel(type: EventOrGroupType) {
+ return EVENT_TYPES_WITH_LOG_LEVEL.has(type);
+}
+
export function isTombstone(
maybe: BaseGroup | Event | GroupTombstoneHelper
): maybe is GroupTombstoneHelper {
diff --git a/static/app/utils/featureObserver.spec.ts b/static/app/utils/featureObserver.spec.ts
index 9e3125b0b3991..761f4c7a9ab44 100644
--- a/static/app/utils/featureObserver.spec.ts
+++ b/static/app/utils/featureObserver.spec.ts
@@ -40,16 +40,18 @@ describe('FeatureObserver', () => {
]);
});
- it('should remove duplicate flags', () => {
+ it('should remove duplicate flags with a full queue', () => {
const inst = new FeatureObserver();
inst.observeFlags({organization, bufferSize: 3});
expect(inst.getFeatureFlags().values).toEqual([]);
organization.features.includes('enable-issues');
organization.features.includes('replay-mobile-ui');
+ organization.features.includes('enable-discover');
expect(inst.getFeatureFlags().values).toEqual([
{flag: 'enable-issues', result: true},
{flag: 'replay-mobile-ui', result: false},
+ {flag: 'enable-discover', result: false},
]);
// this is already in the queue; it should be removed and
@@ -57,12 +59,13 @@ describe('FeatureObserver', () => {
organization.features.includes('enable-issues');
expect(inst.getFeatureFlags().values).toEqual([
{flag: 'replay-mobile-ui', result: false},
+ {flag: 'enable-discover', result: false},
{flag: 'enable-issues', result: true},
]);
organization.features.includes('spam-ingest');
expect(inst.getFeatureFlags().values).toEqual([
- {flag: 'replay-mobile-ui', result: false},
+ {flag: 'enable-discover', result: false},
{flag: 'enable-issues', result: true},
{flag: 'spam-ingest', result: false},
]);
@@ -71,12 +74,41 @@ describe('FeatureObserver', () => {
// the queue should not change
organization.features.includes('spam-ingest');
expect(inst.getFeatureFlags().values).toEqual([
- {flag: 'replay-mobile-ui', result: false},
+ {flag: 'enable-discover', result: false},
{flag: 'enable-issues', result: true},
{flag: 'spam-ingest', result: false},
]);
});
+ it('should remove duplicate flags with an unfilled queue', () => {
+ const inst = new FeatureObserver();
+ inst.observeFlags({organization, bufferSize: 3});
+ expect(inst.getFeatureFlags().values).toEqual([]);
+
+ organization.features.includes('enable-issues');
+ organization.features.includes('replay-mobile-ui');
+ expect(inst.getFeatureFlags().values).toEqual([
+ {flag: 'enable-issues', result: true},
+ {flag: 'replay-mobile-ui', result: false},
+ ]);
+
+ // this is already in the queue; it should be removed and
+ // added back to the end of the queue
+ organization.features.includes('enable-issues');
+ expect(inst.getFeatureFlags().values).toEqual([
+ {flag: 'replay-mobile-ui', result: false},
+ {flag: 'enable-issues', result: true},
+ ]);
+
+ // this is already in the queue but in the back
+ // the queue should not change
+ organization.features.includes('enable-issues');
+ expect(inst.getFeatureFlags().values).toEqual([
+ {flag: 'replay-mobile-ui', result: false},
+ {flag: 'enable-issues', result: true},
+ ]);
+ });
+
it('should not change the functionality of `includes`', () => {
const inst = new FeatureObserver();
inst.observeFlags({organization, bufferSize: 3});
diff --git a/static/app/utils/featureObserver.ts b/static/app/utils/featureObserver.ts
index 3f625d0a43114..9d757e241c465 100644
--- a/static/app/utils/featureObserver.ts
+++ b/static/app/utils/featureObserver.ts
@@ -1,7 +1,6 @@
import type {Flags} from 'sentry/types/event';
import type {Organization} from 'sentry/types/organization';
-const DEFAULT_BUFFER_SIZE = 10;
let __SINGLETON: FeatureObserver | null = null;
export default class FeatureObserver {
@@ -27,10 +26,10 @@ export default class FeatureObserver {
public observeFlags({
organization,
- bufferSize = DEFAULT_BUFFER_SIZE,
+ bufferSize,
}: {
+ bufferSize: number;
organization: Organization;
- bufferSize?: number;
}) {
const FLAGS = this.FEATURE_FLAGS;
@@ -43,16 +42,17 @@ export default class FeatureObserver {
// Check if the flag is already in the buffer
const index = FLAGS.values.findIndex(f => f.flag === flagName[0]);
- // If at capacity AND the duplicate is not at the end, we need to remove the earliest flag
- if (FLAGS.values.length === bufferSize && !(index === bufferSize - 1)) {
- FLAGS.values.shift();
- }
-
// The flag is already in the buffer
if (index !== -1) {
FLAGS.values.splice(index, 1);
}
+ // If at capacity, we need to remove the earliest flag
+ // This will only happen if not a duplicate flag
+ if (FLAGS.values.length === bufferSize) {
+ FLAGS.values.shift();
+ }
+
// Store the flag and its result in the buffer
FLAGS.values.push({
flag: flagName[0],
diff --git a/static/app/utils/fields/index.ts b/static/app/utils/fields/index.ts
index 60d02a3fc2456..9305eb9588f9b 100644
--- a/static/app/utils/fields/index.ts
+++ b/static/app/utils/fields/index.ts
@@ -1697,7 +1697,7 @@ export const ISSUE_EVENT_PROPERTY_FIELDS: FieldKey[] = [
FieldKey.MESSAGE,
FieldKey.OS_BUILD,
FieldKey.OS_KERNEL_VERSION,
- FieldKey.PLATFORM,
+ FieldKey.PLATFORM_NAME,
FieldKey.RELEASE_BUILD,
FieldKey.RELEASE_PACKAGE,
FieldKey.RELEASE_VERSION,
@@ -1726,6 +1726,67 @@ export const ISSUE_FIELDS: FieldKey[] = [
...ISSUE_EVENT_PROPERTY_FIELDS,
];
+/**
+ * These are valid filter keys in the issue search which are aliases for
+ * values in the event context. In cases where a user provides custom event
+ * tags with the same name, these may conflict and `tags[name]` should be
+ * used instead.
+ *
+ * Search locations are defined in sentry/snuba/events.py, anything that
+ * references a tag should not be defined here.
+ */
+export const ISSUE_EVENT_FIELDS_THAT_MAY_CONFLICT_WITH_TAGS: Set = new Set([
+ FieldKey.APP_IN_FOREGROUND,
+ FieldKey.DEVICE_ARCH,
+ FieldKey.DEVICE_BRAND,
+ FieldKey.DEVICE_CLASS,
+ FieldKey.DEVICE_LOCALE,
+ FieldKey.DEVICE_LOCALE,
+ FieldKey.DEVICE_MODEL_ID,
+ FieldKey.DEVICE_ORIENTATION,
+ FieldKey.DEVICE_UUID,
+ FieldKey.ERROR_HANDLED,
+ FieldKey.ERROR_MAIN_THREAD,
+ FieldKey.ERROR_MECHANISM,
+ FieldKey.ERROR_TYPE,
+ FieldKey.ERROR_UNHANDLED,
+ FieldKey.ERROR_VALUE,
+ FieldKey.EVENT_TIMESTAMP,
+ FieldKey.EVENT_TYPE,
+ FieldKey.GEO_CITY,
+ FieldKey.GEO_COUNTRY_CODE,
+ FieldKey.GEO_REGION,
+ FieldKey.GEO_SUBDIVISION,
+ FieldKey.HTTP_METHOD,
+ FieldKey.HTTP_REFERER,
+ FieldKey.HTTP_URL,
+ FieldKey.ID,
+ FieldKey.LOCATION,
+ FieldKey.MESSAGE,
+ FieldKey.OS_BUILD,
+ FieldKey.OS_KERNEL_VERSION,
+ FieldKey.PLATFORM_NAME,
+ FieldKey.RELEASE_BUILD,
+ FieldKey.RELEASE_PACKAGE,
+ FieldKey.RELEASE_VERSION,
+ FieldKey.SDK_NAME,
+ FieldKey.SDK_VERSION,
+ FieldKey.STACK_ABS_PATH,
+ FieldKey.STACK_FILENAME,
+ FieldKey.STACK_FUNCTION,
+ FieldKey.STACK_MODULE,
+ FieldKey.STACK_PACKAGE,
+ FieldKey.STACK_STACK_LEVEL,
+ FieldKey.TIMESTAMP,
+ FieldKey.TITLE,
+ FieldKey.TRACE,
+ FieldKey.UNREAL_CRASH_TYPE,
+ FieldKey.USER_EMAIL,
+ FieldKey.USER_ID,
+ FieldKey.USER_IP,
+ FieldKey.USER_USERNAME,
+]);
+
/**
* Refer to src/sentry/snuba/events.py, search for Columns
*/
@@ -1742,6 +1803,7 @@ export const DISCOVER_FIELDS = [
FieldKey.CULPRIT,
FieldKey.LOCATION,
FieldKey.MESSAGE,
+ FieldKey.PLATFORM,
FieldKey.PLATFORM_NAME,
FieldKey.ENVIRONMENT,
FieldKey.RELEASE,
diff --git a/static/app/utils/getDaysSinceDate.spec.tsx b/static/app/utils/getDaysSinceDate.spec.tsx
index 52d4d596789ba..b9667ac9f4cfd 100644
--- a/static/app/utils/getDaysSinceDate.spec.tsx
+++ b/static/app/utils/getDaysSinceDate.spec.tsx
@@ -1,7 +1,7 @@
import getDaysSinceDate from 'sentry/utils/getDaysSinceDate';
jest.mock('moment-timezone', () => {
- const moment = jest.requireActual('moment');
+ const moment = jest.requireActual('moment-timezone');
// Jun 06 2022
moment.now = jest.fn().mockReturnValue(1654492173000);
return moment;
diff --git a/static/app/utils/getProjectsByTeams.tsx b/static/app/utils/getProjectsByTeams.tsx
index 5d9230483e09f..6522b9d48c16b 100644
--- a/static/app/utils/getProjectsByTeams.tsx
+++ b/static/app/utils/getProjectsByTeams.tsx
@@ -6,7 +6,7 @@ export default function getProjectsByTeams(
projects: Project[],
isSuperuser: boolean = false
): {projectsByTeam: {[teamSlug: string]: Project[]}; teamlessProjects: Project[]} {
- const projectsByTeam = {};
+ const projectsByTeam: Record = {};
const teamlessProjects: Project[] = [];
let usersTeams = new Set(teams.filter(team => team.isMember).map(team => team.slug));
diff --git a/static/app/utils/gettingStartedDocs/node.ts b/static/app/utils/gettingStartedDocs/node.ts
index 01cc1c88ad035..8c199661ba084 100644
--- a/static/app/utils/gettingStartedDocs/node.ts
+++ b/static/app/utils/gettingStartedDocs/node.ts
@@ -29,7 +29,7 @@ export function getInstallSnippet({
additionalPackages = [],
basePackage = '@sentry/node',
}: {
- packageManager: 'npm' | 'yarn';
+ packageManager: 'npm' | 'yarn' | 'pnpm';
params: DocsParams;
additionalPackages?: string[];
basePackage?: string;
@@ -40,9 +40,15 @@ export function getInstallSnippet({
}
packages = packages.concat(additionalPackages);
- return packageManager === 'yarn'
- ? `yarn add ${packages.join(' ')}`
- : `npm install --save ${packages.join(' ')}`;
+ if (packageManager === 'yarn') {
+ return `yarn add ${packages.join(' ')}`;
+ }
+
+ if (packageManager === 'pnpm') {
+ return `pnpm add ${packages.join(' ')}`;
+ }
+
+ return `npm install ${packages.join(' ')} --save`;
}
export function getInstallConfig(
@@ -80,6 +86,17 @@ export function getInstallConfig(
basePackage,
}),
},
+ {
+ label: 'pnpm',
+ value: 'pnpm',
+ language: 'bash',
+ code: getInstallSnippet({
+ params,
+ additionalPackages,
+ packageManager: 'pnpm',
+ basePackage,
+ }),
+ },
],
},
];
diff --git a/static/app/utils/isCtrlKeyPressed.tsx b/static/app/utils/isCtrlKeyPressed.tsx
index ec42536d1c786..d0f32dd25264c 100644
--- a/static/app/utils/isCtrlKeyPressed.tsx
+++ b/static/app/utils/isCtrlKeyPressed.tsx
@@ -8,7 +8,7 @@ import {isMac} from '@react-aria/utils';
*
* [1] https://github.com/adobe/react-spectrum/blob/main/packages/%40react-aria/selection/src/utils.ts
*/
-export function isCtrlKeyPressed(e: React.KeyboardEvent) {
+export function isCtrlKeyPressed(e: React.KeyboardEvent | React.MouseEvent) {
if (isMac()) {
return e.metaKey;
}
diff --git a/static/app/utils/metrics/features.tsx b/static/app/utils/metrics/features.tsx
index b089f77792190..a191d281f1f38 100644
--- a/static/app/utils/metrics/features.tsx
+++ b/static/app/utils/metrics/features.tsx
@@ -18,10 +18,6 @@ export function hasMetricsNewInputs(organization: Organization) {
return organization.features.includes('metrics-new-inputs');
}
-export function hasMetricsNewSearchQueryBuilder(organization: Organization) {
- return organization.features.includes('search-query-builder-metrics');
-}
-
/**
* Returns the forceMetricsLayer query param for the alert
* wrapped in an object so it can be spread into existing query params
diff --git a/static/app/utils/number/formatApdex.spec.tsx b/static/app/utils/number/formatApdex.spec.tsx
new file mode 100644
index 0000000000000..baa3efc8a5341
--- /dev/null
+++ b/static/app/utils/number/formatApdex.spec.tsx
@@ -0,0 +1,15 @@
+import {formatApdex} from 'sentry/utils/number/formatApdex';
+
+describe('formatApdex', function () {
+ it.each([
+ [0, '0'],
+ [0.2, '0.200'],
+ [0.61, '0.610'],
+ [0.781, '0.781'],
+ [0.771231, '0.771'],
+ [0.99999, '0.999'],
+ [1.0, '1'],
+ ])('%s', (value, expected) => {
+ expect(formatApdex(value)).toEqual(expected);
+ });
+});
diff --git a/static/app/utils/number/formatApdex.tsx b/static/app/utils/number/formatApdex.tsx
new file mode 100644
index 0000000000000..089c0062e1af4
--- /dev/null
+++ b/static/app/utils/number/formatApdex.tsx
@@ -0,0 +1,15 @@
+export function formatApdex(value: number) {
+ if (value === 0) {
+ return '0';
+ }
+
+ if (value === 1) {
+ return '1';
+ }
+
+ return value.toLocaleString(undefined, {
+ minimumFractionDigits: 3,
+ maximumFractionDigits: 3,
+ roundingMode: 'trunc',
+ });
+}
diff --git a/static/app/utils/performance/contexts/metricsEnhancedSetting.tsx b/static/app/utils/performance/contexts/metricsEnhancedSetting.tsx
index cda1f046c8c2e..8738a3a8bf43f 100644
--- a/static/app/utils/performance/contexts/metricsEnhancedSetting.tsx
+++ b/static/app/utils/performance/contexts/metricsEnhancedSetting.tsx
@@ -83,7 +83,14 @@ export function canUseMetricsData(organization: Organization) {
const isRollingOut =
samplingFeatureFlag && organization.features.includes('mep-rollout-flag');
- return isDevFlagOn || isInternalViewOn || isRollingOut;
+ // For plans transitioning from AM2 to AM3, we still want to show metrics
+ // until 90d after 100% transaction ingestion to avoid spikes in charts
+ // coming from old sampling rates.
+ const isTransitioningPlan = organization.features.includes(
+ 'dashboards-metrics-transition'
+ );
+
+ return isDevFlagOn || isInternalViewOn || isRollingOut || isTransitioningPlan;
}
export function MEPSettingProvider({
diff --git a/static/app/utils/prism.tsx b/static/app/utils/prism.tsx
index 63e722a307791..d1b13b196b2f9 100644
--- a/static/app/utils/prism.tsx
+++ b/static/app/utils/prism.tsx
@@ -55,6 +55,12 @@ const EXTRA_LANGUAGE_ALIASES: Record = {
bundle: 'javascript',
vue: 'javascript',
svelte: 'javascript',
+ 'js?': 'javascript',
+
+ // Clojure
+ clj: 'clojure',
+ cljc: 'clojure',
+ cljs: 'clojure',
};
export const getPrismLanguage = (lang: string) => {
diff --git a/static/app/utils/profiling/colors/utils.tsx b/static/app/utils/profiling/colors/utils.tsx
index ab5a162fc3a88..4ff009843c97c 100644
--- a/static/app/utils/profiling/colors/utils.tsx
+++ b/static/app/utils/profiling/colors/utils.tsx
@@ -11,7 +11,7 @@ function uniqueCountBy(
arr: ReadonlyArray,
predicate: (t: T) => string | boolean
): number {
- const visited = {};
+ const visited: Record = {};
let count = 0;
for (let i = 0; i < arr.length; i++) {
diff --git a/static/app/utils/profiling/hooks/useProfileEvents.tsx b/static/app/utils/profiling/hooks/useProfileEvents.tsx
index b25adb458dbd7..8696daa42f5a4 100644
--- a/static/app/utils/profiling/hooks/useProfileEvents.tsx
+++ b/static/app/utils/profiling/hooks/useProfileEvents.tsx
@@ -89,30 +89,27 @@ export function formatError(error: any): string | null {
return t('An unknown error occurred.');
}
-const ALL_FIELDS = [
- 'id',
- 'trace',
- 'profile.id',
- 'profiler.id',
- 'thread.id',
- 'precise.start_ts',
- 'precise.finish_ts',
- 'project.name',
- 'timestamp',
- 'release',
- 'device.model',
- 'device.classification',
- 'device.arch',
- 'transaction.duration',
- 'p50()',
- 'p75()',
- 'p95()',
- 'p99()',
- 'count()',
- 'last_seen()',
-] as const;
-
-export type ProfilingFieldType = (typeof ALL_FIELDS)[number];
+export type ProfilingFieldType =
+ | 'id'
+ | 'trace'
+ | 'profile.id'
+ | 'profiler.id'
+ | 'thread.id'
+ | 'precise.start_ts'
+ | 'precise.finish_ts'
+ | 'project.name'
+ | 'timestamp'
+ | 'release'
+ | 'device.model'
+ | 'device.classification'
+ | 'device.arch'
+ | 'transaction.duration'
+ | 'p50()'
+ | 'p75()'
+ | 'p95()'
+ | 'p99()'
+ | 'count()'
+ | 'last_seen()';
export function getProfilesTableFields(platform: Project['platform']) {
if (mobile.includes(platform as any)) {
diff --git a/static/app/utils/profiling/platforms.tsx b/static/app/utils/profiling/platforms.tsx
index d0e6ec2e7d2b5..f8fd19c92fb53 100644
--- a/static/app/utils/profiling/platforms.tsx
+++ b/static/app/utils/profiling/platforms.tsx
@@ -1,39 +1,34 @@
import type {Project} from 'sentry/types/project';
-const supportedProfilingPlatformSDKs = [
- 'android',
- 'apple-ios',
- 'flutter',
- 'dart-flutter',
- 'go',
- 'node',
- 'python',
- 'php',
- 'php',
- 'php-laravel',
- 'php-symfony2',
- 'ruby',
- 'javascript-angular',
- 'javascript-astro',
- 'javascript-ember',
- 'javascript-gatsby',
- 'javascript-nextjs',
- 'javascript-react',
- 'javascript-remix',
- 'javascript-svelte',
- 'javascript-solid',
- 'javascript-sveltekit',
- 'javascript-vue',
- 'javascript',
- 'react-native',
-] as const;
-export type SupportedProfilingPlatform = (typeof supportedProfilingPlatformSDKs)[number];
-export type SupportedProfilingPlatformSDK =
- (typeof supportedProfilingPlatformSDKs)[number];
+type SupportedProfilingPlatformSDK =
+ | 'android'
+ | 'apple-ios'
+ | 'flutter'
+ | 'dart-flutter'
+ | 'go'
+ | 'node'
+ | 'python'
+ | 'php'
+ | 'php-laravel'
+ | 'php-symfony2'
+ | 'ruby'
+ | 'javascript-angular'
+ | 'javascript-astro'
+ | 'javascript-ember'
+ | 'javascript-gatsby'
+ | 'javascript-nextjs'
+ | 'javascript-react'
+ | 'javascript-remix'
+ | 'javascript-svelte'
+ | 'javascript-solid'
+ | 'javascript-sveltekit'
+ | 'javascript-vue'
+ | 'javascript'
+ | 'react-native';
export function getDocsPlatformSDKForPlatform(
platform: string | undefined
-): SupportedProfilingPlatform | null {
+): SupportedProfilingPlatformSDK | null {
if (!platform) {
return null;
}
diff --git a/static/app/utils/profiling/profile/eventedProfile.tsx b/static/app/utils/profiling/profile/eventedProfile.tsx
index 407cbbbac5e9f..bf6ba54e8e578 100644
--- a/static/app/utils/profiling/profile/eventedProfile.tsx
+++ b/static/app/utils/profiling/profile/eventedProfile.tsx
@@ -1,4 +1,3 @@
-import {lastOfArray} from 'sentry/utils/array/lastOfArray';
import {CallTreeNode} from 'sentry/utils/profiling/callTreeNode';
import type {Frame} from 'sentry/utils/profiling/frame';
import {assertValidProfilingUnit, formatTo} from 'sentry/utils/profiling/units/units';
@@ -107,7 +106,7 @@ export class EventedProfile extends Profile {
frame.totalWeight += weightDelta;
}
- const top = lastOfArray(this.stack);
+ const top = this.stack[this.stack.length - 1];
if (top) {
top.selfWeight += weight;
}
@@ -119,7 +118,7 @@ export class EventedProfile extends Profile {
for (const node of this.calltree) {
node.totalWeight += delta;
}
- const stackTop = lastOfArray(this.calltree);
+ const stackTop = this.calltree[this.calltree.length - 1];
if (stackTop) {
stackTop.selfWeight += delta;
@@ -130,7 +129,7 @@ export class EventedProfile extends Profile {
this.addWeightToFrames(at);
this.addWeightsToNodes(at);
- const lastTop = lastOfArray(this.calltree);
+ const lastTop = this.calltree[this.calltree.length - 1];
if (lastTop) {
const sampleDelta = at - this.lastValue;
@@ -164,7 +163,7 @@ export class EventedProfile extends Profile {
lastTop.children.push(node);
}
} else {
- const last = lastOfArray(lastTop.children);
+ const last = lastTop.children[lastTop.children.length - 1];
if (last && !last.isLocked() && last.frame === frame) {
node = last;
} else {
diff --git a/static/app/utils/profiling/profile/jsSelfProfile.tsx b/static/app/utils/profiling/profile/jsSelfProfile.tsx
index 7bb8fd5536812..4af3dbaac9e68 100644
--- a/static/app/utils/profiling/profile/jsSelfProfile.tsx
+++ b/static/app/utils/profiling/profile/jsSelfProfile.tsx
@@ -1,4 +1,3 @@
-import {lastOfArray} from 'sentry/utils/array/lastOfArray';
import {CallTreeNode} from 'sentry/utils/profiling/callTreeNode';
import {Frame} from 'sentry/utils/profiling/frame';
@@ -43,7 +42,7 @@ export class JSSelfProfile extends Profile {
}
const startedAt = profile.samples[0].timestamp;
- const endedAt = lastOfArray(profile.samples).timestamp;
+ const endedAt = profile.samples[profile.samples.length - 1]?.timestamp;
const jsSelfProfile = new JSSelfProfile({
duration: endedAt - startedAt,
@@ -115,7 +114,7 @@ export class JSSelfProfile extends Profile {
const framesInStack: CallTreeNode[] = [];
for (const frame of stack) {
- const last = lastOfArray(node.children);
+ const last = node.children[node.children.length - 1];
if (last && !last.isLocked() && last.frame === frame) {
node = last;
@@ -166,7 +165,7 @@ export class JSSelfProfile extends Profile {
}
// If node is the same as the previous sample, add the weight to the previous sample
- if (node === lastOfArray(this.samples)) {
+ if (node === this.samples[this.samples.length - 1]) {
this.weights[this.weights.length - 1] += weight;
} else {
this.samples.push(node);
diff --git a/static/app/utils/profiling/profile/sentrySampledProfile.tsx b/static/app/utils/profiling/profile/sentrySampledProfile.tsx
index c276d491587a9..0931e8e96d1bf 100644
--- a/static/app/utils/profiling/profile/sentrySampledProfile.tsx
+++ b/static/app/utils/profiling/profile/sentrySampledProfile.tsx
@@ -1,7 +1,6 @@
import moment from 'moment-timezone';
import {defined} from 'sentry/utils';
-import {lastOfArray} from 'sentry/utils/array/lastOfArray';
import {CallTreeNode} from 'sentry/utils/profiling/callTreeNode';
import type {Frame} from './../frame';
@@ -189,7 +188,7 @@ export class SentrySampledProfile extends Profile {
// frames are ordered outermost -> innermost so we have to iterate backward
for (let i = stack.length - 1; i >= 0; i--) {
const frame = stack[i];
- const last = lastOfArray(node.children);
+ const last = node.children[node.children.length - 1];
// Find common frame between two stacks
if (last && !last.isLocked() && last.frame === frame) {
node = last;
@@ -236,7 +235,7 @@ export class SentrySampledProfile extends Profile {
}
// If node is the same as the previous sample, add the weight to the previous sample
- if (node === lastOfArray(this.samples)) {
+ if (node === this.samples[this.samples.length - 1]) {
this.weights[this.weights.length - 1] += weight;
} else {
this.samples.push(node);
diff --git a/static/app/utils/reactRouter6Compat/onRouteLeave.tsx b/static/app/utils/reactRouter6Compat/onRouteLeave.tsx
index 14e936a56e396..f2fb2274bb1ca 100644
--- a/static/app/utils/reactRouter6Compat/onRouteLeave.tsx
+++ b/static/app/utils/reactRouter6Compat/onRouteLeave.tsx
@@ -17,19 +17,14 @@ interface OnRouteLeaveProps {
}
export function OnRouteLeave(props: OnRouteLeaveProps) {
- if (window.__SENTRY_USING_REACT_ROUTER_SIX) {
- unstable_usePrompt({
- message: props.message,
- when: state =>
- props.when({
- currentLocation: state.currentLocation,
- nextLocation: state.nextLocation,
- }),
- });
+ unstable_usePrompt({
+ message: props.message,
+ when: state =>
+ props.when({
+ currentLocation: state.currentLocation,
+ nextLocation: state.nextLocation,
+ }),
+ });
- return null;
- }
-
- props.router.setRouteLeaveHook(props.route, props.legacyWhen);
return null;
}
diff --git a/static/app/utils/reactRouter6Compat/router.tsx b/static/app/utils/reactRouter6Compat/router.tsx
index ab44a1274d489..23b6c383dabd4 100644
--- a/static/app/utils/reactRouter6Compat/router.tsx
+++ b/static/app/utils/reactRouter6Compat/router.tsx
@@ -1,6 +1,5 @@
import {Children, isValidElement} from 'react';
import {
- generatePath,
Navigate,
type NavigateProps,
Outlet,
@@ -9,6 +8,7 @@ import {
} from 'react-router-dom';
import {USING_CUSTOMER_DOMAIN} from 'sentry/constants';
+import replaceRouterParams from 'sentry/utils/replaceRouterParams';
import {useLocation} from 'sentry/utils/useLocation';
import {useParams} from 'sentry/utils/useParams';
import useRouter from 'sentry/utils/useRouter';
@@ -76,7 +76,7 @@ interface RedirectProps extends Omit {
function Redirect({to, ...rest}: RedirectProps) {
const params = useParams();
- return ;
+ return ;
}
Redirect.displayName = 'Redirect';
diff --git a/static/app/utils/replayCount/useReplayCount.tsx b/static/app/utils/replayCount/useReplayCount.tsx
index b613387b8e68e..a63cdd676486c 100644
--- a/static/app/utils/replayCount/useReplayCount.tsx
+++ b/static/app/utils/replayCount/useReplayCount.tsx
@@ -62,7 +62,10 @@ export default function useReplayCount({
data_source: dataSource,
project: -1,
statsPeriod,
- query: `${fieldName}:[${ids.join(',')}]`,
+ query:
+ fieldName === 'transaction'
+ ? `${fieldName}:[${ids.map(id => `"${id}"`).join(',')}]`
+ : `${fieldName}:[${ids.join(',')}]`,
},
},
],
diff --git a/static/app/utils/replays/fetchReplayList.tsx b/static/app/utils/replays/fetchReplayList.tsx
index 10bf2f07fe873..a19d766f3110f 100644
--- a/static/app/utils/replays/fetchReplayList.tsx
+++ b/static/app/utils/replays/fetchReplayList.tsx
@@ -3,6 +3,7 @@ import type {Location} from 'history';
import type {Client} from 'sentry/api';
import {ALL_ACCESS_PROJECTS} from 'sentry/constants/pageFilters';
+import type {PageFilters} from 'sentry/types/core';
import type {Organization} from 'sentry/types/organization';
import type EventView from 'sentry/utils/discover/eventView';
import {mapResponseToReplayRecord} from 'sentry/utils/replays/replayDataUtils';
@@ -24,6 +25,7 @@ type Props = {
eventView: EventView;
location: Location;
organization: Organization;
+ selection: PageFilters;
perPage?: number;
queryReferrer?: ReplayListQueryReferrer;
};
@@ -35,6 +37,7 @@ async function fetchReplayList({
eventView,
queryReferrer,
perPage,
+ selection,
}: Props): Promise {
try {
const path = `/organizations/${organization.slug}/replays/`;
@@ -60,9 +63,11 @@ async function fetchReplayList({
// we also require a project param otherwise we won't yield results
queryReferrer,
project:
- queryReferrer === 'issueReplays' || queryReferrer === 'transactionReplays'
+ queryReferrer === 'issueReplays'
? ALL_ACCESS_PROJECTS
- : payload.project,
+ : queryReferrer === 'transactionReplays'
+ ? selection.projects
+ : payload.project,
},
});
@@ -71,7 +76,13 @@ async function fetchReplayList({
return {
fetchError: undefined,
pageLinks,
- replays: data.map(mapResponseToReplayRecord),
+ replays: payload.query ? data.map(mapResponseToReplayRecord) : [],
+ // for the replay tab in transactions, if payload.query is undefined,
+ // this means the transaction has no related replays.
+ // but because we cannot query for an empty list of IDs (e.g. `id:[]` breaks our search endpoint),
+ // and leaving query empty results in ALL replays being returned for a specified project
+ // (which doesn't make sense as we want to show no replays),
+ // we essentially want to hardcode no replays being returned.
};
} catch (error) {
if (error.responseJSON?.detail) {
diff --git a/static/app/utils/replays/getFrameDetails.tsx b/static/app/utils/replays/getFrameDetails.tsx
index 6e9df5e34f260..b32e26ef8f3fd 100644
--- a/static/app/utils/replays/getFrameDetails.tsx
+++ b/static/app/utils/replays/getFrameDetails.tsx
@@ -40,6 +40,7 @@ import type {
MultiClickFrame,
MutationFrame,
NavFrame,
+ RawBreadcrumbFrame,
ReplayFrame,
SlowClickFrame,
TapFrame,
@@ -446,19 +447,19 @@ export default function getFrameDetails(frame: ReplayFrame): Details {
}
}
-function defaultTitle(frame: ReplayFrame) {
+export function defaultTitle(frame: ReplayFrame | RawBreadcrumbFrame) {
// Override title for User Feedback frames
if ('message' in frame && frame.message === 'User Feedback') {
return t('User Feedback');
}
- if ('category' in frame) {
+ if ('category' in frame && frame.category) {
const [type, action] = frame.category.split('.');
return `${type} ${action || ''}`.trim();
}
- if ('message' in frame) {
+ if ('message' in frame && frame.message) {
return frame.message as string; // TODO(replay): Included for backwards compat
}
- return frame.description ?? '';
+ return 'description' in frame ? frame.description ?? '' : '';
}
function stringifyNodeAttributes(node: SlowClickFrame['data']['node']) {
diff --git a/static/app/utils/replays/hooks/useA11yData.tsx b/static/app/utils/replays/hooks/useA11yData.tsx
deleted file mode 100644
index 8da1f6883a5b9..0000000000000
--- a/static/app/utils/replays/hooks/useA11yData.tsx
+++ /dev/null
@@ -1,38 +0,0 @@
-import {useMemo} from 'react';
-
-import {useReplayContext} from 'sentry/components/replays/replayContext';
-import {useQuery} from 'sentry/utils/queryClient';
-import type {RawA11yResponse} from 'sentry/utils/replays/hydrateA11yFrame';
-import hydrateA11yFrame from 'sentry/utils/replays/hydrateA11yFrame';
-import useApi from 'sentry/utils/useApi';
-import useOrganization from 'sentry/utils/useOrganization';
-import useProjects from 'sentry/utils/useProjects';
-
-export default function useA11yData() {
- const api = useApi();
- const organization = useOrganization();
- const {currentTime, replay} = useReplayContext();
- const {projects} = useProjects();
- const replayRecord = replay?.getReplay();
- const startTimestampMs = replayRecord?.started_at.getTime();
- const project = projects.find(p => p.id === replayRecord?.project_id);
- const unixTimestamp = ((startTimestampMs || 0) + currentTime) / 1000;
- const {data, ...rest} = useQuery({
- queryKey: [
- `/projects/${organization.slug}/${project?.slug}/replays/${replayRecord?.id}/accessibility-issues/`,
- ],
- queryFn: ({queryKey: [url]}) =>
- api.requestPromise(String(url), {
- method: 'GET',
- query: {timestamp: unixTimestamp},
- }),
- staleTime: 0,
- enabled: Boolean(project) && Boolean(replayRecord),
- });
-
- const hydrated = useMemo(
- () => data?.data?.flatMap(record => hydrateA11yFrame(record, startTimestampMs ?? 0)),
- [data?.data, startTimestampMs]
- );
- return {data: hydrated, dataOffsetMs: currentTime, ...rest};
-}
diff --git a/static/app/utils/replays/hooks/useActiveReplayTab.tsx b/static/app/utils/replays/hooks/useActiveReplayTab.tsx
index f0b4feb611eb5..241c95bfd6bb2 100644
--- a/static/app/utils/replays/hooks/useActiveReplayTab.tsx
+++ b/static/app/utils/replays/hooks/useActiveReplayTab.tsx
@@ -3,7 +3,6 @@ import {useCallback} from 'react';
import useUrlParams from 'sentry/utils/useUrlParams';
export enum TabKey {
- A11Y = 'a11y',
BREADCRUMBS = 'breadcrumbs',
CONSOLE = 'console',
ERRORS = 'errors',
diff --git a/static/app/utils/replays/hooks/useReplayList.tsx b/static/app/utils/replays/hooks/useReplayList.tsx
index efa6877de92a6..3264c9e7969fe 100644
--- a/static/app/utils/replays/hooks/useReplayList.tsx
+++ b/static/app/utils/replays/hooks/useReplayList.tsx
@@ -5,6 +5,7 @@ import type {Organization} from 'sentry/types/organization';
import type EventView from 'sentry/utils/discover/eventView';
import fetchReplayList from 'sentry/utils/replays/fetchReplayList';
import useApi from 'sentry/utils/useApi';
+import usePageFilters from 'sentry/utils/usePageFilters';
import type {
ReplayListLocationQuery,
ReplayListQueryReferrer,
@@ -30,6 +31,7 @@ function useReplayList({
perPage,
}: Options): Result {
const api = useApi();
+ const {selection} = usePageFilters();
const [data, setData] = useState({
fetchError: undefined,
@@ -51,10 +53,11 @@ function useReplayList({
eventView,
queryReferrer,
perPage,
+ selection,
});
setData({...response, isFetching: false});
- }, [api, organization, location, eventView, queryReferrer, perPage]);
+ }, [api, organization, location, eventView, queryReferrer, perPage, selection]);
useEffect(() => {
loadReplays();
diff --git a/static/app/utils/replays/hydrateA11yFrame.tsx b/static/app/utils/replays/hydrateA11yFrame.tsx
deleted file mode 100644
index 214d42a42e9ed..0000000000000
--- a/static/app/utils/replays/hydrateA11yFrame.tsx
+++ /dev/null
@@ -1,86 +0,0 @@
-export interface RawA11yResponse {
- data: RawA11yFrame[];
-}
-
-export interface RawA11yFrame {
- elements: A11yIssueElement[];
- help: string;
- help_url: string;
- id: string;
- timestamp: number;
- impact?: 'minor' | 'moderate' | 'serious' | 'critical';
-}
-
-interface A11yIssueElement {
- alternatives: A11yIssueElementAlternative[];
- element: string;
- target: string[];
-}
-
-interface A11yIssueElementAlternative {
- id: string;
- message: string;
-}
-
-type Overwrite = Pick> & U;
-
-export type HydratedA11yFrame = Overwrite<
- Omit,
- {
- /**
- * For compatibility with Frames, to highlight the element within the replay
- */
- data: {
- element: A11yIssueElement;
- label: string;
- };
- /**
- * Rename `help` to conform to ReplayFrame basics.
- */
- description: string;
- /**
- * The specific element instance
- */
- element: A11yIssueElement;
- /**
- * The difference in timestamp and replay.started_at, in millieseconds
- */
- offsetMs: number;
- /**
- * The Date when the a11yIssue happened
- */
- timestamp: Date;
- /**
- * Alias of timestamp, in milliseconds
- */
- timestampMs: number;
- }
->;
-
-export default function hydrateA11yFrame(
- raw: RawA11yFrame,
- startTimestampMs: number
-): HydratedA11yFrame[] {
- return raw.elements.map((element): HydratedA11yFrame => {
- const timestamp = new Date(raw.timestamp);
- const timestampMs = timestamp.getTime();
- const elementWithoutIframe = {
- ...element,
- target: element.target[0] === 'iframe' ? element.target.slice(1) : element.target,
- };
- return {
- data: {
- element: elementWithoutIframe,
- label: raw.id,
- },
- description: raw.help,
- element: elementWithoutIframe,
- help_url: raw.help_url,
- id: raw.id,
- impact: raw.impact,
- offsetMs: timestampMs - startTimestampMs,
- timestamp,
- timestampMs,
- };
- });
-}
diff --git a/static/app/utils/replays/hydrateBreadcrumbs.tsx b/static/app/utils/replays/hydrateBreadcrumbs.tsx
index 8112aa9c7cfac..88399d7a16059 100644
--- a/static/app/utils/replays/hydrateBreadcrumbs.tsx
+++ b/static/app/utils/replays/hydrateBreadcrumbs.tsx
@@ -3,6 +3,7 @@ import invariant from 'invariant';
import {t} from 'sentry/locale';
import {BreadcrumbType} from 'sentry/types/breadcrumbs';
import isValidDate from 'sentry/utils/date/isValidDate';
+import {defaultTitle} from 'sentry/utils/replays/getFrameDetails';
import type {BreadcrumbFrame, RawBreadcrumbFrame} from 'sentry/utils/replays/types';
import {isBreadcrumbFrame} from 'sentry/utils/replays/types';
import type {ReplayRecord} from 'sentry/views/replays/types';
@@ -26,6 +27,8 @@ export default function hydrateBreadcrumbs(
}
return {
...frame,
+ // custom frames might not have a defined category, so we need to set one
+ category: frame.category || defaultTitle(frame) || 'custom',
offsetMs: Math.abs(time.getTime() - startTimestampMs),
timestamp: time,
timestampMs: time.getTime(),
diff --git a/static/app/utils/replays/playback/providers/replayPlayerEventsContext.spec.tsx b/static/app/utils/replays/playback/providers/replayPlayerEventsContext.spec.tsx
index 5dc6310488472..c183c28ee17af 100644
--- a/static/app/utils/replays/playback/providers/replayPlayerEventsContext.spec.tsx
+++ b/static/app/utils/replays/playback/providers/replayPlayerEventsContext.spec.tsx
@@ -44,7 +44,7 @@ describe('replayPlayerEventsContext', () => {
errors: [],
replayRecord: ReplayRecordFixture(),
});
- const mockRRwebFrames = [];
+ const mockRRwebFrames: any[] = [];
mockReplay!.getRRWebFrames = jest.fn().mockReturnValue(mockRRwebFrames);
const {result} = renderHook(useReplayPlayerEvents, {
diff --git a/static/app/utils/replays/playback/providers/replayPlayerPluginsContextProvider.spec.tsx b/static/app/utils/replays/playback/providers/replayPlayerPluginsContextProvider.spec.tsx
index b8df32a0169df..c4114f891973e 100644
--- a/static/app/utils/replays/playback/providers/replayPlayerPluginsContextProvider.spec.tsx
+++ b/static/app/utils/replays/playback/providers/replayPlayerPluginsContextProvider.spec.tsx
@@ -39,7 +39,7 @@ describe('replayPlayerPluginsContext', () => {
it('should return no plugins if you dont use the Provider', () => {
const mockOrganization = OrganizationFixture();
- const mockEvents = [];
+ const mockEvents: any[] = [];
const {result} = renderHook(useReplayPlayerPlugins, {
wrapper: ({children}: {children?: ReactNode}) => (
@@ -57,7 +57,7 @@ describe('replayPlayerPluginsContext', () => {
const mockOrganizationWithCanvas = OrganizationFixture({
features: ['session-replay-enable-canvas-replayer'],
});
- const mockEvents = [];
+ const mockEvents: any[] = [];
const {result: noCanvasResult} = renderHook(useReplayPlayerPlugins, {
wrapper: makeWrapper(mockOrganizationNoCanvas),
diff --git a/static/app/utils/replays/replay.tsx b/static/app/utils/replays/replay.tsx
index b8727b9bb59ed..a88a87000918e 100644
--- a/static/app/utils/replays/replay.tsx
+++ b/static/app/utils/replays/replay.tsx
@@ -8,6 +8,7 @@ export type NetworkMetaWarning =
| 'INVALID_JSON'
| 'URL_SKIPPED'
| 'BODY_PARSE_ERROR'
+ | 'BODY_PARSE_TIMEOUT'
| 'UNPARSEABLE_BODY_TYPE';
interface NetworkMeta {
diff --git a/static/app/utils/replays/types.tsx b/static/app/utils/replays/types.tsx
index 2c004272f2bd8..2e23e4c542cc7 100644
--- a/static/app/utils/replays/types.tsx
+++ b/static/app/utils/replays/types.tsx
@@ -15,8 +15,6 @@ import type {
} from '@sentry/react';
import invariant from 'invariant';
-import type {HydratedA11yFrame} from 'sentry/utils/replays/hydrateA11yFrame';
-
export type Dimensions = {
height: number;
width: number;
@@ -444,7 +442,7 @@ export type ErrorFrame = Overwrite<
}
>;
-export type ReplayFrame = BreadcrumbFrame | ErrorFrame | SpanFrame | HydratedA11yFrame;
+export type ReplayFrame = BreadcrumbFrame | ErrorFrame | SpanFrame;
interface VideoFrame {
container: string;
diff --git a/static/app/utils/theme.tsx b/static/app/utils/theme.tsx
index b6d6e01d010a4..e94044f6c4e65 100644
--- a/static/app/utils/theme.tsx
+++ b/static/app/utils/theme.tsx
@@ -648,6 +648,16 @@ const generateUtils = (colors: BaseColors, aliases: Aliases) => ({
overflow: 'hidden',
textOverflow: 'ellipsis',
}),
+ // https://css-tricks.com/inclusively-hidden/
+ visuallyHidden: css({
+ clip: 'rect(0 0 0 0)',
+ clipPath: 'inset(50%)',
+ height: '1px',
+ overflow: 'hidden',
+ position: 'absolute',
+ whiteSpace: 'nowrap',
+ width: '1px',
+ }),
});
const generatePrismVariables = (
diff --git a/static/app/utils/useCommitters.tsx b/static/app/utils/useCommitters.tsx
index 1b522b463fc4f..3725cfbd7739c 100644
--- a/static/app/utils/useCommitters.tsx
+++ b/static/app/utils/useCommitters.tsx
@@ -29,6 +29,7 @@ function useCommitters(
{
staleTime: Infinity,
retry: false,
+ enabled: !!eventId,
...options,
}
);
diff --git a/static/app/utils/useHoverOverlay.tsx b/static/app/utils/useHoverOverlay.tsx
index a63d2d4222de0..2032436cfb229 100644
--- a/static/app/utils/useHoverOverlay.tsx
+++ b/static/app/utils/useHoverOverlay.tsx
@@ -122,7 +122,16 @@ interface UseHoverOverlayProps {
}
function isOverflown(el: Element): boolean {
- return el.scrollWidth > el.clientWidth || Array.from(el.children).some(isOverflown);
+ // Safari seems to calculate scrollWidth incorrectly, causing isOverflown to always return true in some cases.
+ // Adding a 2 pixel tolerance seems to account for this discrepancy.
+ const tolerance =
+ navigator.userAgent.includes('Safari') && !navigator.userAgent.includes('Chrome')
+ ? 2
+ : 0;
+ return (
+ el.scrollWidth - el.clientWidth > tolerance ||
+ Array.from(el.children).some(isOverflown)
+ );
}
function maybeClearRefTimeout(ref: React.MutableRefObject) {
diff --git a/static/app/utils/useIssueEventOwners.tsx b/static/app/utils/useIssueEventOwners.tsx
new file mode 100644
index 0000000000000..98f76497b2346
--- /dev/null
+++ b/static/app/utils/useIssueEventOwners.tsx
@@ -0,0 +1,31 @@
+import type {EventOwners} from 'sentry/components/group/assignedTo';
+import type {ApiQueryKey, UseApiQueryOptions} from 'sentry/utils/queryClient';
+import {useApiQuery} from 'sentry/utils/queryClient';
+import useOrganization from 'sentry/utils/useOrganization';
+
+interface UseIssueEventOwnersProps {
+ eventId: string;
+ projectSlug: string;
+}
+
+const makeCommittersQueryKey = (
+ orgSlug: string,
+ projectSlug: string,
+ eventId: string
+): ApiQueryKey => [`/projects/${orgSlug}/${projectSlug}/events/${eventId}/owners/`];
+
+export function useIssueEventOwners(
+ {eventId, projectSlug}: UseIssueEventOwnersProps,
+ options: Partial> = {}
+) {
+ const org = useOrganization();
+ return useApiQuery(
+ makeCommittersQueryKey(org.slug, projectSlug, eventId),
+ {
+ staleTime: Infinity,
+ retry: false,
+ enabled: !!eventId,
+ ...options,
+ }
+ );
+}
diff --git a/static/app/utils/useLocation.tsx b/static/app/utils/useLocation.tsx
index c9f5c737be37b..1f2a5fd0573e6 100644
--- a/static/app/utils/useLocation.tsx
+++ b/static/app/utils/useLocation.tsx
@@ -2,7 +2,6 @@ import {useMemo} from 'react';
import {useLocation as useReactRouter6Location} from 'react-router-dom';
import type {Location, Query} from 'history';
-import {NODE_ENV} from 'sentry/constants';
import {useRouteContext} from 'sentry/utils/useRouteContext';
import {location6ToLocation3} from './reactRouter6Compat/location';
@@ -14,11 +13,10 @@ type DefaultQuery = {
export function useLocation(): Location {
// When running in test mode we still read from the legacy route context to
// keep test compatability while we fully migrate to react router 6
- const useReactRouter6 = window.__SENTRY_USING_REACT_ROUTER_SIX && NODE_ENV !== 'test';
+ const legacyRouterContext = useRouteContext();
- if (!useReactRouter6) {
- // biome-ignore lint/correctness/useHookAtTopLevel: react-router 6 migration
- return useRouteContext().location;
+ if (legacyRouterContext) {
+ return legacyRouterContext.location;
}
// biome-ignore lint/correctness/useHookAtTopLevel: react-router 6 migration
diff --git a/static/app/utils/useNavigate.tsx b/static/app/utils/useNavigate.tsx
index 148a6b8b51429..0d924b8ab8151 100644
--- a/static/app/utils/useNavigate.tsx
+++ b/static/app/utils/useNavigate.tsx
@@ -2,7 +2,6 @@ import {useCallback, useEffect, useRef} from 'react';
import {useNavigate as useReactRouter6Navigate} from 'react-router-dom';
import type {LocationDescriptor} from 'history';
-import {NODE_ENV} from 'sentry/constants';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
import {locationDescriptorToTo} from './reactRouter6Compat/location';
@@ -27,9 +26,9 @@ interface ReactRouter3Navigate {
export function useNavigate(): ReactRouter3Navigate {
// When running in test mode we still read from the legacy route context to
// keep test compatability while we fully migrate to react router 6
- const useReactRouter6 = window.__SENTRY_USING_REACT_ROUTER_SIX && NODE_ENV !== 'test';
+ const legacyRouterContext = useRouteContext();
- if (useReactRouter6) {
+ if (!legacyRouterContext) {
// biome-ignore lint/correctness/useHookAtTopLevel: react-router-6 migration
const router6Navigate = useReactRouter6Navigate();
@@ -51,8 +50,7 @@ export function useNavigate(): ReactRouter3Navigate {
// XXX(epurkihser): We are using react-router 3 here, to avoid recursive
// dependencies we just use the useRouteContext instead of useRouter here
- // biome-ignore lint/correctness/useHookAtTopLevel: react-router-6 migration
- const {router} = useRouteContext();
+ const {router} = legacyRouterContext;
// biome-ignore lint/correctness/useHookAtTopLevel: react-router-6 migration
const hasMountedRef = useRef(false);
diff --git a/static/app/utils/useParams.spec.tsx b/static/app/utils/useParams.spec.tsx
index 7051ac4966870..340d379b48eae 100644
--- a/static/app/utils/useParams.spec.tsx
+++ b/static/app/utils/useParams.spec.tsx
@@ -89,7 +89,7 @@ describe('useParams', () => {
let useParamsValue;
function Component() {
- const {params} = useRouteContext();
+ const {params} = useRouteContext()!;
originalParams = params;
useParamsValue = useParams();
return (
@@ -127,7 +127,7 @@ describe('useParams', () => {
let useParamsValue;
function Component() {
- const {params} = useRouteContext();
+ const {params} = useRouteContext()!;
originalParams = params;
useParamsValue = useParams();
return (
diff --git a/static/app/utils/useParams.tsx b/static/app/utils/useParams.tsx
index 18c61dc06ef8c..99f8d96c618d4 100644
--- a/static/app/utils/useParams.tsx
+++ b/static/app/utils/useParams.tsx
@@ -1,22 +1,22 @@
import {useMemo} from 'react';
import {useParams as useReactRouter6Params} from 'react-router-dom';
-import {CUSTOMER_DOMAIN, NODE_ENV, USING_CUSTOMER_DOMAIN} from 'sentry/constants';
-import {useRouteContext} from 'sentry/utils/useRouteContext';
+import {CUSTOMER_DOMAIN, USING_CUSTOMER_DOMAIN} from 'sentry/constants';
+
+import {useRouteContext} from './useRouteContext';
export function useParams>(): P {
// When running in test mode we still read from the legacy route context to
// keep test compatability while we fully migrate to react router 6
- const useReactRouter6 = window.__SENTRY_USING_REACT_ROUTER_SIX && NODE_ENV !== 'test';
+ const legacyRouterContext = useRouteContext();
let contextParams: any;
- if (useReactRouter6) {
+ if (!legacyRouterContext) {
// biome-ignore lint/correctness/useHookAtTopLevel: react-router 6 migration
contextParams = useReactRouter6Params();
} else {
- // biome-ignore lint/correctness/useHookAtTopLevel: react-router 6 migration
- contextParams = useRouteContext().params;
+ contextParams = legacyRouterContext.params;
}
// Memoize params as mutating for customer domains causes other hooks
diff --git a/static/app/utils/usePrevious.stories.tsx b/static/app/utils/usePrevious.stories.tsx
new file mode 100644
index 0000000000000..cc2c650ec2398
--- /dev/null
+++ b/static/app/utils/usePrevious.stories.tsx
@@ -0,0 +1,39 @@
+import {Fragment, useState} from 'react';
+
+import StructuredEventData from 'sentry/components/structuredEventData';
+import storyBook from 'sentry/stories/storyBook';
+import usePrevious from 'sentry/utils/usePrevious';
+
+export default storyBook('usePrevious', story => {
+ story('Default', () => {
+ const [count, setCount] = useState(0);
+ const prevCount = usePrevious(count);
+
+ return (
+
+
+ Use usePrevious
to keep track of the previous state.
+
+ setCount(prev => prev + 1)}>Add 1
+ ;
+
+ );
+ });
+
+ story('Stacked', () => {
+ const [count, setCount] = useState(0);
+ const prevCount = usePrevious(count);
+ const penultimateCount = usePrevious(prevCount);
+
+ return (
+
+
+ You can even stack usePrevious
to keep track of the penultimate
+ states.
+
+ setCount(prev => prev + 1)}>Add 1
+ ;
+
+ );
+ });
+});
diff --git a/static/app/utils/useRouteContext.tsx b/static/app/utils/useRouteContext.tsx
index 49d7311e15c3a..12c905b368633 100644
--- a/static/app/utils/useRouteContext.tsx
+++ b/static/app/utils/useRouteContext.tsx
@@ -3,9 +3,5 @@ import {useContext} from 'react';
import {RouteContext} from 'sentry/views/routeContext';
export function useRouteContext() {
- const route = useContext(RouteContext);
- if (route === null) {
- throw new Error(`useRouteContext called outside of routes provider`);
- }
- return route;
+ return useContext(RouteContext);
}
diff --git a/static/app/utils/useRouter.spec.tsx b/static/app/utils/useRouter.spec.tsx
index 9cdae187bcbcf..81b4eb1a6b4c7 100644
--- a/static/app/utils/useRouter.spec.tsx
+++ b/static/app/utils/useRouter.spec.tsx
@@ -29,22 +29,4 @@ describe('useRouter', () => {
);
expect(actualRouter).toEqual(routeContext.router);
});
-
- it('throws error when called outside of routes provider', function () {
- // Error is expected, do not fail when calling console.error
- jest.spyOn(console, 'error').mockImplementation();
-
- function HomePage() {
- useRouter();
- return null;
- }
-
- expect(() =>
- render(
-
-
-
- )
- ).toThrow(/useRouteContext called outside of routes provider/);
- });
});
diff --git a/static/app/utils/useRouter.tsx b/static/app/utils/useRouter.tsx
index e28c744a2a461..3d193800aca6b 100644
--- a/static/app/utils/useRouter.tsx
+++ b/static/app/utils/useRouter.tsx
@@ -1,14 +1,12 @@
import {useMemo} from 'react';
-import type {RouteHook} from 'react-router/lib/Router';
import type {LocationDescriptor} from 'history';
-import {NODE_ENV} from 'sentry/constants';
import type {InjectedRouter} from 'sentry/types/legacyReactRouter';
-import {useRouteContext} from 'sentry/utils/useRouteContext';
import {useLocation} from './useLocation';
import {useNavigate} from './useNavigate';
import {useParams} from './useParams';
+import {useRouteContext} from './useRouteContext';
import {useRoutes} from './useRoutes';
/**
@@ -20,11 +18,10 @@ import {useRoutes} from './useRoutes';
function useRouter(): InjectedRouter {
// When running in test mode we still read from the legacy route context to
// keep test compatability while we fully migrate to react router 6
- const useReactRouter6 = window.__SENTRY_USING_REACT_ROUTER_SIX && NODE_ENV !== 'test';
+ const legacyRouterContext = useRouteContext();
- if (!useReactRouter6) {
- // biome-ignore lint/correctness/useHookAtTopLevel: react-router 6 migration
- return useRouteContext().router;
+ if (legacyRouterContext) {
+ return legacyRouterContext.router;
}
// biome-ignore lint/correctness/useHookAtTopLevel: react-router 6 migration
@@ -62,7 +59,7 @@ function useRouter(): InjectedRouter {
createHref: (_pathOrLoc: LocationDescriptor, _query?: any) => {
throw new Error('createHref not implemented for react-router 6 migration');
},
- setRouteLeaveHook: (_route: any, _callback: RouteHook) => () => {
+ setRouteLeaveHook: (_route: any, _callback: any) => () => {
throw new Error(
'setRouteLeave hook not implemented for react-router6 migration'
);
diff --git a/static/app/utils/useRoutes.spec.tsx b/static/app/utils/useRoutes.spec.tsx
index e07621ff0c41b..cb85d89f40668 100644
--- a/static/app/utils/useRoutes.spec.tsx
+++ b/static/app/utils/useRoutes.spec.tsx
@@ -35,22 +35,4 @@ describe('useRoutes', () => {
expect(routes.length).toEqual(1);
expect(routes[0]).toEqual({path: '/', component: HomePage});
});
-
- it('throws error when called outside of routes provider', function () {
- // Error is expected, do not fail when calling console.error
- jest.spyOn(console, 'error').mockImplementation();
-
- function HomePage() {
- useRoutes();
- return null;
- }
-
- expect(() =>
- render(
-
-
-
- )
- ).toThrow(/useRouteContext called outside of routes provider/);
- });
});
diff --git a/static/app/utils/useRoutes.tsx b/static/app/utils/useRoutes.tsx
index f4ffdf39c7efc..be0a13ea97c5d 100644
--- a/static/app/utils/useRoutes.tsx
+++ b/static/app/utils/useRoutes.tsx
@@ -1,18 +1,17 @@
import {useMemo} from 'react';
import {useMatches} from 'react-router-dom';
-import {NODE_ENV} from 'sentry/constants';
import type {PlainRoute} from 'sentry/types/legacyReactRouter';
-import {useRouteContext} from 'sentry/utils/useRouteContext';
+
+import {useRouteContext} from './useRouteContext';
export function useRoutes(): PlainRoute[] {
// When running in test mode we still read from the legacy route context to
// keep test compatability while we fully migrate to react router 6
- const useReactRouter6 = window.__SENTRY_USING_REACT_ROUTER_SIX && NODE_ENV !== 'test';
+ const legacyRouterContext = useRouteContext();
- if (!useReactRouter6) {
- // biome-ignore lint/correctness/useHookAtTopLevel: react-router-6 migration
- return useRouteContext().routes;
+ if (legacyRouterContext) {
+ return legacyRouterContext.routes;
}
// biome-ignore lint/correctness/useHookAtTopLevel: react-router-6 migration
diff --git a/static/app/utils/useUser.tsx b/static/app/utils/useUser.tsx
index da2542c1ab881..233e309255121 100644
--- a/static/app/utils/useUser.tsx
+++ b/static/app/utils/useUser.tsx
@@ -8,10 +8,10 @@ import type {User} from 'sentry/types/user';
export function useUser(): Readonly {
// Intentional exception to accessing the deprecated field as we want to
// deter users from consuming the user differently than through the hook.
- const {user} = useLegacyStore(configStore);
+ const config = useLegacyStore(configStore);
// @TODO: Return a readonly type as a mechanism to deter users from mutating the
// user directly. That said, this provides basic type safety and no runtime safety
// as there are still plenty of ways to mutate the user. The runtime safe way of
// enforcing this would be via Object.freeze.
- return user;
+ return config.user;
}
diff --git a/static/app/utils/withDomainRedirect.spec.tsx b/static/app/utils/withDomainRedirect.spec.tsx
index cac25d3b2e033..68d92776d05ba 100644
--- a/static/app/utils/withDomainRedirect.spec.tsx
+++ b/static/app/utils/withDomainRedirect.spec.tsx
@@ -19,7 +19,7 @@ const projectRoutes = [
{childRoutes: []},
{path: '/settings/', name: 'Settings', indexRoute: {}, childRoutes: []},
{name: 'Organizations', path: ':orgId/', childRoutes: []},
- {name: 'Projects', path: ':projectId/(searches/:searchId/)', childRoutes: []},
+ {name: 'Projects', path: ':projectId/', childRoutes: []},
{name: 'Alerts', path: 'alerts/'},
];
diff --git a/static/app/utils/withDomainRedirect.tsx b/static/app/utils/withDomainRedirect.tsx
index da64ae9efc73c..3660e04ca9de9 100644
--- a/static/app/utils/withDomainRedirect.tsx
+++ b/static/app/utils/withDomainRedirect.tsx
@@ -1,5 +1,5 @@
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {formatPattern} from 'react-router';
+import {generatePath} from 'react-router-dom';
+import trim from 'lodash/trim';
import trimEnd from 'lodash/trimEnd';
import trimStart from 'lodash/trimStart';
@@ -70,8 +70,8 @@ function withDomainRedirect>(
return ;
}
- const orglessRedirectPath = formatPattern(orglessSlugRoute, params);
- const redirectOrgURL = `/${trimStart(orglessRedirectPath, '/')}${
+ const orglessRedirectPath = generatePath(orglessSlugRoute, params);
+ const redirectOrgURL = `/${trim(orglessRedirectPath, '/')}/${
window.location.search
}${window.location.hash}`;
diff --git a/static/app/utils/withIssueTags.spec.tsx b/static/app/utils/withIssueTags.spec.tsx
deleted file mode 100644
index 0766a473120cc..0000000000000
--- a/static/app/utils/withIssueTags.spec.tsx
+++ /dev/null
@@ -1,121 +0,0 @@
-import {OrganizationFixture} from 'sentry-fixture/organization';
-import {TeamFixture} from 'sentry-fixture/team';
-import {UserFixture} from 'sentry-fixture/user';
-
-import {act, render, screen, waitFor} from 'sentry-test/reactTestingLibrary';
-
-import type {SearchGroup} from 'sentry/components/smartSearchBar/types';
-import MemberListStore from 'sentry/stores/memberListStore';
-import TagStore from 'sentry/stores/tagStore';
-import TeamStore from 'sentry/stores/teamStore';
-import type {WithIssueTagsProps} from 'sentry/utils/withIssueTags';
-import withIssueTags from 'sentry/utils/withIssueTags';
-
-interface MyComponentProps extends WithIssueTagsProps {
- forwardedValue: string;
-}
-function MyComponent(props: MyComponentProps) {
- return (
-
- ForwardedValue: {props.forwardedValue}
- {'is: ' + props.tags?.is?.values?.[0]}
- {'mechanism: ' + props.tags?.mechanism?.values?.join(', ')}
- {'bookmarks: ' + props.tags?.bookmarks?.values?.join(', ')}
- {'assigned: ' +
- (props.tags?.assigned?.values as SearchGroup[])
- .flatMap(x => x.children)
- .map(x => x.desc)
- ?.join(', ')}
- {'stack filename: ' + props.tags?.['stack.filename'].name}
-
- );
-}
-
-describe('withIssueTags HoC', function () {
- beforeEach(() => {
- TeamStore.reset();
- TagStore.reset();
- MemberListStore.loadInitialData([]);
- });
-
- it('forwards loaded tags to the wrapped component', async function () {
- const Container = withIssueTags(MyComponent);
- render( );
-
- // Should forward props.
- expect(await screen.findByText(/ForwardedValue: value/)).toBeInTheDocument();
-
- act(() => {
- TagStore.loadTagsSuccess([
- {name: 'MechanismTag', key: 'mechanism', values: ['MechanismTagValue']},
- ]);
- });
-
- // includes custom tags
- await waitFor(() => {
- expect(screen.getByText(/MechanismTagValue/)).toBeInTheDocument();
- });
-
- // should include special issue and attributes.
- expect(screen.getByText(/is: resolved/)).toBeInTheDocument();
- expect(screen.getByText(/bookmarks: me/)).toBeInTheDocument();
- expect(screen.getByText(/assigned: me/)).toBeInTheDocument();
- expect(screen.getByText(/stack filename: stack.filename/)).toBeInTheDocument();
- });
-
- it('updates the assigned tags with users and teams, and bookmark tags with users', function () {
- const Container = withIssueTags(MyComponent);
- render( );
-
- act(() => {
- TagStore.loadTagsSuccess([
- {name: 'MechanismTag', key: 'mechanism', values: ['MechanismTagValue']},
- ]);
- });
-
- expect(
- screen.getByText(/assigned: me, my_teams, none, \[me, my_teams, none\]/)
- ).toBeInTheDocument();
-
- act(() => {
- TeamStore.loadInitialData([
- TeamFixture({slug: 'best-team-na', name: 'Best Team NA', isMember: true}),
- ]);
- MemberListStore.loadInitialData([
- UserFixture(),
- UserFixture({username: 'joe@example.com'}),
- ]);
- });
-
- expect(
- screen.getByText(
- /assigned: me, my_teams, none, \[me, my_teams, none\], #best-team-na, foo@example.com, joe@example.com/
- )
- ).toBeInTheDocument();
-
- expect(
- screen.getByText(/bookmarks: me, foo@example.com, joe@example.com/)
- ).toBeInTheDocument();
- });
-
- it('groups assignees and puts suggestions first', function () {
- const Container = withIssueTags(MyComponent);
- TeamStore.loadInitialData([
- TeamFixture({id: '1', slug: 'best-team', name: 'Best Team', isMember: true}),
- TeamFixture({id: '2', slug: 'worst-team', name: 'Worst Team', isMember: false}),
- ]);
- MemberListStore.loadInitialData([
- UserFixture(),
- UserFixture({username: 'joe@example.com'}),
- ]);
- const {container} = render(
-
- );
-
- expect(container).toHaveTextContent(
- 'assigned: me, my_teams, none, [me, my_teams, none], #best-team'
- );
- // Has the other teams/members
- expect(container).toHaveTextContent('foo@example.com, joe@example.com, #worst-team');
- });
-});
diff --git a/static/app/utils/withIssueTags.tsx b/static/app/utils/withIssueTags.tsx
deleted file mode 100644
index 974483be4bb59..0000000000000
--- a/static/app/utils/withIssueTags.tsx
+++ /dev/null
@@ -1,126 +0,0 @@
-import {useEffect, useMemo, useState} from 'react';
-
-import type {SearchGroup} from 'sentry/components/smartSearchBar/types';
-import {ItemType} from 'sentry/components/smartSearchBar/types';
-import {escapeTagValue} from 'sentry/components/smartSearchBar/utils';
-import {IconStar, IconUser} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import MemberListStore from 'sentry/stores/memberListStore';
-import TagStore from 'sentry/stores/tagStore';
-import TeamStore from 'sentry/stores/teamStore';
-import {useLegacyStore} from 'sentry/stores/useLegacyStore';
-import type {TagCollection} from 'sentry/types/group';
-import type {Organization} from 'sentry/types/organization';
-import type {User} from 'sentry/types/user';
-import getDisplayName from 'sentry/utils/getDisplayName';
-
-export interface WithIssueTagsProps {
- organization: Organization;
- tags: TagCollection;
-}
-
-type HocProps = {
- organization: Organization;
-};
-
-const uuidPattern = /[0-9a-f]{32}$/;
-const getUsername = ({isManaged, username, email}: User) => {
- // Users created via SAML receive unique UUID usernames. Use
- // their email in these cases, instead.
- if (username && uuidPattern.test(username)) {
- return email;
- }
- return !isManaged && username ? username : email;
-};
-
-function convertToSearchItem(value: string) {
- const escapedValue = escapeTagValue(value);
- return {
- value: escapedValue,
- desc: value,
- type: ItemType.TAG_VALUE,
- };
-}
-
-/**
- * HOC for getting tags and many useful issue attributes as 'tags' for use
- * in autocomplete selectors or condition builders.
- */
-function withIssueTags(
- WrappedComponent: React.ComponentType
-) {
- function ComponentWithTags(props: Omit & HocProps) {
- const {teams} = useLegacyStore(TeamStore);
- const {members} = useLegacyStore(MemberListStore);
- const [tags, setTags] = useState(
- TagStore.getIssueTags(props.organization)
- );
-
- const issueTags = useMemo((): TagCollection => {
- const usernames: string[] = members.map(getUsername);
- const userTeams = teams.filter(team => team.isMember).map(team => `#${team.slug}`);
- const nonMemberTeams = teams
- .filter(team => !team.isMember)
- .map(team => `#${team.slug}`);
-
- const suggestedAssignees: string[] = [
- 'me',
- 'my_teams',
- 'none',
- // New search builder only works with single value suggestions
- ...(props.organization.features.includes('issue-stream-search-query-builder')
- ? []
- : ['[me, my_teams, none]']),
- ...userTeams,
- ];
- const assignedValues: SearchGroup[] | string[] = [
- {
- title: t('Suggested Values'),
- type: 'header',
- icon: ,
- children: suggestedAssignees.map(convertToSearchItem),
- },
- {
- title: t('All Values'),
- type: 'header',
- icon: ,
- children: [
- ...usernames.map(convertToSearchItem),
- ...nonMemberTeams.map(convertToSearchItem),
- ],
- },
- ];
-
- return {
- ...tags,
- assigned: {
- ...tags.assigned,
- values: assignedValues,
- },
- bookmarks: {
- ...tags.bookmarks,
- values: ['me', ...usernames],
- },
- assigned_or_suggested: {
- ...tags.assigned_or_suggested,
- values: assignedValues,
- },
- };
- }, [members, teams, props.organization.features, tags]);
-
- // Listen to tag store updates and cleanup listener on unmount
- useEffect(() => {
- const unsubscribeTags = TagStore.listen(() => {
- setTags(TagStore.getIssueTags(props.organization));
- }, undefined);
-
- return () => unsubscribeTags();
- }, [props.organization, setTags]);
-
- return ;
- }
- ComponentWithTags.displayName = `withIssueTags(${getDisplayName(WrappedComponent)})`;
- return ComponentWithTags;
-}
-
-export default withIssueTags;
diff --git a/static/app/utils/withSentryRouter.tsx b/static/app/utils/withSentryRouter.tsx
index 258cf0cf954eb..d377951d5f23d 100644
--- a/static/app/utils/withSentryRouter.tsx
+++ b/static/app/utils/withSentryRouter.tsx
@@ -10,7 +10,7 @@ import useRouter from './useRouter';
*
* @deprecated only use in legacy react class components
*/
-function withSentryRouter(
+function withSentryRouter
>(
WrappedComponent: React.ComponentType
): React.ComponentType> {
function WithSentryRouterWrapper(props: Omit) {
diff --git a/static/app/views/admin/installWizard/index.tsx b/static/app/views/admin/installWizard/index.tsx
index c99239224dd4d..f4cfeace301ce 100644
--- a/static/app/views/admin/installWizard/index.tsx
+++ b/static/app/views/admin/installWizard/index.tsx
@@ -54,7 +54,7 @@ export default class InstallWizard extends DeprecatedAsyncView<
}
// A mapping of option name to Field object
- const fields = {};
+ const fields: Record = {};
for (const key of missingOptions) {
const option = options[key];
diff --git a/static/app/views/admin/options.tsx b/static/app/views/admin/options.tsx
index 882354a892fc3..7b70f3eda335d 100644
--- a/static/app/views/admin/options.tsx
+++ b/static/app/views/admin/options.tsx
@@ -661,21 +661,20 @@ export function getOptionField(option: string, field: Field) {
);
}
-function getSectionFieldSet(section: Section, fields: Field[]) {
+function getSectionFieldSet(section: Section, fields: React.ReactNode[]) {
return (
{section.heading && {section.heading} }
- {/* TODO(TS): Types indicate fields can be an object */}
- {fields as React.ReactNode}
+ {fields}
);
}
-export function getForm(fieldMap: Record) {
+export function getForm(fieldMap: Record) {
const sets: React.ReactNode[] = [];
for (const section of sections) {
- const set: Field[] = [];
+ const set: React.ReactNode[] = [];
for (const option of optionsForSection(section)) {
if (fieldMap[option.key]) {
diff --git a/static/app/views/alerts/create.spec.tsx b/static/app/views/alerts/create.spec.tsx
index 4027f24bec372..cb3ce94dd880a 100644
--- a/static/app/views/alerts/create.spec.tsx
+++ b/static/app/views/alerts/create.spec.tsx
@@ -161,7 +161,7 @@ describe('ProjectAlertsCreate', function () {
'The issue is older or newer than...',
]);
- await userEvent.click(screen.getAllByLabelText('Delete Node')[2]);
+ await userEvent.click(screen.getAllByLabelText('Delete Node')[1]);
await userEvent.click(screen.getByText('Save Rule'));
@@ -174,10 +174,7 @@ describe('ProjectAlertsCreate', function () {
actions: [],
conditions: [
expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.NewHighPriorityIssueCondition',
- }),
- expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.ExistingHighPriorityIssueCondition',
+ id: 'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
}),
],
filterMatch: 'all',
@@ -200,7 +197,6 @@ describe('ProjectAlertsCreate', function () {
});
// delete node
await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
- await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
// Change name of alert rule
await userEvent.type(screen.getByPlaceholderText('Enter Alert Name'), 'myname');
@@ -267,7 +263,7 @@ describe('ProjectAlertsCreate', function () {
'Send a notification to all legacy integrations',
]);
- await userEvent.click(screen.getAllByLabelText('Delete Node')[2]);
+ await userEvent.click(screen.getAllByLabelText('Delete Node')[1]);
await userEvent.click(screen.getByText('Save Rule'));
@@ -280,10 +276,7 @@ describe('ProjectAlertsCreate', function () {
actions: [],
conditions: [
expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.NewHighPriorityIssueCondition',
- }),
- expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.ExistingHighPriorityIssueCondition',
+ id: 'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
}),
],
filterMatch: 'all',
@@ -340,10 +333,7 @@ describe('ProjectAlertsCreate', function () {
filterMatch: 'any',
conditions: [
expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.NewHighPriorityIssueCondition',
- }),
- expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.ExistingHighPriorityIssueCondition',
+ id: 'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
}),
],
actions: [],
@@ -392,10 +382,7 @@ describe('ProjectAlertsCreate', function () {
actions: [],
conditions: [
expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.NewHighPriorityIssueCondition',
- }),
- expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.ExistingHighPriorityIssueCondition',
+ id: 'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
}),
],
filterMatch: 'all',
@@ -455,11 +442,7 @@ describe('ProjectAlertsCreate', function () {
},
],
actions: [],
- conditions: [
- expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.ExistingHighPriorityIssueCondition',
- }),
- ],
+ conditions: [],
frequency: 60 * 24,
name: 'myname',
owner: null,
@@ -501,10 +484,7 @@ describe('ProjectAlertsCreate', function () {
],
conditions: [
expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.NewHighPriorityIssueCondition',
- }),
- expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.ExistingHighPriorityIssueCondition',
+ id: 'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
}),
],
filterMatch: 'all',
@@ -551,10 +531,7 @@ describe('ProjectAlertsCreate', function () {
actionMatch: 'any',
conditions: [
expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.NewHighPriorityIssueCondition',
- }),
- expect.objectContaining({
- id: 'sentry.rules.conditions.high_priority_issue.ExistingHighPriorityIssueCondition',
+ id: 'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
}),
],
filterMatch: 'all',
@@ -585,7 +562,6 @@ describe('ProjectAlertsCreate', function () {
createWrapper();
// delete existion conditions
await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
- await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
await waitFor(() => {
expect(mock).toHaveBeenCalled();
@@ -629,7 +605,6 @@ describe('ProjectAlertsCreate', function () {
it('shows error for incompatible conditions', async () => {
createWrapper();
await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
- await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
await selectEvent.select(screen.getByText('Add optional trigger...'), [
'A new issue is created',
@@ -656,7 +631,6 @@ describe('ProjectAlertsCreate', function () {
it('test any filterMatch', async () => {
createWrapper();
await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
- await userEvent.click(screen.getAllByLabelText('Delete Node')[0]);
await selectEvent.select(screen.getByText('Add optional trigger...'), [
'A new issue is created',
@@ -704,9 +678,7 @@ describe('ProjectAlertsCreate', function () {
method: 'POST',
body: ProjectAlertRuleFixture(),
});
-
- createWrapper({organization: {features: ['noisy-alert-warning']}});
- await userEvent.click((await screen.findAllByLabelText('Delete Node'))[0]);
+ createWrapper();
await userEvent.click((await screen.findAllByLabelText('Delete Node'))[0]);
await selectEvent.select(screen.getByText('Add action...'), [
@@ -738,8 +710,7 @@ describe('ProjectAlertsCreate', function () {
});
it('does not display noisy alert banner for legacy integrations', async function () {
- createWrapper({organization: {features: ['noisy-alert-warning']}});
- await userEvent.click((await screen.findAllByLabelText('Delete Node'))[0]);
+ createWrapper();
await userEvent.click((await screen.findAllByLabelText('Delete Node'))[0]);
await selectEvent.select(screen.getByText('Add action...'), [
diff --git a/static/app/views/alerts/create.tsx b/static/app/views/alerts/create.tsx
index e3b17255015cb..f82d438436cdc 100644
--- a/static/app/views/alerts/create.tsx
+++ b/static/app/views/alerts/create.tsx
@@ -142,24 +142,11 @@ function Create(props: Props) {
) : (
- {organization.features.includes('uptime-api-create-update') &&
- alertType === AlertRuleType.UPTIME ? (
- {
- router.push(
- normalizeUrl(
- `/organizations/${organization.slug}/alerts/rules/uptime/${project.slug}/${response.id}/details`
- )
- );
- }}
- />
+ {alertType === AlertRuleType.UPTIME ? (
+
) : !hasMetricAlerts || alertType === AlertRuleType.ISSUE ? (
id)}
members={members}
/>
@@ -172,7 +159,6 @@ function Create(props: Props) {
eventView={eventView}
wizardTemplate={wizardTemplate}
sessionId={sessionId.current}
- project={project}
userTeamIds={teams.map(({id}) => id)}
/>
) : (
@@ -181,7 +167,6 @@ function Create(props: Props) {
eventView={eventView}
wizardTemplate={wizardTemplate}
sessionId={sessionId.current}
- project={project}
userTeamIds={teams.map(({id}) => id)}
/>
))
diff --git a/static/app/views/alerts/list/rules/alertLastIncidentActivationInfo.spec.tsx b/static/app/views/alerts/list/rules/alertLastIncidentActivationInfo.spec.tsx
index 8f85ad5075421..34cf805b2429d 100644
--- a/static/app/views/alerts/list/rules/alertLastIncidentActivationInfo.spec.tsx
+++ b/static/app/views/alerts/list/rules/alertLastIncidentActivationInfo.spec.tsx
@@ -63,6 +63,6 @@ describe('AlertLastIncidentActivationInfo', function () {
} as const;
render( );
- expect(screen.getByText('Actively monitoring every 5 seconds')).toBeInTheDocument();
+ expect(screen.getByText('Actively monitoring every 1 minute')).toBeInTheDocument();
});
});
diff --git a/static/app/views/alerts/list/rules/alertRulesList.tsx b/static/app/views/alerts/list/rules/alertRulesList.tsx
index 389557d7dfa92..8561c5345ef63 100644
--- a/static/app/views/alerts/list/rules/alertRulesList.tsx
+++ b/static/app/views/alerts/list/rules/alertRulesList.tsx
@@ -32,6 +32,7 @@ import {useLocation} from 'sentry/utils/useLocation';
import useOrganization from 'sentry/utils/useOrganization';
import useRouter from 'sentry/utils/useRouter';
+import {MetricsRemovedAlertsWidgetsAlert} from '../../../metrics/metricsRemovedAlertsWidgetsAlert';
import FilterBar from '../../filterBar';
import type {CombinedAlerts} from '../../types';
import {AlertRuleType, CombinedAlertType} from '../../types';
@@ -188,6 +189,7 @@ function AlertRulesList() {
+
) : (
-
+
{zoomRenderProps => (
r.id === IssueAlertFilterType.ISSUE_CATEGORY);
+ if (!filterSet || !filterSet.length) {
+ return null;
+ }
+ const filterFeedback = filterSet.find(f => f.value === '6'); // category: feedback
+ return filterFeedback ? (
+
+ {tct(
+ 'This issue category condition is ONLY for feedbacks from the [linkWidget:built-in widget]. [linkModal: Crash-report modal] alerts can be enabled in [link:Project Settings].',
+ {
+ link: ,
+ linkWidget: (
+
+ ),
+ linkModal: (
+
+ ),
+ }
+ )}
+
+ ) : null;
+}
+
+const StyledFeedbackAlert = styled(Alert)`
+ margin: ${space(1)} 0 0 0;
+`;
diff --git a/static/app/views/alerts/rules/issue/index.spec.tsx b/static/app/views/alerts/rules/issue/index.spec.tsx
index cda304a4e0651..5b676c32f6f03 100644
--- a/static/app/views/alerts/rules/issue/index.spec.tsx
+++ b/static/app/views/alerts/rules/issue/index.spec.tsx
@@ -368,7 +368,7 @@ describe('IssueRuleEditor', function () {
// Add the adopted release filter
await selectEvent.select(
screen.getByText('Add optional filter...'),
- /The {oldest_or_newest} release associated/
+ /The {oldest_or_newest} adopted release associated/
);
const filtersContainer = await screen.findByTestId('rule-filters');
diff --git a/static/app/views/alerts/rules/issue/index.tsx b/static/app/views/alerts/rules/issue/index.tsx
index 9349df36139c1..852c2ca40be75 100644
--- a/static/app/views/alerts/rules/issue/index.tsx
+++ b/static/app/views/alerts/rules/issue/index.tsx
@@ -69,6 +69,7 @@ import routeTitleGen from 'sentry/utils/routeTitle';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
import withOrganization from 'sentry/utils/withOrganization';
import withProjects from 'sentry/utils/withProjects';
+import FeedbackAlertBanner from 'sentry/views/alerts/rules/issue/feedbackAlertBanner';
import {PreviewIssues} from 'sentry/views/alerts/rules/issue/previewIssues';
import SetupMessagingIntegrationButton, {
MessagingIntegrationAnalyticsView,
@@ -326,10 +327,7 @@ class IssueRuleEditor extends DeprecatedAsyncView {
if (!ruleId && !this.isDuplicateRule) {
// now that we've loaded all the possible conditions, we can populate the
// value of conditions for a new alert
- this.handleChange('conditions', [
- {id: IssueAlertConditionType.NEW_HIGH_PRIORITY_ISSUE},
- {id: IssueAlertConditionType.EXISTING_HIGH_PRIORITY_ISSUE},
- ]);
+ this.handleChange('conditions', [{id: IssueAlertConditionType.FIRST_SEEN_EVENT}]);
}
}
@@ -928,7 +926,6 @@ class IssueRuleEditor extends DeprecatedAsyncView {
];
return (
- this.props.organization.features.includes('noisy-alert-warning') &&
!!rule &&
!isSavedAlertRule(rule) &&
rule.conditions.length === 0 &&
@@ -1224,7 +1221,7 @@ class IssueRuleEditor extends DeprecatedAsyncView {
{t('Set conditions')} {' '}
{hasMessagingIntegrationOnboarding ? (
{
incompatibleFilters ? incompatibleFilters.at(-1) : null
}
/>
+
diff --git a/static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx b/static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx
index 044b485e8ff06..99623d4611298 100644
--- a/static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx
+++ b/static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx
@@ -1,10 +1,8 @@
import {GitHubIntegrationProviderFixture} from 'sentry-fixture/githubIntegrationProvider';
import {OrganizationFixture} from 'sentry-fixture/organization';
-import {ProjectFixture} from 'sentry-fixture/project';
-import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary';
+import {render, screen} from 'sentry-test/reactTestingLibrary';
-import * as indicators from 'sentry/actionCreators/indicator';
import {
makeClosableHeader,
makeCloseButton,
@@ -17,20 +15,11 @@ import MessagingIntegrationModal from 'sentry/views/alerts/rules/issue/messaging
jest.mock('sentry/actionCreators/modal');
describe('MessagingIntegrationModal', function () {
- let project, org;
+ const organization = OrganizationFixture();
const providerKeys = ['slack', 'discord', 'msteams'];
- const providers = (providerKey: string) => [
- GitHubIntegrationProviderFixture({key: providerKey}),
- ];
-
- beforeEach(function () {
- MockApiClient.clearMockResponses();
-
- project = ProjectFixture();
- org = OrganizationFixture();
-
- jest.clearAllMocks();
- });
+ const providers = providerKeys.map(providerKey =>
+ GitHubIntegrationProviderFixture({key: providerKey})
+ );
const getComponent = (closeModal = jest.fn(), props = {}) => (
{})}
Footer={ModalFooter}
{...props}
@@ -48,20 +36,8 @@ describe('MessagingIntegrationModal', function () {
);
it('renders', async function () {
- const mockResponses: jest.Mock[] = [];
- providerKeys.forEach(providerKey => {
- mockResponses.push(
- MockApiClient.addMockResponse({
- url: `/organizations/${org.slug}/config/integrations/?provider_key=${providerKey}`,
- body: {providers: providers(providerKey)},
- })
- );
- });
- render(getComponent(), {organization: org});
+ render(getComponent(), {organization: organization});
- mockResponses.forEach(mock => {
- expect(mock).toHaveBeenCalled();
- });
const heading = await screen.findByRole('heading', {
name: /connect with a messaging tool/i,
});
@@ -69,30 +45,4 @@ describe('MessagingIntegrationModal', function () {
const buttons = await screen.findAllByRole('button', {name: /add integration/i});
expect(buttons).toHaveLength(providerKeys.length);
});
-
- it('closes on error', async function () {
- const closeModal = jest.fn();
- jest.spyOn(indicators, 'addErrorMessage');
-
- const mockResponses: jest.Mock[] = [];
- providerKeys.forEach(value => {
- mockResponses.push(
- MockApiClient.addMockResponse({
- url: `/organizations/${org.slug}/config/integrations/?provider_key=${value}`,
- statusCode: 400,
- body: {error: 'internal error'},
- })
- );
- });
-
- render(getComponent(closeModal), {organization: org});
-
- mockResponses.forEach(mock => {
- expect(mock).toHaveBeenCalled();
- });
- await waitFor(() => {
- expect(closeModal).toHaveBeenCalled();
- expect(indicators.addErrorMessage).toHaveBeenCalled();
- });
- });
});
diff --git a/static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx b/static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx
index 5e9525364b34d..d7f2e9fa80118 100644
--- a/static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx
+++ b/static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx
@@ -1,22 +1,17 @@
import {Fragment} from 'react';
import styled from '@emotion/styled';
-import {addErrorMessage} from 'sentry/actionCreators/indicator';
import type {ModalRenderProps} from 'sentry/actionCreators/modal';
-import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {IntegrationProvider} from 'sentry/types/integrations';
-import type {Project} from 'sentry/types/project';
-import {useApiQueries} from 'sentry/utils/queryClient';
-import useOrganization from 'sentry/utils/useOrganization';
import AddIntegrationRow from 'sentry/views/alerts/rules/issue/addIntegrationRow';
import {IntegrationContext} from 'sentry/views/settings/organizationIntegrations/integrationContext';
type Props = ModalRenderProps & {
headerContent: React.ReactNode;
- project: Project;
- providerKeys: string[];
+ providers: IntegrationProvider[];
bodyContent?: React.ReactNode;
+ modalParams?: {[key: string]: string};
onAddIntegration?: () => void;
};
@@ -26,28 +21,10 @@ function MessagingIntegrationModal({
Body,
headerContent,
bodyContent,
- providerKeys,
- project,
+ providers,
+ modalParams,
onAddIntegration,
}: Props) {
- const organization = useOrganization();
- const queryResults = useApiQueries<{providers: IntegrationProvider[]}>(
- providerKeys.map((providerKey: string) => [
- `/organizations/${organization.slug}/config/integrations/?provider_key=${providerKey}`,
- ]),
- {staleTime: Infinity}
- );
-
- if (queryResults.some(({isPending}) => isPending)) {
- return null;
- }
-
- if (queryResults.some(({isError}) => isError)) {
- closeModal();
- addErrorMessage(t('Failed to load integration data'));
- return null;
- }
-
return (
@@ -56,12 +33,7 @@ function MessagingIntegrationModal({
{bodyContent}
- {queryResults.map(result => {
- const provider = result.data?.providers[0];
-
- if (!provider) {
- return null;
- }
+ {providers.map(provider => {
return (
diff --git a/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.spec.tsx b/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.spec.tsx
index 4e39b4863d233..28810036523d1 100644
--- a/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.spec.tsx
+++ b/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.spec.tsx
@@ -20,11 +20,12 @@ describe('SetupAlertIntegrationButton', function () {
const providers = (providerKey: string) => [
GitHubIntegrationProviderFixture({key: providerKey}),
];
- const providerKey = 'slack';
+ const providerKeys = ['slack', 'discord', 'msteams'];
+ let mockResponses: jest.Mock[] = [];
const getComponent = () => (
@@ -32,69 +33,80 @@ describe('SetupAlertIntegrationButton', function () {
beforeEach(function () {
MockApiClient.clearMockResponses();
- MockApiClient.addMockResponse({
- url: `/organizations/${organization.slug}/config/integrations/?provider_key=${providerKey}`,
- body: {providers: providers(providerKey)},
+ mockResponses = [];
+ providerKeys.forEach(providerKey => {
+ mockResponses.push(
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/config/integrations/?provider_key=${providerKey}`,
+ body: {providers: providers(providerKey)},
+ })
+ );
});
- jest.clearAllMocks();
});
it('renders when no integration is installed', async function () {
- MockApiClient.addMockResponse({
- url: `/projects/${organization.slug}/${project.slug}/`,
- body: {
- ...project,
- hasAlertIntegrationInstalled: false,
- },
- });
+ mockResponses.push(
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/integrations/?integrationType=messaging`,
+ body: [{status: 'disabled'}, {status: 'disabled'}, {status: 'disabled'}],
+ })
+ );
render(getComponent(), {organization: organization});
+ mockResponses.forEach(mock => {
+ expect(mock).toHaveBeenCalled();
+ });
await screen.findByRole('button', {name: /connect to messaging/i});
});
it('does not render button if alert integration installed', function () {
- MockApiClient.addMockResponse({
- url: `/projects/${organization.slug}/${project.slug}/`,
- body: {
- ...project,
- hasAlertIntegrationInstalled: true,
- },
- });
+ mockResponses.push(
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/integrations/?integrationType=messaging`,
+ body: [{status: 'active'}, {status: 'disabled'}, {status: 'disabled'}],
+ })
+ );
render(getComponent(), {organization: organization});
+ mockResponses.forEach(mock => {
+ expect(mock).toHaveBeenCalled();
+ });
expect(screen.queryByRole('button')).not.toBeInTheDocument();
});
it('opens modal when clicked', async function () {
- MockApiClient.addMockResponse({
- url: `/projects/${organization.slug}/${project.slug}/`,
- body: {
- ...project,
- hasAlertIntegrationInstalled: false,
- },
- });
+ mockResponses.push(
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/integrations/?integrationType=messaging`,
+ body: [{status: 'disabled'}, {status: 'disabled'}, {status: 'disabled'}],
+ })
+ );
render(getComponent(), {organization: organization});
+ mockResponses.forEach(mock => {
+ expect(mock).toHaveBeenCalled();
+ });
const button = await screen.findByRole('button', {name: /connect to messaging/i});
await userEvent.click(button);
expect(openModal).toHaveBeenCalled();
});
- it('does not render button if project is loading', function () {
- MockApiClient.addMockResponse({
- url: `/projects/${organization.slug}/${project.slug}/`,
- statusCode: 400,
- body: {error: 'internal error'},
- });
+ it('does not render button if API errors', function () {
+ mockResponses.push(
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/integrations/?integrationType=messaging`,
+ statusCode: 400,
+ body: {error: 'internal error'},
+ })
+ );
render(getComponent(), {organization: organization});
expect(screen.queryByRole('button')).not.toBeInTheDocument();
});
it('disables button if user does not have integration feature', async function () {
- MockApiClient.addMockResponse({
- url: `/projects/${organization.slug}/${project.slug}/`,
- body: {
- ...project,
- hasAlertIntegrationInstalled: false,
- },
- });
+ mockResponses.push(
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/integrations/?integrationType=messaging`,
+ body: [{status: 'disabled'}, {status: 'disabled'}, {status: 'disabled'}],
+ })
+ );
HookStore.add('integrations:feature-gates', () => {
return {
@@ -110,6 +122,9 @@ describe('SetupAlertIntegrationButton', function () {
});
render(getComponent(), {organization: organization});
+ mockResponses.forEach(mock => {
+ expect(mock).toHaveBeenCalled();
+ });
await screen.findByRole('button', {name: /connect to messaging/i});
expect(screen.getByRole('button')).toBeDisabled();
});
diff --git a/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.tsx b/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.tsx
index e1ecc143993b2..bac7fad05fb1c 100644
--- a/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.tsx
+++ b/static/app/views/alerts/rules/issue/setupMessagingIntegrationButton.tsx
@@ -6,76 +6,71 @@ import {Tooltip} from 'sentry/components/tooltip';
import {t} from 'sentry/locale';
import PluginIcon from 'sentry/plugins/components/pluginIcon';
import {space} from 'sentry/styles/space';
-import type {IntegrationProvider} from 'sentry/types/integrations';
-import type {Project} from 'sentry/types/project';
+import type {
+ IntegrationProvider,
+ OrganizationIntegration,
+} from 'sentry/types/integrations';
import {trackAnalytics} from 'sentry/utils/analytics';
import {getIntegrationFeatureGate} from 'sentry/utils/integrationUtil';
-import {useApiQuery} from 'sentry/utils/queryClient';
+import {useApiQueries, useApiQuery} from 'sentry/utils/queryClient';
import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams';
import useOrganization from 'sentry/utils/useOrganization';
import MessagingIntegrationModal from 'sentry/views/alerts/rules/issue/messagingIntegrationModal';
-interface ProjectWithAlertIntegrationInfo extends Project {
- hasAlertIntegrationInstalled: boolean;
-}
-
export enum MessagingIntegrationAnalyticsView {
ALERT_RULE_CREATION = 'alert_rule_creation',
+ PROJECT_CREATION = 'project_creation',
}
type Props = {
- projectSlug: string;
refetchConfigs: () => void;
analyticsParams?: {
view: MessagingIntegrationAnalyticsView;
};
+ projectId?: string;
};
function SetupMessagingIntegrationButton({
- projectSlug,
refetchConfigs,
analyticsParams,
+ projectId,
}: Props) {
const providerKeys = ['slack', 'discord', 'msteams'];
const organization = useOrganization();
const onAddIntegration = () => {
- projectQuery.refetch();
+ messagingIntegrationsQuery.refetch();
refetchConfigs();
};
- const projectQuery = useApiQuery(
- [
- `/projects/${organization.slug}/${projectSlug}/`,
- {query: {expand: 'hasAlertIntegration'}},
- ],
+ const messagingIntegrationsQuery = useApiQuery(
+ [`/organizations/${organization.slug}/integrations/?integrationType=messaging`],
{staleTime: Infinity}
);
- // Only need to fetch the first provider to check if the feature is enabled, as all providers will return the same response
- const integrationQuery = useApiQuery<{providers: IntegrationProvider[]}>(
- [
- `/organizations/${organization.slug}/config/integrations/?provider_key=${providerKeys[0]}`,
- ],
+ const integrationProvidersQuery = useApiQueries<{providers: IntegrationProvider[]}>(
+ providerKeys.map((providerKey: string) => [
+ `/organizations/${organization.slug}/config/integrations/?provider_key=${providerKey}`,
+ ]),
{staleTime: Infinity}
);
const {IntegrationFeatures} = getIntegrationFeatureGate();
- const shouldRenderSetupButton =
- projectQuery.data != null &&
- !projectQuery.data.hasAlertIntegrationInstalled &&
- integrationQuery.data != null;
+ const shouldRenderSetupButton = messagingIntegrationsQuery.data?.every(
+ integration => integration.status !== 'active'
+ );
useRouteAnalyticsParams({
setup_message_integration_button_shown: shouldRenderSetupButton,
});
if (
- projectQuery.isPending ||
- projectQuery.isError ||
- integrationQuery.isPending ||
- integrationQuery.isError
+ messagingIntegrationsQuery.isPending ||
+ messagingIntegrationsQuery.isError ||
+ integrationProvidersQuery.some(({isPending}) => isPending) ||
+ integrationProvidersQuery.some(({isError}) => isError) ||
+ integrationProvidersQuery[0].data == null
) {
return null;
}
@@ -87,7 +82,7 @@ function SetupMessagingIntegrationButton({
return (
{({disabled, disabledReason}) => (
result.data?.providers[0])
+ .filter(
+ (provider): provider is IntegrationProvider =>
+ provider !== undefined
+ )}
onAddIntegration={onAddIntegration}
+ {...(projectId && {modalParams: {projectId: projectId}})}
/>
),
{
@@ -124,7 +124,6 @@ function SetupMessagingIntegrationButton({
}
);
trackAnalytics('onboarding.messaging_integration_modal_rendered', {
- project_id: projectQuery.data.id,
organization,
...analyticsParams,
});
diff --git a/static/app/views/alerts/rules/metric/constants.tsx b/static/app/views/alerts/rules/metric/constants.tsx
index faa94df601f96..b20254c822cc2 100644
--- a/static/app/views/alerts/rules/metric/constants.tsx
+++ b/static/app/views/alerts/rules/metric/constants.tsx
@@ -24,6 +24,7 @@ import type {AlertType, WizardRuleTemplate} from 'sentry/views/alerts/wizard/opt
export const DEFAULT_COUNT_TIME_WINDOW = 1; // 1min
export const DEFAULT_CHANGE_TIME_WINDOW = 60; // 1h
+export const DEFAULT_DYNAMIC_TIME_WINDOW = 60; // 1h
export const DEFAULT_CHANGE_COMP_DELTA = 10080; // 1w
export const DEFAULT_AGGREGATE = 'count()';
@@ -89,6 +90,9 @@ export const DuplicateMetricFields: string[] = [
'name',
'projectId',
'comparisonDelta',
+ 'seasonality',
+ 'sensitivity',
+ 'detectionType',
];
export const DuplicateTriggerFields: string[] = ['alertThreshold', 'label'];
diff --git a/static/app/views/alerts/rules/metric/create.spec.tsx b/static/app/views/alerts/rules/metric/create.spec.tsx
index 8ac9eb80640b4..0ddae132b2927 100644
--- a/static/app/views/alerts/rules/metric/create.spec.tsx
+++ b/static/app/views/alerts/rules/metric/create.spec.tsx
@@ -29,6 +29,10 @@ describe('Incident Rules Create', function () {
url: '/organizations/org-slug/events-stats/',
body: EventsStatsFixture(),
});
+ MockApiClient.addMockResponse({
+ url: '/organizations/org-slug/events/anomalies/',
+ body: [],
+ });
MockApiClient.addMockResponse({
url: '/organizations/org-slug/alert-rules/available-actions/',
body: [
diff --git a/static/app/views/alerts/rules/metric/details/body.tsx b/static/app/views/alerts/rules/metric/details/body.tsx
index 777255d06fc67..c26c950cf33bb 100644
--- a/static/app/views/alerts/rules/metric/details/body.tsx
+++ b/static/app/views/alerts/rules/metric/details/body.tsx
@@ -148,9 +148,10 @@ export default function MetricDetailsBody({
const {dataset, aggregate, query} = rule;
const eventType = extractEventTypeFilterFromRule(rule);
- const queryWithTypeFilter = (
- query ? `(${query}) AND (${eventType})` : eventType
- ).trim();
+ const queryWithTypeFilter =
+ dataset === Dataset.EVENTS_ANALYTICS_PLATFORM
+ ? query
+ : (query ? `(${query}) AND (${eventType})` : eventType).trim();
const relativeOptions = {
...SELECTOR_RELATIVE_PERIODS,
...(rule.timeWindow > 1 ? {[TimePeriod.FOURTEEN_DAYS]: t('Last 14 days')} : {}),
@@ -170,12 +171,11 @@ export default function MetricDetailsBody({
return (
-
- {isCustomMetricAlert(rule.aggregate) &&
- !isInsightsMetricAlert(rule.aggregate) && (
-
- )}
-
+ {isCustomMetricAlert(rule.aggregate) && !isInsightsMetricAlert(rule.aggregate) && (
+
+
+
+ )}
{selectedIncident?.alertRule.status === AlertRuleStatus.SNAPSHOT && (
diff --git a/static/app/views/alerts/rules/metric/details/metricChart.tsx b/static/app/views/alerts/rules/metric/details/metricChart.tsx
index 7b5ebc42fcabd..62d210986a0a8 100644
--- a/static/app/views/alerts/rules/metric/details/metricChart.tsx
+++ b/static/app/views/alerts/rules/metric/details/metricChart.tsx
@@ -364,7 +364,6 @@ class MetricChart extends PureComponent {
{getDynamicText({
value: (
this.handleZoom(zoomArgs.start, zoomArgs.end)}
diff --git a/static/app/views/alerts/rules/metric/details/metricChartOption.tsx b/static/app/views/alerts/rules/metric/details/metricChartOption.tsx
index e5caf55e667af..f895a141d473a 100644
--- a/static/app/views/alerts/rules/metric/details/metricChartOption.tsx
+++ b/static/app/views/alerts/rules/metric/details/metricChartOption.tsx
@@ -1,5 +1,5 @@
import color from 'color';
-import type {MarkAreaComponentOption, YAXisComponentOption} from 'echarts';
+import type {YAXisComponentOption} from 'echarts';
import moment from 'moment-timezone';
import type {AreaChartProps, AreaChartSeries} from 'sentry/components/charts/areaChart';
@@ -16,12 +16,9 @@ import {getCrashFreeRateSeries} from 'sentry/utils/sessions';
import {lightTheme as theme} from 'sentry/utils/theme';
import type {MetricRule, Trigger} from 'sentry/views/alerts/rules/metric/types';
import {AlertRuleTriggerType, Dataset} from 'sentry/views/alerts/rules/metric/types';
+import {getAnomalyMarkerSeries} from 'sentry/views/alerts/rules/metric/utils/anomalyChart';
import type {Anomaly, Incident} from 'sentry/views/alerts/types';
-import {
- AnomalyType,
- IncidentActivityType,
- IncidentStatus,
-} from 'sentry/views/alerts/types';
+import {IncidentActivityType, IncidentStatus} from 'sentry/views/alerts/types';
import {
ALERT_CHART_MIN_MAX_BUFFER,
alertAxisFormatter,
@@ -140,48 +137,6 @@ function createIncidentSeries(
};
}
-function createAnomalyMarkerSeries(
- lineColor: string,
- timestamp: string
-): AreaChartSeries {
- const formatter = ({value}: any) => {
- const time = formatTooltipDate(moment(value), 'MMM D, YYYY LT');
- return [
- ``,
- ``,
- '
',
- ].join('');
- };
-
- return {
- seriesName: 'Anomaly Line',
- type: 'line',
- markLine: MarkLine({
- silent: false,
- lineStyle: {color: lineColor, type: 'dashed'},
- label: {
- silent: true,
- show: false,
- },
- data: [
- {
- xAxis: timestamp,
- },
- ],
- tooltip: {
- formatter,
- },
- }),
- data: [],
- tooltip: {
- trigger: 'item',
- alwaysShowContent: true,
- formatter,
- },
- };
-}
-
export type MetricChartData = {
rule: MetricRule;
timeseriesData: Series[];
@@ -263,8 +218,11 @@ export function getMetricAlertChartOption({
) / ALERT_CHART_MIN_MAX_BUFFER
)
: 0;
- const firstPoint = new Date(dataArr[0]?.name).getTime();
- const lastPoint = new Date(dataArr[dataArr.length - 1]?.name).getTime();
+ const startDate = new Date(dataArr[0]?.name);
+ const endDate =
+ dataArr.length > 1 ? new Date(dataArr[dataArr.length - 1]?.name) : new Date();
+ const firstPoint = startDate.getTime();
+ const lastPoint = endDate.getTime();
const totalDuration = lastPoint - firstPoint;
let waitingForDataDuration = 0;
let criticalDuration = 0;
@@ -403,77 +361,8 @@ export function getMetricAlertChartOption({
});
}
if (anomalies) {
- const anomalyBlocks: MarkAreaComponentOption['data'] = [];
- let start: string | undefined;
- let end: string | undefined;
- anomalies
- .filter(anomalyts => {
- const ts = new Date(anomalyts.timestamp).getTime();
- return firstPoint < ts && ts < lastPoint;
- })
- .forEach(anomalyts => {
- const {anomaly, timestamp} = anomalyts;
-
- if (
- [AnomalyType.high, AnomalyType.low].includes(anomaly.anomaly_type as string)
- ) {
- if (!start) {
- // If this is the start of an anomaly, set start
- start = new Date(timestamp).toISOString();
- }
- // as long as we have an valid anomaly type - continue tracking until we've hit the end
- end = new Date(timestamp).toISOString();
- } else {
- if (start && end) {
- // If we've hit a non-anomaly type, push the block
- anomalyBlocks.push([
- {
- xAxis: start,
- },
- {
- xAxis: end,
- },
- ]);
- // Create a marker line for the start of the anomaly
- series.push(createAnomalyMarkerSeries(theme.purple300, start));
- }
- // reset the start/end to capture the next anomaly block
- start = undefined;
- end = undefined;
- }
- });
- if (start && end) {
- // push in the last block
- // Create a marker line for the start of the anomaly
- series.push(createAnomalyMarkerSeries(theme.purple300, start));
- anomalyBlocks.push([
- {
- xAxis: start,
- },
- {
- xAxis: end,
- },
- ]);
- }
-
- // NOTE: if timerange is too small - highlighted area will not be visible
- // Possibly provide a minimum window size if the time range is too large?
- series.push({
- seriesName: '',
- name: 'Anomaly',
- type: 'line',
- smooth: true,
- data: [],
- markArea: {
- itemStyle: {
- color: 'rgba(255, 173, 177, 0.4)',
- },
- silent: true, // potentially don't make this silent if we want to render the `anomaly detected` in the tooltip
- data: anomalyBlocks,
- },
- });
+ series.push(...getAnomalyMarkerSeries(anomalies, {startDate, endDate}));
}
-
let maxThresholdValue = 0;
if (!rule.comparisonDelta && warningTrigger?.alertThreshold) {
const {alertThreshold} = warningTrigger;
diff --git a/static/app/views/alerts/rules/metric/details/sidebar.tsx b/static/app/views/alerts/rules/metric/details/sidebar.tsx
index 7b0a63d12f270..4047911137753 100644
--- a/static/app/views/alerts/rules/metric/details/sidebar.tsx
+++ b/static/app/views/alerts/rules/metric/details/sidebar.tsx
@@ -4,12 +4,13 @@ import styled from '@emotion/styled';
import {OnDemandWarningIcon} from 'sentry/components/alerts/onDemandMetricAlert';
import ActorAvatar from 'sentry/components/avatar/actorAvatar';
import AlertBadge from 'sentry/components/badge/alertBadge';
+import {Button} from 'sentry/components/button';
import {SectionHeading} from 'sentry/components/charts/styles';
import {DateTime} from 'sentry/components/dateTime';
import Duration from 'sentry/components/duration';
import {KeyValueTable, KeyValueTableRow} from 'sentry/components/keyValueTable';
import TimeSince from 'sentry/components/timeSince';
-import {IconDiamond} from 'sentry/icons';
+import {IconDiamond, IconMegaphone} from 'sentry/icons';
import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {ActivationConditionType, MonitorType} from 'sentry/types/alerts';
@@ -17,6 +18,7 @@ import type {Actor} from 'sentry/types/core';
import getDynamicText from 'sentry/utils/getDynamicText';
import {getSearchFilters, isOnDemandSearchKey} from 'sentry/utils/onDemandMetrics/index';
import {capitalize} from 'sentry/utils/string/capitalize';
+import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
import {COMPARISON_DELTA_OPTIONS} from 'sentry/views/alerts/rules/metric/constants';
import type {Action, MetricRule} from 'sentry/views/alerts/rules/metric/types';
import {
@@ -144,7 +146,7 @@ export function MetricDetailsSidebar({
const ownerId = rule.owner?.split(':')[1];
const teamActor = ownerId && {type: 'team' as Actor['type'], id: ownerId, name: ''};
- let conditionType;
+ let conditionType: React.ReactNode;
const activationCondition =
rule.monitorType === MonitorType.ACTIVATED &&
typeof rule.activationCondition !== 'undefined' &&
@@ -160,6 +162,29 @@ export function MetricDetailsSidebar({
break;
}
+ const openForm = useFeedbackForm();
+
+ const feedbackButton = openForm ? (
+ {
+ openForm({
+ formTitle: 'Anomaly Detection Feedback',
+ messagePlaceholder: t(
+ 'How can we make alerts using anomaly detection more useful?'
+ ),
+ tags: {
+ ['feedback.source']: 'dynamic_thresholding',
+ ['feedback.owner']: 'ml-ai',
+ },
+ });
+ }}
+ size="xs"
+ icon={ }
+ >
+ Give Feedback
+
+ ) : null;
+
return (
@@ -265,7 +290,7 @@ export function MetricDetailsSidebar({
/>
{rule.detectionType === AlertRuleComparisonType.DYNAMIC && (
+ {rule.detectionType === AlertRuleComparisonType.DYNAMIC && feedbackButton}
);
}
diff --git a/static/app/views/alerts/rules/metric/eapField.spec.tsx b/static/app/views/alerts/rules/metric/eapField.spec.tsx
new file mode 100644
index 0000000000000..fd8dc43f79448
--- /dev/null
+++ b/static/app/views/alerts/rules/metric/eapField.spec.tsx
@@ -0,0 +1,34 @@
+import {initializeOrg} from 'sentry-test/initializeOrg';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+
+import EAPField from 'sentry/views/alerts/rules/metric/eapField';
+
+describe('EAPField', () => {
+ it('renders', () => {
+ const {project} = initializeOrg();
+ render(
+ {}}
+ project={project}
+ />
+ );
+ screen.getByText('count');
+ screen.getByText('span.duration');
+ });
+
+ it('should call onChange with the new aggregate string when switching aggregates', async () => {
+ const {project} = initializeOrg();
+ const onChange = jest.fn();
+ render(
+
+ );
+ await userEvent.click(screen.getByText('count'));
+ await userEvent.click(await screen.findByText('max'));
+ await waitFor(() => expect(onChange).toHaveBeenCalledWith('max(span.duration)', {}));
+ });
+});
diff --git a/static/app/views/alerts/rules/metric/eapField.tsx b/static/app/views/alerts/rules/metric/eapField.tsx
new file mode 100644
index 0000000000000..eb6ea89aa5c2d
--- /dev/null
+++ b/static/app/views/alerts/rules/metric/eapField.tsx
@@ -0,0 +1,140 @@
+import {useCallback, useEffect} from 'react';
+import styled from '@emotion/styled';
+
+import SelectControl from 'sentry/components/forms/controls/selectControl';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+import type {Project} from 'sentry/types/project';
+import {parseFunction} from 'sentry/utils/discover/fields';
+import {ALLOWED_EXPLORE_VISUALIZE_AGGREGATES} from 'sentry/utils/fields';
+
+export const DEFAULT_EAP_FIELD = 'span.duration';
+export const DEFAULT_EAP_METRICS_ALERT_FIELD = `count(${DEFAULT_EAP_FIELD})`;
+
+interface Props {
+ aggregate: string;
+ onChange: (value: string, meta: Record) => void;
+ project: Project;
+}
+
+// Use the same aggregates/operations available in the explore view
+const OPERATIONS = [
+ ...ALLOWED_EXPLORE_VISUALIZE_AGGREGATES.map(aggregate => ({
+ label: aggregate,
+ value: aggregate,
+ })),
+];
+
+// TODO(edward): Just hardcode the EAP fields for now. We should use SpanTagsProvider in the future to match the Explore UI.
+const EAP_FIELD_OPTIONS = [
+ {
+ name: 'span.duration',
+ },
+ {
+ name: 'span.self_time',
+ },
+];
+
+function EAPField({aggregate, onChange}: Props) {
+ // We parse out the aggregation and field from the aggregate string.
+ // This only works for aggregates with <= 1 argument.
+ const {
+ name: aggregation,
+ arguments: [field],
+ } = parseFunction(aggregate) ?? {arguments: [undefined]};
+
+ useEffect(() => {
+ const selectedMriMeta = EAP_FIELD_OPTIONS.find(metric => metric.name === field);
+ if (field && !selectedMriMeta) {
+ const newSelection = EAP_FIELD_OPTIONS[0];
+ if (newSelection) {
+ onChange(`count(${newSelection.name})`, {});
+ } else if (aggregate !== DEFAULT_EAP_METRICS_ALERT_FIELD) {
+ onChange(DEFAULT_EAP_METRICS_ALERT_FIELD, {});
+ }
+ }
+ }, [onChange, aggregate, aggregation, field]);
+
+ const handleFieldChange = useCallback(
+ option => {
+ const selectedMeta = EAP_FIELD_OPTIONS.find(metric => metric.name === option.value);
+ if (!selectedMeta) {
+ return;
+ }
+ onChange(`${aggregation}(${option.value})`, {});
+ },
+ [onChange, aggregation]
+ );
+
+ const handleOperationChange = useCallback(
+ option => {
+ if (field) {
+ onChange(`${option.value}(${field})`, {});
+ } else {
+ onChange(`${option.value}(${DEFAULT_EAP_FIELD})`, {});
+ }
+ },
+ [field, onChange]
+ );
+
+ // As SelectControl does not support an options size limit out of the box
+ // we work around it by using the async variant of the control
+ const getFieldOptions = useCallback((searchText: string) => {
+ const filteredMeta = EAP_FIELD_OPTIONS.filter(
+ ({name}) =>
+ searchText === '' || name.toLowerCase().includes(searchText.toLowerCase())
+ );
+
+ const options = filteredMeta.map(metric => {
+ return {
+ label: metric.name,
+ value: metric.name,
+ };
+ });
+ return options;
+ }, []);
+
+ // When using the async variant of SelectControl, we need to pass in an option object instead of just the value
+ const selectedOption = field && {
+ label: field,
+ value: field,
+ };
+
+ return (
+
+
+
+ EAP_FIELD_OPTIONS.length === 0
+ ? t('No metrics in this project')
+ : t('No options')
+ }
+ async
+ defaultOptions={getFieldOptions('')}
+ loadOptions={searchText => Promise.resolve(getFieldOptions(searchText))}
+ filterOption={() => true}
+ value={selectedOption}
+ onChange={handleFieldChange}
+ />
+
+ );
+}
+
+export default EAPField;
+
+const Wrapper = styled('div')`
+ display: flex;
+ gap: ${space(1)};
+`;
+
+const StyledSelectControl = styled(SelectControl)`
+ width: 200px;
+`;
diff --git a/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx b/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx
index 885f104a033be..81f85823e7215 100644
--- a/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx
+++ b/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx
@@ -12,7 +12,7 @@ import {
OnDemandMetricAlert,
OnDemandWarningIcon,
} from 'sentry/components/alerts/onDemandMetricAlert';
-import SearchBar, {getHasTag} from 'sentry/components/events/searchBar';
+import {getHasTag} from 'sentry/components/events/searchBar';
import {
STATIC_FIELD_TAGS,
STATIC_FIELD_TAGS_WITHOUT_ERROR_FIELDS,
@@ -32,7 +32,6 @@ import Panel from 'sentry/components/panels/panel';
import PanelBody from 'sentry/components/panels/panelBody';
import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
import {InvalidReason} from 'sentry/components/searchSyntax/parser';
-import {SearchInvalidTag} from 'sentry/components/smartSearchBar/searchInvalidTag';
import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {ActivationConditionType, MonitorType} from 'sentry/types/alerts';
@@ -715,7 +714,7 @@ class RuleConditionsForm extends PureComponent {
}}
flexibleControlStateSize
>
- {({onChange, onBlur, onKeyDown, initialData, value}) => {
+ {({onChange, onBlur, initialData, value}) => {
return (hasCustomMetrics(organization) &&
alertType === 'custom_metrics') ||
alertType === 'insights_metrics' ? (
@@ -734,111 +733,39 @@ class RuleConditionsForm extends PureComponent {
/>
) : (
- {organization.features.includes('search-query-builder-alerts') ? (
- {
- onFilterSearch(query, true);
- onChange(query, {});
- }}
- onBlur={(query, {parsedQuery}) => {
- onFilterSearch(query, parsedQuery);
- onBlur(query);
- }}
- // We only need strict validation for Transaction queries, everything else is fine
- disallowUnsupportedFilters={
- organization.features.includes('alert-allow-indexed') ||
- (hasOnDemandMetricAlertFeature(organization) &&
- isOnDemandQueryString(value))
- ? false
- : dataset === Dataset.GENERIC_METRICS
- }
- />
- ) : (
- {
- if (dataset !== Dataset.GENERIC_METRICS) {
- return null;
- }
- return (
- {item.desc},
- }
- )}
- docLink="https://docs.sentry.io/product/alerts/create-alerts/metric-alert-config/#tags--properties"
- />
- );
- }}
- searchSource="alert_builder"
- defaultQuery={initialData?.query ?? ''}
- {...getSupportedAndOmittedTags(dataset, organization)}
- includeSessionTagsValues={dataset === Dataset.SESSIONS}
- disabled={disabled || isErrorMigration}
- useFormWrapper={false}
- organization={organization}
- placeholder={this.searchPlaceholder}
- onChange={onChange}
- query={initialData.query}
- // We only need strict validation for Transaction queries, everything else is fine
- highlightUnsupportedTags={
- organization.features.includes('alert-allow-indexed') ||
- (hasOnDemandMetricAlertFeature(organization) &&
- isOnDemandQueryString(value))
- ? false
- : dataset === Dataset.GENERIC_METRICS
- }
- onKeyDown={e => {
- /**
- * Do not allow enter key to submit the alerts form since it is unlikely
- * users will be ready to create the rule as this sits above required fields.
- */
- if (e.key === 'Enter') {
- e.preventDefault();
- e.stopPropagation();
- }
- onKeyDown?.(e);
- }}
- onClose={(query, {validSearch}) => {
- onFilterSearch(query, validSearch);
- onBlur(query);
- }}
- onSearch={query => {
- onFilterSearch(query, true);
- onChange(query, {});
- }}
- hasRecentSearches={dataset !== Dataset.SESSIONS}
- />
- )}
+ {
+ onFilterSearch(query, true);
+ onChange(query, {});
+ }}
+ onBlur={(query, {parsedQuery}) => {
+ onFilterSearch(query, parsedQuery);
+ onBlur(query);
+ }}
+ // We only need strict validation for Transaction queries, everything else is fine
+ disallowUnsupportedFilters={
+ organization.features.includes('alert-allow-indexed') ||
+ (hasOnDemandMetricAlertFeature(organization) &&
+ isOnDemandQueryString(value))
+ ? false
+ : dataset === Dataset.GENERIC_METRICS
+ }
+ />
{isExtrapolatedChartData && isOnDemandQueryString(value) && (
- p.disabled &&
- `
- background: ${p.theme.backgroundSecondary};
- color: ${p.theme.disabled};
- cursor: not-allowed;
- `}
-`;
-
const StyledListItem = styled(ListItem)`
margin-bottom: ${space(0.5)};
font-size: ${p => p.theme.fontSizeExtraLarge};
diff --git a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
index 76cab955847a2..5c5670098f529 100644
--- a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
+++ b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
@@ -3,7 +3,7 @@ import {IncidentTriggerFixture} from 'sentry-fixture/incidentTrigger';
import {MetricRuleFixture} from 'sentry-fixture/metricRule';
import {initializeOrg} from 'sentry-test/initializeOrg';
-import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
import selectEvent from 'sentry-test/selectEvent';
import {addErrorMessage} from 'sentry/actionCreators/indicator';
@@ -32,7 +32,7 @@ jest.mock('sentry/utils/analytics', () => ({
}));
describe('Incident Rules Form', () => {
- let organization, project, router, location;
+ let organization, project, router, location, anomalies;
// create wrapper
const createWrapper = props =>
render(
@@ -100,6 +100,16 @@ describe('Incident Rules Form', () => {
url: '/organizations/org-slug/metrics/tags/',
body: [],
});
+ MockApiClient.addMockResponse({
+ method: 'GET',
+ url: '/organizations/org-slug/recent-searches/',
+ body: [],
+ });
+ anomalies = MockApiClient.addMockResponse({
+ method: 'POST',
+ url: '/organizations/org-slug/events/anomalies/',
+ body: [],
+ });
});
afterEach(() => {
@@ -365,7 +375,11 @@ describe('Incident Rules Form', () => {
});
it('creates an anomaly detection rule', async () => {
- organization.features = [...organization.features, 'anomaly-detection-alerts'];
+ organization.features = [
+ ...organization.features,
+ 'anomaly-detection-alerts',
+ 'anomaly-detection-rollout',
+ ];
const rule = MetricRuleFixture({
sensitivity: AlertRuleSensitivity.MEDIUM,
seasonality: AlertRuleSeasonality.AUTO,
@@ -379,14 +393,22 @@ describe('Incident Rules Form', () => {
dataset: 'events',
},
});
- expect(
- await screen.findByLabelText(
- 'Anomaly: whenever values are outside of expected bounds'
- )
- ).toBeChecked();
expect(
await screen.findByRole('textbox', {name: 'Level of responsiveness'})
).toBeInTheDocument();
+ expect(anomalies).toHaveBeenLastCalledWith(
+ expect.anything(),
+ expect.objectContaining({
+ data: expect.objectContaining({
+ config: {
+ direction: 'up',
+ sensitivity: AlertRuleSensitivity.MEDIUM,
+ expected_seasonality: AlertRuleSeasonality.AUTO,
+ time_period: 60,
+ },
+ }),
+ })
+ );
await userEvent.click(screen.getByLabelText('Save Rule'));
expect(createRule).toHaveBeenLastCalledWith(
@@ -484,6 +506,48 @@ describe('Incident Rules Form', () => {
);
expect(metric.startSpan).toHaveBeenCalledWith({name: 'saveAlertRule'});
});
+
+ it('creates an EAP metric rule', async () => {
+ const rule = MetricRuleFixture();
+ createWrapper({
+ rule: {
+ ...rule,
+ id: undefined,
+ eventTypes: [],
+ aggregate: 'count(span.duration)',
+ dataset: Dataset.EVENTS_ANALYTICS_PLATFORM,
+ },
+ });
+
+ // Clear field
+ await userEvent.clear(screen.getByPlaceholderText('Enter Alert Name'));
+
+ // Enter in name so we can submit
+ await userEvent.type(
+ screen.getByPlaceholderText('Enter Alert Name'),
+ 'EAP Incident Rule'
+ );
+
+ // Set thresholdPeriod
+ await selectEvent.select(screen.getAllByText('For 1 minute')[0], 'For 10 minutes');
+
+ await userEvent.click(screen.getByLabelText('Save Rule'));
+
+ expect(createRule).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({
+ data: expect.objectContaining({
+ name: 'EAP Incident Rule',
+ projects: ['project-slug'],
+ eventTypes: [],
+ thresholdPeriod: 10,
+ alertType: 'eap_metrics',
+ dataset: 'events_analytics_platform',
+ }),
+ })
+ );
+ expect(metric.startSpan).toHaveBeenCalledWith({name: 'saveAlertRule'});
+ });
});
describe('Editing a rule', () => {
@@ -564,7 +628,11 @@ describe('Incident Rules Form', () => {
});
it('switches to anomaly detection threshold', async () => {
- organization.features = [...organization.features, 'anomaly-detection-alerts'];
+ organization.features = [
+ ...organization.features,
+ 'anomaly-detection-alerts',
+ 'anomaly-detection-rollout',
+ ];
createWrapper({
rule: {
...rule,
@@ -655,14 +723,6 @@ describe('Incident Rules Form', () => {
describe('Slack async lookup', () => {
const uuid = 'xxxx-xxxx-xxxx';
- beforeEach(() => {
- jest.useFakeTimers();
- });
-
- afterEach(() => {
- jest.useRealTimers();
- });
-
it('success status updates the rule', async () => {
const alertRule = MetricRuleFixture({name: 'Slack Alert Rule'});
MockApiClient.addMockResponse({
@@ -686,17 +746,16 @@ describe('Incident Rules Form', () => {
onSubmitSuccess,
});
- act(jest.runAllTimers);
+ await screen.findByTestId('loading-indicator');
await userEvent.type(
await screen.findByPlaceholderText('Enter Alert Name'),
'Slack Alert Rule',
{delay: null}
);
- await userEvent.click(screen.getByLabelText('Save Rule'), {delay: null});
+ await userEvent.click(await screen.findByLabelText('Save Rule'), {delay: null});
- expect(screen.getByTestId('loading-indicator')).toBeInTheDocument();
+ expect(await screen.findByTestId('loading-indicator')).toBeInTheDocument();
- act(jest.runAllTimers);
await waitFor(
() => {
expect(onSubmitSuccess).toHaveBeenCalledWith(
@@ -733,7 +792,6 @@ describe('Incident Rules Form', () => {
onSubmitSuccess,
});
- act(jest.runAllTimers);
expect(await screen.findByTestId('loading-indicator')).toBeInTheDocument();
expect(onSubmitSuccess).not.toHaveBeenCalled();
});
@@ -760,15 +818,13 @@ describe('Incident Rules Form', () => {
rule: alertRule,
onSubmitSuccess,
});
- act(jest.runAllTimers);
await userEvent.type(
await screen.findByPlaceholderText('Enter Alert Name'),
'Slack Alert Rule',
{delay: null}
);
- await userEvent.click(screen.getByLabelText('Save Rule'), {delay: null});
+ await userEvent.click(await screen.findByLabelText('Save Rule'), {delay: null});
- act(jest.runAllTimers);
await waitFor(
() => {
expect(addErrorMessage).toHaveBeenCalledWith('An error occurred');
diff --git a/static/app/views/alerts/rules/metric/ruleForm.tsx b/static/app/views/alerts/rules/metric/ruleForm.tsx
index c972d9b9ab9db..42a58a4d20e65 100644
--- a/static/app/views/alerts/rules/metric/ruleForm.tsx
+++ b/static/app/views/alerts/rules/metric/ruleForm.tsx
@@ -1,4 +1,4 @@
-import type {ReactNode} from 'react';
+import type {ComponentProps, ReactNode} from 'react';
import styled from '@emotion/styled';
import * as Sentry from '@sentry/react';
@@ -53,13 +53,13 @@ import {IncompatibleAlertQuery} from 'sentry/views/alerts/rules/metric/incompati
import RuleNameOwnerForm from 'sentry/views/alerts/rules/metric/ruleNameOwnerForm';
import ThresholdTypeForm from 'sentry/views/alerts/rules/metric/thresholdTypeForm';
import Triggers from 'sentry/views/alerts/rules/metric/triggers';
-import TriggersChart from 'sentry/views/alerts/rules/metric/triggers/chart';
+import TriggersChart, {ErrorChart} from 'sentry/views/alerts/rules/metric/triggers/chart';
import {getEventTypeFilter} from 'sentry/views/alerts/rules/metric/utils/getEventTypeFilter';
import hasThresholdValue from 'sentry/views/alerts/rules/metric/utils/hasThresholdValue';
import {isCustomMetricAlert} from 'sentry/views/alerts/rules/metric/utils/isCustomMetricAlert';
import {isInsightsMetricAlert} from 'sentry/views/alerts/rules/metric/utils/isInsightsMetricAlert';
import {isOnDemandMetricAlert} from 'sentry/views/alerts/rules/metric/utils/onDemandMetricAlert';
-import {AlertRuleType} from 'sentry/views/alerts/types';
+import {AlertRuleType, type Anomaly} from 'sentry/views/alerts/types';
import {ruleNeedsErrorMigration} from 'sentry/views/alerts/utils/migrationUi';
import type {MetricAlertType} from 'sentry/views/alerts/wizard/options';
import {
@@ -77,6 +77,7 @@ import {
DEFAULT_CHANGE_COMP_DELTA,
DEFAULT_CHANGE_TIME_WINDOW,
DEFAULT_COUNT_TIME_WINDOW,
+ DEFAULT_DYNAMIC_TIME_WINDOW,
} from './constants';
import RuleConditionsForm from './ruleConditionsForm';
import {
@@ -104,6 +105,8 @@ type RuleTaskResponse = {
error?: string;
};
+type HistoricalDataset = ReturnType;
+
type Props = {
organization: Organization;
project: Project;
@@ -124,14 +127,17 @@ type Props = {
type State = {
aggregate: string;
alertType: MetricAlertType;
+ anomalies: Anomaly[];
// `null` means loading
availableActions: MetricActionTemplate[] | null;
comparisonType: AlertRuleComparisonType;
+ currentData: HistoricalDataset;
// Rule conditions form inputs
// Needed for TriggersChart
dataset: Dataset;
environment: string | null;
eventTypes: EventTypes[];
+ historicalData: HistoricalDataset;
isQueryValid: boolean;
project: Project;
query: string;
@@ -143,6 +149,8 @@ type State = {
triggerErrors: Map;
triggers: Trigger[];
activationCondition?: ActivationConditionType;
+ chartError?: boolean;
+ chartErrorMessage?: string;
comparisonDelta?: number;
isExtrapolatedChartData?: boolean;
monitorType?: MonitorType;
@@ -156,6 +164,12 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
pollingTimeout: number | undefined = undefined;
uuid: string | null = null;
+ constructor(props, context) {
+ super(props, context);
+ this.handleHistoricalTimeSeriesDataFetched =
+ this.handleHistoricalTimeSeriesDataFetched.bind(this);
+ }
+
get isDuplicateRule(): boolean {
return Boolean(this.props.isDuplicateRule);
}
@@ -164,7 +178,9 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
const {alertType, query, eventTypes, dataset} = this.state;
const eventTypeFilter = getEventTypeFilter(this.state.dataset, eventTypes);
const queryWithTypeFilter = (
- !['custom_metrics', 'span_metrics', 'insights_metrics'].includes(alertType)
+ !['custom_metrics', 'span_metrics', 'insights_metrics', 'eap_metrics'].includes(
+ alertType
+ )
? query
? `(${query}) AND (${eventTypeFilter})`
: eventTypeFilter
@@ -215,7 +231,9 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
return {
...super.getDefaultState(),
-
+ currentData: [],
+ historicalData: [],
+ anomalies: [],
name: name ?? rule.name ?? '',
aggregate,
dataset,
@@ -537,9 +555,15 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
handleFieldChange = (name: string, value: unknown) => {
const {projects} = this.props;
- const {timeWindow} = this.state;
+ const {timeWindow, chartError} = this.state;
+ if (chartError) {
+ this.setState({chartError: false, chartErrorMessage: undefined});
+ }
if (name === 'alertType') {
+ if (value === 'crash_free_sessions' || value === 'crash_free_users') {
+ this.setState({comparisonType: AlertRuleComparisonType.COUNT});
+ }
this.setState(({dataset}) => ({
alertType: value as MetricAlertType,
dataset: this.checkOnDemandMetricsDataset(dataset, this.state.query),
@@ -875,26 +899,35 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
clearIndicators();
}
- return {triggers, triggerErrors, triggersHaveChanged: true};
+ return {
+ triggers,
+ triggerErrors,
+ triggersHaveChanged: true,
+ chartError: false,
+ chartErrorMessage: undefined,
+ };
});
};
handleSensitivityChange = (sensitivity: AlertRuleSensitivity) => {
- this.setState({sensitivity});
+ this.setState({sensitivity}, () => this.fetchAnomalies());
};
handleThresholdTypeChange = (thresholdType: AlertRuleThresholdType) => {
const {triggers} = this.state;
const triggerErrors = this.validateTriggers(triggers, thresholdType);
- this.setState(state => ({
- thresholdType,
- triggerErrors: new Map([...triggerErrors, ...state.triggerErrors]),
- }));
+ this.setState(
+ state => ({
+ thresholdType,
+ triggerErrors: new Map([...triggerErrors, ...state.triggerErrors]),
+ }),
+ () => this.fetchAnomalies()
+ );
};
handleThresholdPeriodChange = (value: number) => {
- this.setState({thresholdPeriod: value});
+ this.setState({thresholdPeriod: value}, () => this.fetchAnomalies());
};
handleResolveThresholdChange = (
@@ -915,26 +948,43 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
};
handleComparisonTypeChange = (value: AlertRuleComparisonType) => {
- const comparisonDelta =
- value === AlertRuleComparisonType.CHANGE
- ? this.state.comparisonDelta ?? DEFAULT_CHANGE_COMP_DELTA
- : undefined;
- const timeWindow = this.state.comparisonDelta
- ? DEFAULT_COUNT_TIME_WINDOW
- : DEFAULT_CHANGE_TIME_WINDOW;
- const sensitivity =
- value === AlertRuleComparisonType.DYNAMIC
- ? this.state.sensitivity || AlertRuleSensitivity.MEDIUM
- : undefined;
- const seasonality =
- value === AlertRuleComparisonType.DYNAMIC ? AlertRuleSeasonality.AUTO : undefined;
- this.setState({
- comparisonType: value,
- comparisonDelta,
- timeWindow,
- sensitivity,
- seasonality,
- });
+ let updateState = {};
+ switch (value) {
+ case AlertRuleComparisonType.DYNAMIC:
+ this.fetchAnomalies();
+ updateState = {
+ comparisonType: value,
+ comparisonDelta: undefined,
+ thresholdType: AlertRuleThresholdType.ABOVE_AND_BELOW,
+ timeWindow: DEFAULT_DYNAMIC_TIME_WINDOW,
+ sensitivity: AlertRuleSensitivity.MEDIUM,
+ seasonality: AlertRuleSeasonality.AUTO,
+ };
+ break;
+ case AlertRuleComparisonType.CHANGE:
+ updateState = {
+ comparisonType: value,
+ comparisonDelta: DEFAULT_CHANGE_COMP_DELTA,
+ thresholdType: AlertRuleThresholdType.ABOVE,
+ timeWindow: DEFAULT_CHANGE_TIME_WINDOW,
+ sensitivity: undefined,
+ seasonality: undefined,
+ };
+ break;
+ case AlertRuleComparisonType.COUNT:
+ updateState = {
+ comparisonType: value,
+ comparisonDelta: undefined,
+ thresholdType: AlertRuleThresholdType.ABOVE,
+ timeWindow: DEFAULT_COUNT_TIME_WINDOW,
+ sensitivity: undefined,
+ seasonality: undefined,
+ };
+ break;
+ default:
+ break;
+ }
+ this.setState({...updateState, chartError: false, chartErrorMessage: undefined});
};
handleDeleteRule = async () => {
@@ -986,17 +1036,97 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
handleTimeSeriesDataFetched = (data: EventsStats | MultiSeriesEventsStats | null) => {
const {isExtrapolatedData} = data ?? {};
+ const currentData = formatStatsToHistoricalDataset(data);
+ const newState: Partial = {currentData};
if (shouldShowOnDemandMetricAlertUI(this.props.organization)) {
- this.setState({isExtrapolatedChartData: Boolean(isExtrapolatedData)});
+ newState.isExtrapolatedChartData = Boolean(isExtrapolatedData);
}
-
+ this.setState(newState, () => this.fetchAnomalies());
const {dataset, aggregate, query} = this.state;
if (!isOnDemandMetricAlert(dataset, aggregate, query)) {
this.handleMEPAlertDataset(data);
}
};
+ handleHistoricalTimeSeriesDataFetched(
+ data: EventsStats | MultiSeriesEventsStats | null
+ ) {
+ const historicalData = formatStatsToHistoricalDataset(data);
+ this.setState({historicalData}, () => this.fetchAnomalies());
+ }
+
+ async fetchAnomalies() {
+ const {comparisonType, historicalData, currentData} = this.state;
+
+ if (
+ comparisonType !== AlertRuleComparisonType.DYNAMIC ||
+ !(Array.isArray(currentData) && Array.isArray(historicalData)) ||
+ currentData.length === 0 ||
+ historicalData.length === 0
+ ) {
+ return;
+ }
+ this.setState({chartError: false, chartErrorMessage: ''});
+
+ const {organization, project} = this.props;
+ const {timeWindow, sensitivity, seasonality, thresholdType} = this.state;
+ const direction =
+ thresholdType === AlertRuleThresholdType.ABOVE
+ ? 'up'
+ : thresholdType === AlertRuleThresholdType.BELOW
+ ? 'down'
+ : 'both';
+
+ // extract the earliest timestamp from the current dataset
+ const startOfCurrentTimeframe = currentData.reduce(
+ (value, [timestamp]) => (value < timestamp ? value : timestamp),
+ Infinity
+ );
+ const params = {
+ organization_id: organization.id,
+ project_id: project.id,
+ config: {
+ time_period: timeWindow,
+ sensitivity,
+ direction,
+ expected_seasonality: seasonality,
+ },
+ // remove historical data that overlaps with current dataset
+ historical_data: historicalData.filter(
+ ([timestamp]) => timestamp < startOfCurrentTimeframe
+ ),
+ current_data: currentData,
+ };
+
+ try {
+ const anomalies = await this.api.requestPromise(
+ `/organizations/${organization.slug}/events/anomalies/`,
+ {method: 'POST', data: params}
+ );
+ this.setState({anomalies});
+ } catch (e) {
+ let chartErrorMessage: string | undefined;
+ if (e.responseJSON) {
+ if (typeof e.responseJSON === 'object' && e.responseJSON.detail) {
+ chartErrorMessage = e.responseJSON.detail;
+ }
+ if (typeof e.responseJSON === 'string') {
+ chartErrorMessage = e.responseJSON;
+ }
+ } else if (typeof e.message === 'string') {
+ chartErrorMessage = e.message;
+ } else {
+ chartErrorMessage = t('Something went wrong when rendering this chart.');
+ }
+
+ this.setState({
+ chartError: true,
+ chartErrorMessage,
+ });
+ }
+ }
+
// If the user is creating an on-demand metric alert, we want to override the dataset
// to be generic metrics instead of transactions
checkOnDemandMetricsDataset = (dataset: Dataset, query: string) => {
@@ -1047,8 +1177,21 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
dataset,
alertType,
isQueryValid,
+ anomalies,
+ chartError,
+ chartErrorMessage,
} = this.state;
+ if (chartError) {
+ return (
+
+ );
+ }
const isOnDemand = isOnDemandMetricAlert(dataset, aggregate, query);
let formattedAggregate = aggregate;
@@ -1056,10 +1199,11 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
formattedAggregate = formatMRIField(aggregate);
}
- const chartProps = {
+ const chartProps: ComponentProps = {
organization,
projects: [project],
triggers,
+ anomalies: comparisonType === AlertRuleComparisonType.DYNAMIC ? anomalies : [],
location,
query: this.chartQuery,
aggregate,
@@ -1077,6 +1221,8 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
showTotalCount:
!['custom_metrics', 'span_metrics'].includes(alertType) && !isOnDemand,
onDataLoaded: this.handleTimeSeriesDataFetched,
+ includeHistorical: comparisonType === AlertRuleComparisonType.DYNAMIC,
+ onHistoricalDataLoaded: this.handleHistoricalTimeSeriesDataFetched,
};
let formattedQuery = `event.type:${eventTypes?.join(',')}`;
@@ -1199,7 +1345,9 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
{isCustomMetricAlert(rule.aggregate) &&
- !isInsightsMetricAlert(rule.aggregate) && }
+ !isInsightsMetricAlert(rule.aggregate) && (
+
+ )}
{eventView && }