diff --git a/.github/workflows/cbench.yml b/.github/workflows/cbench.yml deleted file mode 100644 index 363aa07eb4d..00000000000 --- a/.github/workflows/cbench.yml +++ /dev/null @@ -1,137 +0,0 @@ -name: CBENCH - -on: - workflow_dispatch: - push: - branches: - - main - paths: - - "quickwit/**" - - "!quickwit/quickwit-ui/**" - # For security reasons (to make sure the list of allowed users is - # trusted), make sure we run the workflow definition from the base - # commit of the pull request. - pull_request_target: - -# This is required for github.rest.issues.createComment. -permissions: - issues: write - pull-requests: write - -env: - RUSTFLAGS: --cfg tokio_unstable - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - tests: - name: Benchmark - # The self-hosted runner must have the system deps installed for QW and - # the benchmark, because we don't have root access. - runs-on: self-hosted - timeout-minutes: 60 - steps: - - name: Set authorized users - id: authorized-users - # List of users allowed to trigger this workflow. - # Because it executes code on a self-hosted runner, it must be restricted to trusted users. - run: | - echo 'users=["ddelemeny", "fmassot", "fulmicoton", "guilload", "PSeitz", "rdettai", "trinity-1686a"]' >> $GITHUB_OUTPUT - - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) && github.event_name == 'pull_request_target' - name: Checkout quickwit (pull request commit) - with: - repository: quickwit-oss/quickwit - ref: ${{ github.event.pull_request.head.sha }} - path: ./quickwit - - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) && github.event_name != 'pull_request_target' - name: Checkout quickwit - with: - repository: quickwit-oss/quickwit - ref: ${{ github.sha }} - path: ./quickwit - - - name: Checkout benchmarking code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) - with: - repository: quickwit-oss/benchmarks - ref: main - path: ./benchmarks - - - name: Install Rust - run: rustup update stable - - - name: Install protoc - uses: taiki-e/install-action@41ef8c65f4034ff24ab1cc2cef52f3000bcf9523 # v2.62.40 - with: - tool: protoc - - # We don't use rust-cache as it requires root access on the self-hosted runner, which we don't have. 
- - name: cargo build - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) - run: cargo build --release --bin quickwit - working-directory: ./quickwit/quickwit - - name: Compile qbench - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) - run: cargo build --release - working-directory: ./benchmarks/qbench - - name: Run Benchmark on SSD - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) - id: bench-run-ssd - run: python3 ./run.py --search-only --storage pd-ssd --engine quickwit --track generated-logs --tags "${{ github.event_name }}_${{ github.ref_name }}" --manage-engine --source github_workflow --binary-path ../quickwit/quickwit/target/release/quickwit --instance "{autodetect_gcp}" --export-to-endpoint=https://qw-benchmarks.104.155.161.122.nip.io --engine-data-dir "{qwdata_local}" --github-workflow-user "${{ github.actor }}" --github-workflow-run-id "${{ github.run_id }}" --comparison-reference-tag="push_main" --github-pr "${{ github.event_name == 'pull_request_target' && github.event.number || 0 }}" --comparison-reference-commit "${{ github.event_name == 'pull_request_target' && github.sha || github.event.before }}" --write-exported-run-url-to-file $GITHUB_OUTPUT - working-directory: ./benchmarks - - name: Run Benchmark on cloud storage - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) - id: bench-run-cloud-storage - run: python3 ./run.py --search-only --storage gcs --engine quickwit --track generated-logs --tags "${{ github.event_name }}_${{ github.ref_name }}" --manage-engine --source github_workflow --binary-path ../quickwit/quickwit/target/release/quickwit --instance "{autodetect_gcp}" --export-to-endpoint=https://qw-benchmarks.104.155.161.122.nip.io --engine-data-dir "{qwdata_gcs}" --engine-config-file engines/quickwit/configs/cbench_quickwit_gcs.yaml --github-workflow-user "${{ github.actor }}" --github-workflow-run-id "${{ github.run_id }}" --comparison-reference-tag="push_main" --github-pr "${{ github.event_name == 'pull_request_target' && github.event.number || 0 }}" --comparison-reference-commit "${{ github.event_name == 'pull_request_target' && github.sha || github.event.before }}" --write-exported-run-url-to-file $GITHUB_OUTPUT - working-directory: ./benchmarks - - name: Show results links - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) - run: | - echo "::notice title=Benchmark Results on SSD::${{ steps.bench-run-ssd.outputs.url }}" - echo "::notice title=Comparison of results on SSD::${{ steps.bench-run-ssd.outputs.comparison_text }}" - echo "::notice title=Benchmark Results on Cloud Storage::${{ steps.bench-run-cloud-storage.outputs.url }}" - echo "::notice title=Comparison of results on Cloud Storage::${{ steps.bench-run-cloud-storage.outputs.comparison_text }}" - - name: In case of auth error - if: ${{ ! 
contains(fromJSON(steps.authorized-users.outputs.users), github.actor) }} - run: | - echo "::error title=User not allowed to run the benchmark::User must be in list ${{ steps.authorized-users.outputs.users }}" - - name: Add a PR comment with comparison results - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 - if: contains(fromJSON(steps.authorized-users.outputs.users), github.actor) && github.event_name == 'pull_request_target' - # Inspired from: https://github.com/actions/github-script/blob/60a0d83039c74a4aee543508d2ffcb1c3799cdea/.github/workflows/pull-request-test.yml - with: - script: | - // Get the existing comments. - const {data: comments} = await github.rest.issues.listComments({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.number, - }) - - // Find any comment already made by the bot to update it. - const botComment = comments.find(comment => comment.user.id === 41898282) - const commentBody = "### On SSD:\n${{ steps.bench-run-ssd.outputs.comparison_text }}\n### On GCS:\n${{ steps.bench-run-cloud-storage.outputs.comparison_text }}\n" - if (botComment) { - // Update existing comment. - await github.rest.issues.updateComment({ - owner: context.repo.owner, - repo: context.repo.repo, - comment_id: botComment.id, - body: commentBody - }) - } else { - // New comment. - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.number, - body: commentBody - }) - } diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 44aa8a73e54..bb28d31d81d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: tests: name: Unit tests runs-on: "ubuntu-latest" - timeout-minutes: 40 + timeout-minutes: 60 permissions: contents: read actions: write @@ -68,15 +68,11 @@ jobs: - quickwit/**/*.proto - quickwit/rest-api-tests/** - .github/workflows/ci.yml - # The following step is just meant to install rustup actually. - # The next one installs the correct toolchain. 
- - name: Install rustup - if: steps.modified.outputs.rust_src == 'true' - run: curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain none -y - name: Setup stable Rust Toolchain if: steps.modified.outputs.rust_src == 'true' - run: rustup show active-toolchain || rustup toolchain install - working-directory: ./quickwit + uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master + with: + toolchain: stable - name: Setup cache uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 if: steps.modified.outputs.rust_src == 'true' @@ -98,7 +94,7 @@ jobs: - name: Install python packages if: always() && steps.modified.outputs.rust_src == 'true' run: | - pip install --user pipenv==2025.0.4 + pip install --user --require-hashes -r ${{ github.workspace }}/.github/workflows/requirements.txt pipenv install --deploy --ignore-pipfile working-directory: ./quickwit/rest-api-tests - name: Run REST API tests @@ -109,7 +105,7 @@ jobs: lints: name: Lints runs-on: "ubuntu-latest" - timeout-minutes: 20 + timeout-minutes: 60 permissions: contents: read actions: write @@ -126,17 +122,18 @@ jobs: - .github/workflows/ci.yml - name: Install Ubuntu packages if: always() && steps.modified.outputs.rust_src == 'true' - run: sudo apt-get -y install protobuf-compiler python3 python3-pip - - name: Install rustup - if: steps.modified.outputs.rust_src == 'true' - run: curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain none -y + run: sudo apt-get -y install protobuf-compiler - name: Setup nightly Rust Toolchain (for rustfmt) if: steps.modified.outputs.rust_src == 'true' - run: rustup toolchain install nightly + uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master + with: + toolchain: nightly + components: rustfmt - name: Setup stable Rust Toolchain if: steps.modified.outputs.rust_src == 'true' - run: rustup show active-toolchain || rustup toolchain install - working-directory: ./quickwit + uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master + with: + toolchain: stable - name: Setup cache if: steps.modified.outputs.rust_src == 'true' uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 @@ -178,7 +175,9 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@5d458579430fc14a04a08a1e7d3694f545e91ce6 # stable + uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master + with: + toolchain: stable - name: Cache cargo tools uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 29a9a250b50..58049144a0e 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -133,7 +133,7 @@ jobs: - name: Install python packages run: | - pip install --user pipenv==2025.0.4 + pip install --user --require-hashes -r ${{ github.workspace }}/.github/workflows/requirements.txt pipenv install --deploy --ignore-pipfile working-directory: ./quickwit/quickwit-cli/tests diff --git a/.github/workflows/requirements.txt b/.github/workflows/requirements.txt new file mode 100644 index 00000000000..a69055692f9 --- /dev/null +++ b/.github/workflows/requirements.txt @@ -0,0 +1,22 @@ +# contains pinned dependencies for installing pipenv to ensure repeatable builds in CI/CD workflows +certifi==2025.10.5 \ + --hash=sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de \ + 
--hash=sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43 +distlib==0.4.0 \ + --hash=sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16 \ + --hash=sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d +filelock==3.20.0 \ + --hash=sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2 \ + --hash=sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4 +packaging==25.0 \ + --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ + --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f +pipenv==2025.0.4 \ + --hash=sha256:36fc2a7841ccdb2f58a9f787b296c2e15dea3b5b79b84d4071812f28b7e8d7a2 \ + --hash=sha256:e1fbe4cfd25ab179f123d1fbb1fa1cdc0b3ffcdb1f21c775dcaa12ccc356f2bb +platformdirs==4.5.0 \ + --hash=sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312 \ + --hash=sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3 +virtualenv==20.35.4 \ + --hash=sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c \ + --hash=sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 00000000000..204e856e85d --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,50 @@ +name: OpenSSF Scorecard +on: + schedule: + - cron: '0 0 * * 0' + push: + branches: + - main + +permissions: + contents: read + +jobs: + analysis: + name: Scorecards analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results + id-token: write + actions: read + contents: read + + steps: + - name: 'Checkout code' + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false + + - name: 'Run analysis' + uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 + with: + results_file: results.sarif + results_format: sarif + repo_token: ${{ secrets.GITHUB_TOKEN }} + publish_results: true + + # Upload the results as artifacts. + - name: 'Upload artifact' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. 
+ - name: 'Upload to code-scanning' + uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0 + with: + sarif_file: results.sarif diff --git a/.github/workflows/ui-ci.yml b/.github/workflows/ui-ci.yml index 5ef0a777f0f..e47ce106e4c 100644 --- a/.github/workflows/ui-ci.yml +++ b/.github/workflows/ui-ci.yml @@ -31,7 +31,6 @@ jobs: - name: Cypress run command: | sudo apt-get -y install protobuf-compiler - rustup show active-toolchain || rustup toolchain install CI=false yarn --cwd quickwit-ui build RUSTFLAGS="--cfg tokio_unstable" cargo build --features=postgres mkdir qwdata @@ -39,6 +38,8 @@ jobs: yarn --cwd quickwit-ui cypress run - name: Lint command: yarn --cwd quickwit-ui lint + - name: Check formatting + command: yarn --cwd quickwit-ui check-formatting - name: Unit Test command: yarn --cwd quickwit-ui test services: @@ -70,8 +71,10 @@ jobs: node-version: 20 cache: "yarn" cache-dependency-path: quickwit/quickwit-ui/yarn.lock - - name: Install rustup - run: curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain none -y + - name: Setup stable Rust Toolchain + uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master + with: + toolchain: stable - name: Install JS dependencies run: yarn --cwd quickwit-ui install working-directory: ./quickwit diff --git a/Dockerfile b/Dockerfile index 3962d1b3d43..d69a412a821 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ RUN touch .gitignore_for_build_directory \ && NODE_ENV=production make install build -FROM rust:bookworm@sha256:3914072ca0c3b8aad871db9169a651ccfce30cf58303e5d6f2db16d1d8a7e58f AS bin-builder +FROM rust:bookworm@sha256:b5efaabfd787a695d2e46b37d3d9c54040e11f4c10bc2e714bbadbfcc0cd6c39 AS bin-builder ARG CARGO_FEATURES=release-feature-set ARG CARGO_PROFILE=release diff --git a/README.md b/README.md index dd546c9eb84..2c8c7315ed7 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ [![CI](https://github.com/quickwit-oss/quickwit/actions/workflows/ci.yml/badge.svg)](https://github.com/quickwit-oss/quickwit/actions?query=workflow%3ACI+branch%3Amain) [![codecov](https://codecov.io/gh/quickwit-oss/quickwit/branch/main/graph/badge.svg?token=06SRGAV5SS)](https://codecov.io/gh/quickwit-oss/quickwit) +[![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/quickwit-oss/quickwit/badge)](https://scorecard.dev/viewer/?uri=github.com/quickwit-oss/quickwit) [![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.0-4baaaa.svg)](CODE_OF_CONDUCT.md) [![License: Apache 2.0](https://img.shields.io/badge/license-Apache%202.0-blue?style=flat-square)](LICENSE) [![Twitter Follow](https://img.shields.io/twitter/follow/Quickwit_Inc?color=%231DA1F2&logo=Twitter&style=plastic)](https://twitter.com/Quickwit_Inc) diff --git a/quickwit/Cargo.lock b/quickwit/Cargo.lock index f5cd19ec89e..8e66e71046b 100644 --- a/quickwit/Cargo.lock +++ b/quickwit/Cargo.lock @@ -5682,7 +5682,7 @@ checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" [[package]] name = "ownedbytes" version = "0.9.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "stable_deref_trait", ] @@ -7164,6 +7164,7 @@ dependencies = [ "quickwit-serve", "quickwit-storage", "rand 0.8.5", + "rdkafka", "reqwest 0.12.23", "rustls 0.23.31", "serde_json", @@ -9570,7 +9571,7 @@ checksum = 
"7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" [[package]] name = "tantivy" version = "0.25.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "aho-corasick", "arc-swap", @@ -9625,7 +9626,7 @@ dependencies = [ [[package]] name = "tantivy-bitpacker" version = "0.9.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "bitpacking", ] @@ -9633,7 +9634,7 @@ dependencies = [ [[package]] name = "tantivy-columnar" version = "0.6.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "downcast-rs", "fastdivide", @@ -9648,7 +9649,7 @@ dependencies = [ [[package]] name = "tantivy-common" version = "0.10.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "async-trait", "byteorder", @@ -9671,7 +9672,7 @@ dependencies = [ [[package]] name = "tantivy-query-grammar" version = "0.25.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "fnv", "nom 7.1.3", @@ -9683,7 +9684,7 @@ dependencies = [ [[package]] name = "tantivy-sstable" version = "0.6.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "futures-util", "itertools 0.14.0", @@ -9696,7 +9697,7 @@ dependencies = [ [[package]] name = "tantivy-stacker" version = "0.6.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "murmurhash32", "rand_distr", @@ -9706,7 +9707,7 @@ dependencies = [ [[package]] name = "tantivy-tokenizer-api" version = "0.6.0" -source = "git+https://github.com/quickwit-oss/tantivy/?rev=dabcaa5#dabcaa58093a3f7f10e98a5a3b06cfe2370482f9" +source = "git+https://github.com/SekoiaLab/tantivy/?rev=bbdf83e#bbdf83e82e6ddc334727c35df6cc55452319ccc6" dependencies = [ "serde", ] diff --git a/quickwit/Cargo.toml b/quickwit/Cargo.toml index 89e1429d9ab..e7ed41793c3 100644 --- a/quickwit/Cargo.toml +++ b/quickwit/Cargo.toml @@ -346,7 +346,7 @@ quickwit-serve = { path = "quickwit-serve" } quickwit-storage = { path = "quickwit-storage" } quickwit-telemetry = { path = "quickwit-telemetry" } -tantivy = { git = "https://github.com/quickwit-oss/tantivy/", rev = "dabcaa5", default-features = false, features = [ +tantivy = { git = "https://github.com/SekoiaLab/tantivy/", rev = "bbdf83e", default-features = false, features = [ "lz4-compression", "mmap", "quickwit", diff --git a/quickwit/quickwit-cli/src/index.rs b/quickwit/quickwit-cli/src/index.rs index 
6bdbfa86688..7eb6daeaddd 100644 --- a/quickwit/quickwit-cli/src/index.rs +++ b/quickwit/quickwit-cli/src/index.rs @@ -1266,7 +1266,7 @@ mod test { let mut split_4 = template_split; split_4.split_metadata = split_metadata_4; - let splits = vec![split_1, split_2, split_3, split_4]; + let splits = [split_1, split_2, split_3, split_4]; let splits_num_docs = splits .iter() diff --git a/quickwit/quickwit-cli/src/main.rs b/quickwit/quickwit-cli/src/main.rs index 3c6d6bc5daf..01b37a34266 100644 --- a/quickwit/quickwit-cli/src/main.rs +++ b/quickwit/quickwit-cli/src/main.rs @@ -133,7 +133,6 @@ fn about_text() -> String { #[cfg(test)] mod tests { - use std::path::PathBuf; use std::str::FromStr; use std::time::Duration; @@ -683,7 +682,7 @@ mod tests { split_id, target_dir, .. - })) if &index_id == "wikipedia" && &split_id == "ABC" && target_dir == PathBuf::from("datadir") + })) if &index_id == "wikipedia" && &split_id == "ABC" && target_dir == *"datadir" )); Ok(()) } diff --git a/quickwit/quickwit-cli/src/tool.rs b/quickwit/quickwit-cli/src/tool.rs index da5521c0e8c..d32db8a9e45 100644 --- a/quickwit/quickwit-cli/src/tool.rs +++ b/quickwit/quickwit-cli/src/tool.rs @@ -552,6 +552,7 @@ pub async fn local_search_cli(args: LocalSearchArgs) -> anyhow::Result<()> { sort_by, count_all: CountHits::CountAll, allow_failed_splits: false, + split_id: None, }; let search_request = search_request_from_api_request(vec![args.index_id], search_request_query_string)?; diff --git a/quickwit/quickwit-config/src/node_config/mod.rs b/quickwit/quickwit-config/src/node_config/mod.rs index bb8a17daaeb..ae46c331a01 100644 --- a/quickwit/quickwit-config/src/node_config/mod.rs +++ b/quickwit/quickwit-config/src/node_config/mod.rs @@ -268,6 +268,7 @@ pub struct SearcherConfig { pub split_footer_cache_capacity: ByteSize, pub partial_request_cache_capacity: ByteSize, pub max_num_concurrent_split_searches: usize, + pub max_total_split_searches: Option, // Deprecated: stream search requests are no longer supported. 
#[serde(alias = "max_num_concurrent_split_streams", default, skip_serializing)] pub _max_num_concurrent_split_streams: Option, @@ -325,6 +326,7 @@ impl Default for SearcherConfig { split_footer_cache_capacity: ByteSize::mb(500), partial_request_cache_capacity: ByteSize::mb(64), max_num_concurrent_split_searches: 100, + max_total_split_searches: None, _max_num_concurrent_split_streams: None, aggregation_memory_limit: ByteSize::mb(500), aggregation_bucket_limit: 65000, diff --git a/quickwit/quickwit-config/src/node_config/serialize.rs b/quickwit/quickwit-config/src/node_config/serialize.rs index b5f39ceb0ac..b81e25390a6 100644 --- a/quickwit/quickwit-config/src/node_config/serialize.rs +++ b/quickwit/quickwit-config/src/node_config/serialize.rs @@ -662,6 +662,7 @@ mod tests { split_footer_cache_capacity: ByteSize::gb(1), partial_request_cache_capacity: ByteSize::mb(64), max_num_concurrent_split_searches: 150, + max_total_split_searches: None, _max_num_concurrent_split_streams: Some(serde::de::IgnoredAny), split_cache: None, request_timeout_secs: NonZeroU64::new(30).unwrap(), diff --git a/quickwit/quickwit-control-plane/src/indexing_scheduler/mod.rs b/quickwit/quickwit-control-plane/src/indexing_scheduler/mod.rs index 98651f809c4..f22ecd98ab0 100644 --- a/quickwit/quickwit-control-plane/src/indexing_scheduler/mod.rs +++ b/quickwit/quickwit-control-plane/src/indexing_scheduler/mod.rs @@ -51,6 +51,9 @@ pub(crate) const MIN_DURATION_BETWEEN_SCHEDULING: Duration = Duration::from_secs(30) }; +/// That's 80% of a pipeline capacity +const MAX_LOAD_PER_PIPELINE: CpuCapacity = CpuCapacity::from_cpu_millis(3_200); + #[derive(Debug, Clone, Default, Serialize)] pub struct IndexingSchedulerState { pub num_applied_physical_indexing_plan: usize, @@ -257,8 +260,12 @@ fn get_sources_to_schedule(model: &ControlPlaneModel) -> Vec { source_uid, source_type: SourceToScheduleType::NonSharded { num_pipelines: source_config.num_pipelines.get() as u32, - // FIXME - load_per_pipeline: NonZeroU32::new(PIPELINE_FULL_CAPACITY.cpu_millis()) + // FIXME: + // - implementing adaptative load contains the risk of generating + // rebalancing storms for sources like Kafka + // - this is coupled with the scheduling logic that misses the notion of + // pipeline + load_per_pipeline: NonZeroU32::new(MAX_LOAD_PER_PIPELINE.cpu_millis()) .unwrap(), }, params_fingerprint, @@ -981,7 +988,7 @@ mod tests { index_uid: IndexUid::for_test("index-2", 0), source_id: "source2".to_string(), }; - let sources = vec![ + let sources = [ SourceToSchedule { source_uid: source_1.clone(), source_type: SourceToScheduleType::NonSharded { diff --git a/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/README.md b/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/README.md index f4ec64010a9..58ff4ae62d0 100644 --- a/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/README.md +++ b/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/README.md @@ -15,8 +15,9 @@ We also want to observe some interesting properties such as: To simplify the logic and make it easier to test it, we first abstract this in the following optimization problem. In Quickwit, we have two types of source: -- The push api source: they have a given (changing) set of shards associated to them. - A shard is rate-limited to ensure their throughput is lower than `5MB/s` worth of +- The push api source: indexes have a given (changing) set of shards associated to them. 
+ Shards are stored on indexer nodes and are spread randomly accross them. A shard is + rate-limited to ensure their throughput is lower than `5MB/s` worth of uncompressed data. This guarantees that a given shard can be indexed by a single indexing pipeline. diff --git a/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/mod.rs b/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/mod.rs index ca5ac4d1fc7..eb7875097e0 100644 --- a/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/mod.rs +++ b/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/mod.rs @@ -757,8 +757,8 @@ mod tests { convert_scheduling_solution_to_physical_plan_single_node_single_source, }; use crate::indexing_plan::PhysicalIndexingPlan; - use crate::indexing_scheduler::get_shard_locality_metrics; use crate::indexing_scheduler::scheduling::assign_shards; + use crate::indexing_scheduler::{MAX_LOAD_PER_PIPELINE, get_shard_locality_metrics}; use crate::model::ShardLocations; fn source_id() -> SourceUid { @@ -939,6 +939,146 @@ mod tests { } } + #[test] + fn test_build_physical_plan_with_pipeline_limit() { + let indexer1 = "indexer1".to_string(); + let indexer2 = "indexer2".to_string(); + let source_uid0 = source_id(); + let source_uid1 = source_id(); + let source_0 = SourceToSchedule { + source_uid: source_uid0.clone(), + source_type: SourceToScheduleType::Sharded { + shard_ids: (0..16).map(ShardId::from).collect(), + load_per_shard: NonZeroU32::new(800).unwrap(), + }, + params_fingerprint: 0, + }; + let source_1 = SourceToSchedule { + source_uid: source_uid1.clone(), + source_type: SourceToScheduleType::NonSharded { + num_pipelines: 4, + load_per_pipeline: NonZeroU32::new(MAX_LOAD_PER_PIPELINE.cpu_millis()).unwrap(), + }, + params_fingerprint: 0, + }; + let mut indexer_id_to_cpu_capacities = FnvHashMap::default(); + indexer_id_to_cpu_capacities.insert(indexer1.clone(), mcpu(16_000)); + indexer_id_to_cpu_capacities.insert(indexer2.clone(), mcpu(16_000)); + let shard_locations = ShardLocations::default(); + let indexing_plan = build_physical_indexing_plan( + &[source_0, source_1], + &indexer_id_to_cpu_capacities, + None, + &shard_locations, + ); + assert_eq!(indexing_plan.indexing_tasks_per_indexer().len(), 2); + + let node1_plan = indexing_plan.indexer(&indexer1).unwrap(); + let node2_plan = indexing_plan.indexer(&indexer2).unwrap(); + + let source_0_on_node1 = node1_plan + .iter() + .filter(|task| task.source_id == source_uid0.source_id) + .count(); + let source_0_on_node2 = node2_plan + .iter() + .filter(|task| task.source_id == source_uid0.source_id) + .count(); + assert!(source_0_on_node1 <= 3); + assert!(source_0_on_node2 <= 3); + assert_eq!(source_0_on_node1 + source_0_on_node2, 4); + + let source_1_on_node1 = node1_plan + .iter() + .filter(|task| task.source_id == source_uid1.source_id) + .count(); + let source_1_on_node2 = node2_plan + .iter() + .filter(|task| task.source_id == source_uid1.source_id) + .count(); + assert!(source_1_on_node1 <= 3); + assert!(source_1_on_node2 <= 3); + assert_eq!(source_1_on_node1 + source_1_on_node2, 4); + } + + #[test] + fn test_build_physical_plan_second_iteration() { + let indexer1 = "indexer1".to_string(); + let indexer2 = "indexer2".to_string(); + let indexer3 = "indexer3".to_string(); + let mut sources = Vec::new(); + for _ in 0..10 { + sources.push(SourceToSchedule { + source_uid: source_id(), + source_type: SourceToScheduleType::NonSharded { + num_pipelines: 4, + load_per_pipeline: 
NonZeroU32::new(MAX_LOAD_PER_PIPELINE.cpu_millis()).unwrap(), + }, + params_fingerprint: 0, + }); + } + let mut indexer_id_to_cpu_capacities = FnvHashMap::default(); + indexer_id_to_cpu_capacities.insert(indexer1.clone(), mcpu(16_000)); + indexer_id_to_cpu_capacities.insert(indexer2.clone(), mcpu(16_000)); + indexer_id_to_cpu_capacities.insert(indexer3.clone(), mcpu(16_000)); + let shard_locations = ShardLocations::default(); + let indexing_plan = build_physical_indexing_plan( + &sources, + &indexer_id_to_cpu_capacities, + None, + &shard_locations, + ); + assert_eq!(indexing_plan.indexing_tasks_per_indexer().len(), 3); + + for source in &sources { + let pipelines_per_indexer_for_source = indexing_plan + .indexing_tasks_per_indexer() + .values() + .map(|tasks| { + tasks + .iter() + .filter(|t| t.source_id == source.source_uid.source_id) + .count() + }) + .collect_vec(); + assert!(pipelines_per_indexer_for_source.contains(&3)); + assert!(pipelines_per_indexer_for_source.contains(&1)); + assert!(pipelines_per_indexer_for_source.contains(&0)); + assert_eq!(pipelines_per_indexer_for_source.iter().sum::(), 4); + } + + for source in &mut sources { + if let SourceToScheduleType::NonSharded { num_pipelines, .. } = &mut source.source_type + { + *num_pipelines = 5; + } + } + + let new_indexing_plan = build_physical_indexing_plan( + &sources, + &indexer_id_to_cpu_capacities, + Some(&indexing_plan), + &shard_locations, + ); + + for source in &sources { + let pipelines_per_indexer_for_source = new_indexing_plan + .indexing_tasks_per_indexer() + .values() + .map(|tasks| { + tasks + .iter() + .filter(|t| t.source_id == source.source_uid.source_id) + .count() + }) + .collect_vec(); + assert!(pipelines_per_indexer_for_source.contains(&3)); + assert!(pipelines_per_indexer_for_source.contains(&2)); + assert!(pipelines_per_indexer_for_source.contains(&0)); + assert_eq!(pipelines_per_indexer_for_source.iter().sum::(), 5); + } + } + fn make_indexing_tasks( source_uid: &SourceUid, shards: &[(PipelineUid, &[ShardId])], diff --git a/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/scheduling_logic.rs b/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/scheduling_logic.rs index a8a5c4f64c0..bb668d53204 100644 --- a/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/scheduling_logic.rs +++ b/quickwit/quickwit-control-plane/src/indexing_scheduler/scheduling/scheduling_logic.rs @@ -12,14 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -use std::cmp::Reverse; +use std::cmp::{Ordering, Reverse}; use std::collections::BTreeMap; use std::collections::btree_map::Entry; -use itertools::Itertools; use quickwit_proto::indexing::CpuCapacity; use super::scheduling_logic_model::*; +use crate::indexing_scheduler::MAX_LOAD_PER_PIPELINE; use crate::indexing_scheduler::scheduling::inflate_node_capacities_if_necessary; // ------------------------------------------------------------------------------------ @@ -229,6 +229,44 @@ fn assert_enforce_nodes_cpu_capacity_post_condition( // If this algorithm fails to place all remaining shards, we inflate // the node capacities by 20% in the scheduling problem and start from the beginning. 
+#[derive(Debug, PartialEq, Eq)] +struct PlacementCandidate { + indexer_ord: IndexerOrd, + current_num_shards: u32, + available_capacity: CpuCapacity, + affinity: u32, +} + +impl Ord for PlacementCandidate { + fn cmp(&self, other: &Self) -> Ordering { + // Higher affinity is better + match self.affinity.cmp(&other.affinity) { + Ordering::Equal => {} + ordering => return ordering.reverse(), + } + // If tie, pick the node with shards already assigned first + let current_shard_presence = self.current_num_shards.clamp(0, 1); + let other_shard_presence = other.current_num_shards.clamp(0, 1); + match current_shard_presence.cmp(&other_shard_presence) { + Ordering::Equal => {} + ordering => return ordering.reverse(), + } + // If tie, pick the node with the highest available capacity + match self.available_capacity.cmp(&other.available_capacity) { + Ordering::Equal => {} + ordering => return ordering.reverse(), + } + // Final tie-breaker: indexer ID for deterministic ordering + self.indexer_ord.cmp(&other.indexer_ord).reverse() + } +} + +impl PartialOrd for PlacementCandidate { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + fn attempt_place_unassigned_shards( unassigned_shards: &[Source], problem: &SchedulingProblem, @@ -236,12 +274,27 @@ fn attempt_place_unassigned_shards( ) -> Result { let mut solution = partial_solution.clone(); for source in unassigned_shards { - let indexers_with_most_available_capacity = - compute_indexer_available_capacity(problem, &solution) - .sorted_by_key(|(indexer_ord, capacity)| Reverse((*capacity, *indexer_ord))); + let mut placements: Vec = solution + .indexer_assignments + .iter() + .map(|indexer_assignment: &IndexerAssignment| { + let available_capacity = indexer_assignment.indexer_available_capacity(problem); + assert!(available_capacity >= 0i32); + let available_capacity = CpuCapacity::from_cpu_millis(available_capacity as u32); + let current_num_shards = indexer_assignment.num_shards(source.source_ord); + PlacementCandidate { + affinity: 0, + current_num_shards, + available_capacity, + indexer_ord: indexer_assignment.indexer_ord, + } + }) + .collect(); + placements.sort(); place_unassigned_shards_single_source( source, - indexers_with_most_available_capacity, + &placements, + problem.num_indexers(), &mut solution, )?; } @@ -259,25 +312,29 @@ fn place_unassigned_shards_with_affinity( Reverse(load) }); for source in &unassigned_shards { - // List of indexer with a non-null affinity and some available capacity, sorted by - // (affinity, available capacity) in that order. 
- let indexers_with_affinity_and_available_capacity = source + let mut placements: Vec = source .affinities .iter() .filter(|&(_, &affinity)| affinity != 0u32) - .map(|(&indexer_ord, affinity)| { + .map(|(&indexer_ord, &affinity)| { let available_capacity = solution.indexer_assignments[indexer_ord].indexer_available_capacity(problem); - let capacity = CpuCapacity::from_cpu_millis(available_capacity as u32); - (indexer_ord, affinity, capacity) - }) - .sorted_by_key(|(indexer_ord, affinity, capacity)| { - Reverse((*affinity, *capacity, *indexer_ord)) + let available_capacity = CpuCapacity::from_cpu_millis(available_capacity as u32); + let current_num_shards = + solution.indexer_assignments[indexer_ord].num_shards(source.source_ord); + PlacementCandidate { + affinity, + current_num_shards, + available_capacity, + indexer_ord, + } }) - .map(|(indexer_ord, _, capacity)| (indexer_ord, capacity)); + .collect(); + placements.sort(); let _ = place_unassigned_shards_single_source( source, - indexers_with_affinity_and_available_capacity, + &placements, + problem.num_indexers(), solution, ); } @@ -350,22 +407,40 @@ struct NotEnoughCapacity; /// amongst the node with their given node capacity. fn place_unassigned_shards_single_source( source: &Source, - mut indexer_with_capacities: impl Iterator, + sorted_candidates: &[PlacementCandidate], + num_indexers: usize, solution: &mut SchedulingSolution, ) -> Result<(), NotEnoughCapacity> { let mut num_shards = source.num_shards; - while num_shards > 0 { - let Some((indexer_ord, available_capacity)) = indexer_with_capacities.next() else { - return Err(NotEnoughCapacity); - }; - let num_placable_shards = available_capacity.cpu_millis() / source.load_per_shard; - let num_shards_to_place = num_placable_shards.min(num_shards); + // To ensure that merges can keep up, we try not to assign more than 3 + // pipelines per indexer for a source (except if there aren't enough nodes). + let target_limit_num_shards_per_indexer_per_source = + 3 * MAX_LOAD_PER_PIPELINE.cpu_millis() / source.load_per_shard.get(); + let limit_num_shards_per_indexer_per_source = target_limit_num_shards_per_indexer_per_source + .max(num_shards.div_ceil(num_indexers as u32)); + for PlacementCandidate { + indexer_ord, + available_capacity, + current_num_shards, + .. + } in sorted_candidates + { + let num_placable_shards_for_available_capacity = + available_capacity.cpu_millis() / source.load_per_shard; + let num_placable_shards_for_limit = + limit_num_shards_per_indexer_per_source.saturating_sub(*current_num_shards); + let num_shards_to_place = num_shards + .min(num_placable_shards_for_available_capacity) + .min(num_placable_shards_for_limit); // Update the solution, the shard load, and the number of shards to place. - solution.indexer_assignments[indexer_ord] + solution.indexer_assignments[*indexer_ord] .add_shards(source.source_ord, num_shards_to_place); num_shards -= num_shards_to_place; + if num_shards == 0 { + return Ok(()); + } } - Ok(()) + Err(NotEnoughCapacity) } /// Compute the sources/shards that have not been assigned to any indexer yet. @@ -394,30 +469,11 @@ fn compute_unassigned_sources( unassigned_sources.into_values().collect() } -/// Builds a BinaryHeap with the different indexer capacities. -/// -/// Panics if one of the indexer is over-assigned. 
-fn compute_indexer_available_capacity<'a>( - problem: &'a SchedulingProblem, - solution: &'a SchedulingSolution, -) -> impl Iterator + 'a { - solution - .indexer_assignments - .iter() - .map(|indexer_assignment| { - let available_capacity: i32 = indexer_assignment.indexer_available_capacity(problem); - assert!(available_capacity >= 0i32); - ( - indexer_assignment.indexer_ord, - CpuCapacity::from_cpu_millis(available_capacity as u32), - ) - }) -} - #[cfg(test)] mod tests { use std::num::NonZeroU32; + use itertools::Itertools; use proptest::prelude::*; use quickwit_proto::indexing::mcpu; @@ -602,6 +658,27 @@ mod tests { assert_eq!(solution.indexer_assignments[1].num_shards(0), 4); } + #[test] + fn test_placement_limit_with_affinity() { + let mut problem = + SchedulingProblem::with_indexer_cpu_capacities(vec![mcpu(16_000), mcpu(16_000)]); + let max_load_per_pipeline = NonZeroU32::new(MAX_LOAD_PER_PIPELINE.cpu_millis()).unwrap(); + problem.add_source(4, max_load_per_pipeline); + problem.add_source(4, max_load_per_pipeline); + problem.inc_affinity(0, 1); + problem.inc_affinity(0, 1); + problem.inc_affinity(0, 0); + problem.inc_affinity(1, 0); + let mut solution = problem.new_solution(); + place_unassigned_shards_with_affinity(&problem, &mut solution); + assert_eq!(solution.indexer_assignments[0].num_shards(1), 3); + assert_eq!(solution.indexer_assignments[0].num_shards(0), 1); + assert_eq!(solution.indexer_assignments[1].num_shards(0), 3); + // one shard was not placed because indexer 0 was full and it had no + // affinity with indexer 1 + assert_eq!(solution.indexer_assignments[1].num_shards(1), 0); + } + #[test] fn test_place_unassigned_shards_reach_capacity() { let mut problem = @@ -807,4 +884,35 @@ mod tests { assert_eq!(solution.capacity_scaling_iterations, 1); } + + #[test] + fn test_shard_fragmentation_when_iterating() { + // Create a problem where affinity constraints cause suboptimal placement + // requiring iterative scaling despite initial capacity scaling. 
+ let mut problem = + SchedulingProblem::with_indexer_cpu_capacities(vec![mcpu(3000), mcpu(3000)]); + problem.add_source(1, NonZeroU32::new(1000).unwrap()); + problem.add_source(1, NonZeroU32::new(1000).unwrap()); + problem.add_source(1, NonZeroU32::new(1000).unwrap()); + let empty_solution = problem.new_solution(); + let first_solution = solve(problem, empty_solution); + + let mut updated_problem = + SchedulingProblem::with_indexer_cpu_capacities(vec![mcpu(3000), mcpu(3000)]); + updated_problem.add_source(2, NonZeroU32::new(1000).unwrap()); + updated_problem.add_source(2, NonZeroU32::new(1000).unwrap()); + updated_problem.add_source(2, NonZeroU32::new(1000).unwrap()); + + let second_solution = solve(updated_problem, first_solution); + + for source in 0..2 { + let num_shards_per_indexer = second_solution + .indexer_assignments + .iter() + .map(|indexer_assignment| indexer_assignment.num_shards(source)) + .collect_vec(); + assert!(num_shards_per_indexer.contains(&2)); + assert!(num_shards_per_indexer.contains(&0)); + } + } } diff --git a/quickwit/quickwit-control-plane/src/model/mod.rs b/quickwit/quickwit-control-plane/src/model/mod.rs index b277e61455d..688654b99ec 100644 --- a/quickwit/quickwit-control-plane/src/model/mod.rs +++ b/quickwit/quickwit-control-plane/src/model/mod.rs @@ -160,7 +160,7 @@ impl ControlPlaneModel { self.index_table.get(index_uid) } - pub fn source_metadata(&self, source_uid: &SourceUid) -> Option<&SourceConfig> { + pub(crate) fn source_metadata(&self, source_uid: &SourceUid) -> Option<&SourceConfig> { self.index_metadata(&source_uid.index_uid)? .sources .get(&source_uid.source_id) diff --git a/quickwit/quickwit-indexing/src/actors/indexing_service.rs b/quickwit/quickwit-indexing/src/actors/indexing_service.rs index 5d307fb200b..10e2e692beb 100644 --- a/quickwit/quickwit-indexing/src/actors/indexing_service.rs +++ b/quickwit/quickwit-indexing/src/actors/indexing_service.rs @@ -31,7 +31,7 @@ use quickwit_common::io::Limiter; use quickwit_common::pubsub::EventBroker; use quickwit_common::{io, temp_dir}; use quickwit_config::{ - INGEST_API_SOURCE_ID, IndexConfig, IndexerConfig, SourceConfig, build_doc_mapper, + INGEST_API_SOURCE_ID, IndexConfig, IndexerConfig, SourceConfig, SourceParams, build_doc_mapper, indexing_pipeline_params_fingerprint, }; use quickwit_ingest::{ @@ -289,8 +289,26 @@ impl IndexingService { let message = format!("failed to spawn indexing pipeline: {error}"); IndexingError::Internal(message) })?; - let merge_policy = - crate::merge_policy::merge_policy_from_settings(&index_config.indexing_settings); + + let mut indexing_settings = index_config.indexing_settings.clone(); + if let SourceParams::Kafka(kafka_params) = &source_config.source_params { + if let Some(indexing_settings_value) = + kafka_params.client_params.get("indexing_settings") + { + if let Ok(indexing_pipeline) = + serde_json::from_value(indexing_settings_value.clone()) + { + indexing_settings = indexing_pipeline; + } else { + error!( + index_id = indexing_pipeline_id.index_uid.index_id, + source_id = indexing_pipeline_id.source_id, + "source level override of indexing_settings failed, deserialization error" + ); + } + } + } + let merge_policy = crate::merge_policy::merge_policy_from_settings(&indexing_settings); let retention_policy = index_config.retention_policy_opt.clone(); let split_store = IndexingSplitStore::new(storage.clone(), self.local_split_store.clone()); @@ -345,7 +363,7 @@ impl IndexingService { // Indexing-related parameters doc_mapper, indexing_directory, - 
indexing_settings: index_config.indexing_settings.clone(), + indexing_settings, split_store, max_concurrent_split_uploads_index, cooperative_indexing_permits: self.cooperative_indexing_permits.clone(), diff --git a/quickwit/quickwit-indexing/src/source/kafka_source.rs b/quickwit/quickwit-indexing/src/source/kafka_source.rs index bc5b400a9bc..5086616ebed 100644 --- a/quickwit/quickwit-indexing/src/source/kafka_source.rs +++ b/quickwit/quickwit-indexing/src/source/kafka_source.rs @@ -714,6 +714,10 @@ fn parse_client_params(client_params: JsonValue) -> anyhow::Result }; let mut client_config = ClientConfig::new(); for (key, value_json) in params { + if key == "indexing_settings" { + // used for QW per source settings override workaround + continue; + } let value = match value_json { JsonValue::Bool(value_bool) => value_bool.to_string(), JsonValue::Number(value_number) => value_number.to_string(), @@ -834,6 +838,7 @@ mod kafka_broker_tests { { let producer: &FutureProducer = &ClientConfig::new() .set("bootstrap.servers", "localhost:9092") + .set("broker.address.family", "v4") .set("statistics.interval.ms", "500") .set("api.version.request", "true") .set("debug", "all") @@ -1169,7 +1174,7 @@ mod kafka_broker_tests { #[tokio::test] async fn test_kafka_source_suggest_truncate() { let admin_client = create_admin_client(); - let topic = append_random_suffix("test-kafka-source--suggest-truncate--topic"); + let topic = append_random_suffix("test--source--suggest-truncate--topic"); create_topic(&admin_client, &topic, 2).await.unwrap(); let metastore = metastore_for_test(); diff --git a/quickwit/quickwit-ingest/src/ingest_v2/ingester.rs b/quickwit/quickwit-ingest/src/ingest_v2/ingester.rs index a88ca030e67..af8f7ece9f8 100644 --- a/quickwit/quickwit-ingest/src/ingest_v2/ingester.rs +++ b/quickwit/quickwit-ingest/src/ingest_v2/ingester.rs @@ -514,7 +514,7 @@ impl Ingester { continue; } let doc_mapper = shard.doc_mapper_opt.clone().expect("shard should be open"); - let validate_shard = shard.validate; + let validate_docs = shard.validate_docs; let follower_id_opt = shard.follower_id_opt().cloned(); let from_position_exclusive = shard.replication_position_inclusive.clone(); @@ -570,7 +570,7 @@ impl Ingester { // Total number of bytes (valid and invalid documents) let original_batch_num_bytes = doc_batch.num_bytes() as u64; - let (valid_doc_batch, parse_failures) = if validate_shard { + let (valid_doc_batch, parse_failures) = if validate_docs { validate_doc_batch(doc_batch, doc_mapper).await? } else { (doc_batch, Vec::new()) diff --git a/quickwit/quickwit-ingest/src/ingest_v2/models.rs b/quickwit/quickwit-ingest/src/ingest_v2/models.rs index 348c4aa3a1b..750570d55b9 100644 --- a/quickwit/quickwit-ingest/src/ingest_v2/models.rs +++ b/quickwit/quickwit-ingest/src/ingest_v2/models.rs @@ -52,7 +52,7 @@ pub(super) struct IngesterShard { pub doc_mapper_opt: Option>, /// Whether to validate documents in this shard. True if no preprocessing (VRL) will happen /// before indexing. - pub validate: bool, + pub validate_docs: bool, pub shard_status_tx: watch::Sender, pub shard_status_rx: watch::Receiver, /// Instant at which the shard was last written to. 
@@ -67,7 +67,7 @@ impl IngesterShard { truncation_position_inclusive: Position, doc_mapper: Arc, now: Instant, - validate: bool, + validate_docs: bool, ) -> Self { let shard_status = (shard_state, replication_position_inclusive.clone()); let (shard_status_tx, shard_status_rx) = watch::channel(shard_status); @@ -78,7 +78,7 @@ impl IngesterShard { truncation_position_inclusive, is_advertisable: false, doc_mapper_opt: Some(doc_mapper), - validate, + validate_docs, shard_status_tx, shard_status_rx, last_write_instant: now, @@ -103,7 +103,7 @@ impl IngesterShard { // anyway. is_advertisable: false, doc_mapper_opt: None, - validate: false, + validate_docs: false, shard_status_tx, shard_status_rx, last_write_instant: now, @@ -116,7 +116,7 @@ impl IngesterShard { truncation_position_inclusive: Position, doc_mapper_opt: Option>, now: Instant, - validate: bool, + validate_docs: bool, ) -> Self { let shard_status = (shard_state, replication_position_inclusive.clone()); let (shard_status_tx, shard_status_rx) = watch::channel(shard_status); @@ -127,7 +127,7 @@ impl IngesterShard { truncation_position_inclusive, is_advertisable: false, doc_mapper_opt, - validate, + validate_docs, shard_status_tx, shard_status_rx, last_write_instant: now, diff --git a/quickwit/quickwit-integration-tests/Cargo.toml b/quickwit/quickwit-integration-tests/Cargo.toml index 55308cff556..46c645ec10b 100644 --- a/quickwit/quickwit-integration-tests/Cargo.toml +++ b/quickwit/quickwit-integration-tests/Cargo.toml @@ -11,6 +11,9 @@ authors.workspace = true license.workspace = true [features] +kafka-broker-tests = [ + "quickwit-indexing/kafka", +] sqs-localstack-tests = [ "quickwit-indexing/sqs", "quickwit-indexing/sqs-localstack-tests", @@ -26,6 +29,7 @@ hyper = { workspace = true } hyper-util = { workspace = true } itertools = { workspace = true } rand = { workspace = true } +rdkafka = { workspace = true } reqwest = { workspace = true } rustls = { workspace = true } serde_json = { workspace = true } diff --git a/quickwit/quickwit-integration-tests/src/test_utils/cluster_sandbox.rs b/quickwit/quickwit-integration-tests/src/test_utils/cluster_sandbox.rs index 36fbadbbcea..b957bd8e348 100644 --- a/quickwit/quickwit-integration-tests/src/test_utils/cluster_sandbox.rs +++ b/quickwit/quickwit-integration-tests/src/test_utils/cluster_sandbox.rs @@ -48,7 +48,7 @@ use reqwest::Url; use serde_json::Value; use tempfile::TempDir; use tokio::net::TcpListener; -use tracing::debug; +use tracing::{debug, warn}; use super::shutdown::NodeShutdownHandle; @@ -178,7 +178,7 @@ impl ResolvedClusterConfig { pub async fn start(self) -> ClusterSandbox { rustls::crypto::ring::default_provider() .install_default() - .expect("rustls crypto ring default provider installation should not fail"); + .unwrap_or_else(|_| warn!("failed to install default ring crypto provider")); let mut node_shutdown_handles = Vec::new(); let runtimes_config = RuntimesConfig::light_for_tests(); diff --git a/quickwit/quickwit-integration-tests/src/tests/kafka_tests.rs b/quickwit/quickwit-integration-tests/src/tests/kafka_tests.rs new file mode 100644 index 00000000000..c2c95a3f769 --- /dev/null +++ b/quickwit/quickwit-integration-tests/src/tests/kafka_tests.rs @@ -0,0 +1,290 @@ +// Copyright 2021-Present Datadog, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use std::time::Duration; + +use quickwit_common::rand::append_random_suffix; +use quickwit_common::test_utils::wait_until_predicate; +use quickwit_config::ConfigFormat; +use quickwit_config::service::QuickwitService; +use quickwit_metastore::SplitState; +use quickwit_serve::ListSplitsQueryParams; +use rdkafka::admin::{AdminClient, AdminOptions, NewTopic, TopicReplication}; +use rdkafka::client::DefaultClientContext; +use rdkafka::config::ClientConfig; +use rdkafka::producer::{FutureProducer, FutureRecord}; + +use crate::test_utils::ClusterSandboxBuilder; + +fn create_admin_client() -> AdminClient { + ClientConfig::new() + .set("bootstrap.servers", "localhost:9092") + .set("broker.address.family", "v4") + .create() + .unwrap() +} + +async fn create_topic( + admin_client: &AdminClient, + topic: &str, + num_partitions: i32, +) -> anyhow::Result<()> { + admin_client + .create_topics( + &[NewTopic::new( + topic, + num_partitions, + TopicReplication::Fixed(1), + )], + &AdminOptions::new().operation_timeout(Some(Duration::from_secs(5))), + ) + .await? + .into_iter() + .collect::, _>>() + .map_err(|(topic, err_code)| { + anyhow::anyhow!( + "failed to create topic `{}`. error code: `{}`", + topic, + err_code + ) + })?; + Ok(()) +} + +async fn populate_topic(topic: &str) -> anyhow::Result<()> { + let producer: &FutureProducer = &ClientConfig::new() + .set("bootstrap.servers", "localhost:9092") + .set("broker.address.family", "v4") + .set("message.timeout.ms", "30000") + .create()?; + + let message = r#"{"message":"test","id":1}"#; + + producer + .send( + FutureRecord { + topic, + partition: None, + timestamp: None, + key: None::<&[u8]>, + payload: Some(message), + headers: None, + }, + Duration::from_secs(5), + ) + .await + .map_err(|(err, _)| err)?; + Ok(()) +} + +#[tokio::test] +async fn test_kafka_source() { + quickwit_common::setup_logging_for_tests(); + + let sandbox = ClusterSandboxBuilder::build_and_start_standalone().await; + let index_id = append_random_suffix("test-kafka-source"); + let topic = append_random_suffix("test-kafka-source-topic"); + + let admin_client = create_admin_client(); + create_topic(&admin_client, &topic, 1).await.unwrap(); + + let index_config = format!( + r#" + version: 0.8 + index_id: {index_id} + doc_mapping: + field_mappings: + - name: message + type: text + - name: id + type: i64 + indexing_settings: + commit_timeout_secs: 3 + "# + ); + + sandbox + .rest_client(QuickwitService::Indexer) + .indexes() + .create(index_config.clone(), ConfigFormat::Yaml, false) + .await + .unwrap(); + + let source_id = "test-kafka-source-no-override"; + let source_config = format!( + r#" + version: 0.7 + source_id: {source_id} + desired_num_pipelines: 1 + max_num_pipelines_per_indexer: 1 + source_type: kafka + params: + topic: {topic} + client_params: + bootstrap.servers: localhost:9092 + broker.address.family: v4 + auto.offset.reset: earliest + enable.auto.commit: false + input_format: json + "# + ); + + sandbox + .rest_client(QuickwitService::Indexer) + .sources(&index_id) + .create(source_config, ConfigFormat::Yaml) + .await + .unwrap(); + + 
populate_topic(&topic).await.unwrap(); + + let result = wait_until_predicate( + || async { + let splits_query_params = ListSplitsQueryParams { + split_states: Some(vec![SplitState::Published]), + ..Default::default() + }; + sandbox + .rest_client(QuickwitService::Indexer) + .splits(&index_id) + .list(splits_query_params) + .await + .map(|splits| !splits.is_empty()) + .unwrap_or(false) + }, + Duration::from_secs(15), + Duration::from_millis(500), + ) + .await; + + assert!( + result.is_ok(), + "Splits should be published within 15 seconds using index config settings" + ); + + sandbox.assert_hit_count(&index_id, "", 1).await; + + sandbox + .rest_client(QuickwitService::Indexer) + .indexes() + .delete(&index_id, false) + .await + .unwrap(); + + sandbox.shutdown().await.unwrap(); +} + +#[tokio::test] +async fn test_kafka_source_with_indexing_settings_override() { + quickwit_common::setup_logging_for_tests(); + + let sandbox = ClusterSandboxBuilder::build_and_start_standalone().await; + let index_id = append_random_suffix("test-kafka-indexing-settings-override"); + let topic = append_random_suffix("test-kafka-indexing-settings-override-topic"); + + let admin_client = create_admin_client(); + create_topic(&admin_client, &topic, 1).await.unwrap(); + + // Create index with high commit_timeout (300 seconds) + // This would normally mean splits take 5 minutes to commit + let index_config = format!( + r#" + version: 0.8 + index_id: {index_id} + doc_mapping: + field_mappings: + - name: message + type: text + - name: id + type: i64 + indexing_settings: + commit_timeout_secs: 300 + "# + ); + + sandbox + .rest_client(QuickwitService::Indexer) + .indexes() + .create(index_config.clone(), ConfigFormat::Yaml, false) + .await + .unwrap(); + + // Create Kafka source with indexing_settings override to lower commit_timeout to 3 seconds + // This tests that the source-level override works correctly + let source_id = "test-kafka-source"; + let source_config = format!( + r#" + version: 0.7 + source_id: {source_id} + desired_num_pipelines: 1 + max_num_pipelines_per_indexer: 1 + source_type: kafka + params: + topic: {topic} + client_params: + bootstrap.servers: localhost:9092 + broker.address.family: v4 + auto.offset.reset: earliest + enable.auto.commit: false + indexing_settings: + commit_timeout_secs: 3 + input_format: json + "# + ); + + sandbox + .rest_client(QuickwitService::Indexer) + .sources(&index_id) + .create(source_config, ConfigFormat::Yaml) + .await + .unwrap(); + + populate_topic(&topic).await.unwrap(); + + let result = wait_until_predicate( + || async { + let splits_query_params = ListSplitsQueryParams { + split_states: Some(vec![SplitState::Published]), + ..Default::default() + }; + sandbox + .rest_client(QuickwitService::Indexer) + .splits(&index_id) + .list(splits_query_params) + .await + .map(|splits| !splits.is_empty()) + .unwrap_or(false) + }, + Duration::from_secs(15), + Duration::from_millis(500), + ) + .await; + + assert!( + result.is_ok(), + "Splits should be published within 15 seconds when using indexing_settings override. If \ + this test fails, the override may not be working correctly." 
+ ); + + sandbox.assert_hit_count(&index_id, "", 1).await; + + sandbox + .rest_client(QuickwitService::Indexer) + .indexes() + .delete(&index_id, false) + .await + .unwrap(); + + sandbox.shutdown().await.unwrap(); +} diff --git a/quickwit/quickwit-integration-tests/src/tests/mod.rs b/quickwit/quickwit-integration-tests/src/tests/mod.rs index bbc5dcf814a..519537bb5fb 100644 --- a/quickwit/quickwit-integration-tests/src/tests/mod.rs +++ b/quickwit/quickwit-integration-tests/src/tests/mod.rs @@ -15,6 +15,8 @@ mod basic_tests; mod ingest_v1_tests; mod ingest_v2_tests; +#[cfg(feature = "kafka-broker-tests")] +mod kafka_tests; mod no_cp_tests; mod otlp_tests; #[cfg(feature = "sqs-localstack-tests")] diff --git a/quickwit/quickwit-metastore/src/tests/index.rs b/quickwit/quickwit-metastore/src/tests/index.rs index 6d7adf8ced7..6974e22e5af 100644 --- a/quickwit/quickwit-metastore/src/tests/index.rs +++ b/quickwit/quickwit-metastore/src/tests/index.rs @@ -677,9 +677,14 @@ pub async fn test_metastore_list_indexes, #[prost(int64, optional, tag = "5")] @@ -178,6 +181,12 @@ pub struct SearchRequest { pub search_after: ::core::option::Option, #[prost(enumeration = "CountHits", tag = "17")] pub count_hits: i32, + /// When an exact index ID is provided (not a pattern), the query fails only if + /// that index is not found and this parameter is set to `false`. + #[prost(bool, tag = "18")] + pub ignore_missing_indexes: bool, + #[prost(string, optional, tag = "19")] + pub split_id: ::core::option::Option<::prost::alloc::string::String>, } #[derive(serde::Serialize, serde::Deserialize, utoipa::ToSchema)] #[derive(Eq, Hash)] diff --git a/quickwit/quickwit-query/src/aggregations.rs b/quickwit/quickwit-query/src/aggregations.rs index eca275ad265..5318d7884c6 100644 --- a/quickwit/quickwit-query/src/aggregations.rs +++ b/quickwit/quickwit-query/src/aggregations.rs @@ -18,7 +18,8 @@ use tantivy::aggregation::Key as TantivyKey; use tantivy::aggregation::agg_result::{ AggregationResult as TantivyAggregationResult, AggregationResults as TantivyAggregationResults, BucketEntries as TantivyBucketEntries, BucketEntry as TantivyBucketEntry, - BucketResult as TantivyBucketResult, MetricResult as TantivyMetricResult, + BucketResult as TantivyBucketResult, CompositeBucketEntry as TantivyCompositeBucketEntry, + CompositeKey as TantivyCompositeKey, MetricResult as TantivyMetricResult, RangeBucketEntry as TantivyRangeBucketEntry, }; use tantivy::aggregation::metric::{ @@ -169,6 +170,13 @@ pub enum BucketResult { /// The upper bound error for the doc count of each term. 
doc_count_error_upper_bound: Option<u64>, }, + /// This is the composite aggregation result + Composite { + /// The buckets + buckets: Vec<CompositeBucketEntry>, + /// The key to start after when paginating + after_key: FxHashMap<String, CompositeKey>, + }, } impl From<TantivyBucketResult> for BucketResult { @@ -189,6 +197,10 @@ impl From<TantivyBucketResult> for BucketResult { sum_other_doc_count, doc_count_error_upper_bound, }, + TantivyBucketResult::Composite { buckets, after_key } => BucketResult::Composite { + buckets: buckets.into_iter().map(Into::into).collect(), + after_key: after_key.into_iter().map(|(k, v)| (k, v.into())).collect(), + }, } } } @@ -211,6 +223,10 @@ impl From<BucketResult> for TantivyBucketResult { sum_other_doc_count, doc_count_error_upper_bound, }, + BucketResult::Composite { buckets, after_key } => TantivyBucketResult::Composite { + buckets: buckets.into_iter().map(Into::into).collect(), + after_key: after_key.into_iter().map(|(k, v)| (k, v.into())).collect(), + }, } } } @@ -410,3 +426,75 @@ impl From<PercentilesMetricResult> for TantivyPercentilesMetricResult { TantivyPercentilesMetricResult { values } } } + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CompositeKey { + /// Boolean key + Bool(bool), + /// String key + Str(String), + /// `i64` key + I64(i64), + /// `u64` key + U64(u64), + /// `f64` key + F64(f64), + /// Null key + Null, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CompositeBucketEntry { + /// The identifier of the bucket. + pub key: FxHashMap<String, CompositeKey>, + /// Number of documents in the bucket. + pub doc_count: u64, + /// Sub-aggregations in this bucket. + pub sub_aggregation: AggregationResults, +} + +impl From<TantivyCompositeKey> for CompositeKey { + fn from(value: TantivyCompositeKey) -> CompositeKey { + match value { + TantivyCompositeKey::Bool(b) => CompositeKey::Bool(b), + TantivyCompositeKey::Str(s) => CompositeKey::Str(s), + TantivyCompositeKey::I64(i) => CompositeKey::I64(i), + TantivyCompositeKey::U64(u) => CompositeKey::U64(u), + TantivyCompositeKey::F64(f) => CompositeKey::F64(f), + TantivyCompositeKey::Null => CompositeKey::Null, + } + } +} + +impl From<CompositeKey> for TantivyCompositeKey { + fn from(value: CompositeKey) -> TantivyCompositeKey { + match value { + CompositeKey::Bool(b) => TantivyCompositeKey::Bool(b), + CompositeKey::Str(s) => TantivyCompositeKey::Str(s), + CompositeKey::I64(i) => TantivyCompositeKey::I64(i), + CompositeKey::U64(u) => TantivyCompositeKey::U64(u), + CompositeKey::F64(f) => TantivyCompositeKey::F64(f), + CompositeKey::Null => TantivyCompositeKey::Null, + } + } +} + +impl From<TantivyCompositeBucketEntry> for CompositeBucketEntry { + fn from(value: TantivyCompositeBucketEntry) -> CompositeBucketEntry { + CompositeBucketEntry { + key: value.key.into_iter().map(|(k, v)| (k, v.into())).collect(), + doc_count: value.doc_count, + sub_aggregation: value.sub_aggregation.into(), + } + } +} + +impl From<CompositeBucketEntry> for TantivyCompositeBucketEntry { + fn from(value: CompositeBucketEntry) -> TantivyCompositeBucketEntry { + TantivyCompositeBucketEntry { + key: value.key.into_iter().map(|(k, v)| (k, v.into())).collect(), + doc_count: value.doc_count, + sub_aggregation: value.sub_aggregation.into(), + } + } +} diff --git a/quickwit/quickwit-search/src/metrics_trackers.rs b/quickwit/quickwit-search/src/metrics_trackers.rs index 7f2f9fbbfb3..c48200acc0f 100644 --- a/quickwit/quickwit-search/src/metrics_trackers.rs +++ b/quickwit/quickwit-search/src/metrics_trackers.rs @@ -26,6 +26,7 @@ use crate::metrics::SEARCH_METRICS; // root +#[derive(Debug)] pub enum RootSearchMetricsStep { Plan, Exec { num_targeted_splits: usize }, @@ -85,14 +86,17 @@ impl<F> PinnedDrop for RootSearchMetricsFuture<F> { } } -impl
<F, T> Future for RootSearchMetricsFuture<F> -where F: Future<Output = Result<T, SearchError>> +impl<F> Future for RootSearchMetricsFuture<F> +where F: Future<Output = crate::Result<SearchResponse>> { - type Output = Result<T, SearchError>; + type Output = crate::Result<SearchResponse>; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let this = self.project(); let response = ready!(this.tracked.poll(cx)); + if let Err(err) = &response { + tracing::error!(?err, step = ?this.step, "root search failed"); + } *this.is_success = Some(response.is_ok()); Poll::Ready(Ok(response?)) } @@ -141,10 +145,10 @@ where F: Future<Output = Result<LeafSearchResponse, SearchError>> fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let this = self.project(); let response = ready!(this.tracked.poll(cx)); - *this.status = if response.is_ok() { - Some("success") - } else { - Some("error") + *this.status = match &response { + Ok(resp) if !resp.failed_splits.is_empty() => Some("partial-success"), + Ok(_) => Some("success"), + Err(_) => Some("error"), }; Poll::Ready(Ok(response?)) } diff --git a/quickwit/quickwit-search/src/root.rs b/quickwit/quickwit-search/src/root.rs index 4af335725b3..9dcbde4f7fe 100644 --- a/quickwit/quickwit-search/src/root.rs +++ b/quickwit/quickwit-search/src/root.rs @@ -366,6 +366,8 @@ fn simplify_search_request_for_scroll_api(req: &SearchRequest) -> crate::Result<SearchRequest> { // request is simplified after initial query, and we cache the hit count, so we don't need // to recompute it afterward. count_hits: quickwit_proto::search::CountHits::Underestimate as i32, + ignore_missing_indexes: req.ignore_missing_indexes, + split_id: req.split_id.clone(), }) } @@ -1139,6 +1141,18 @@ async fn refine_and_list_matches( metastore, ) .await?; + + if let Some(split_id) = &search_request.split_id { + for split_metadata in split_metadatas { + if &split_metadata.split_id == split_id { + return Ok(vec![split_metadata]); + } + } + return Err(SearchError::InvalidQuery(format!( + "split ID {split_id} not found for the given query" + ))); + } + Ok(split_metadatas) } @@ -1156,7 +1170,12 @@ async fn plan_splits_for_root_search( .deserialize_indexes_metadata() .await?; - check_all_index_metadata_found(&indexes_metadata[..], &search_request.index_id_patterns[..])?; + if !search_request.ignore_missing_indexes { + check_all_index_metadata_found( + &indexes_metadata[..], + &search_request.index_id_patterns[..], + )?; + } if indexes_metadata.is_empty() { return Ok((Vec::new(), HashMap::default())); @@ -1206,6 +1225,24 @@ pub async fn root_search( current_span.record("num_docs", num_docs); current_span.record("num_splits", num_splits); + if let Some(max_total_split_searches) = + searcher_context.searcher_config.max_total_split_searches + { + if max_total_split_searches < num_splits { + tracing::error!( + num_splits, + max_total_split_searches, + index=?search_request.index_id_patterns, + query=%search_request.query_ast, + "max total splits exceeded" + ); + return Err(SearchError::InvalidArgument(format!( + "Number of targeted splits {num_splits} exceeds the limit \ + {max_total_split_searches}" + ))); + } + } + let mut search_response_result = RootSearchMetricsFuture { start: start_instant, tracked: root_search_aux( @@ -1243,7 +1280,12 @@ pub async fn search_plan( .deserialize_indexes_metadata() .await?; - check_all_index_metadata_found(&indexes_metadata[..], &search_request.index_id_patterns[..])?; + if !search_request.ignore_missing_indexes { + check_all_index_metadata_found( + &indexes_metadata[..], + &search_request.index_id_patterns[..], + )?; + } if indexes_metadata.is_empty() { return Ok(SearchPlanResponse { result: serde_json::to_string(&SearchPlanResponseRest { 
@@ -3240,6 +3282,102 @@ mod tests { Ok(()) } + #[tokio::test] + async fn test_root_search_missing_index() -> anyhow::Result<()> { + let mut mock_metastore = MockMetastoreService::new(); + let index_metadata = IndexMetadata::for_test("test-index1", "ram:///test-index"); + let index_uid = index_metadata.index_uid.clone(); + mock_metastore + .expect_list_indexes_metadata() + .returning(move |_index_ids_query| { + Ok(ListIndexesMetadataResponse::for_test(vec![ + index_metadata.clone(), + ])) + }); + mock_metastore + .expect_list_splits() + .returning(move |_list_splits_request| { + let splits = vec![ + MockSplitBuilder::new("split1") + .with_index_uid(&index_uid) + .build(), + ]; + let splits_response = ListSplitsResponse::try_from_splits(splits).unwrap(); + Ok(ServiceStream::from(vec![Ok(splits_response)])) + }); + let mock_metastore_client = MetastoreServiceClient::from_mock(mock_metastore); + let mut mock_search_service = MockSearchService::new(); + mock_search_service.expect_leaf_search().returning( + |_leaf_search_req: quickwit_proto::search::LeafSearchRequest| { + Ok(quickwit_proto::search::LeafSearchResponse { + num_hits: 3, + partial_hits: vec![ + mock_partial_hit("split1", 3, 1), + mock_partial_hit("split1", 2, 2), + mock_partial_hit("split1", 1, 3), + ], + failed_splits: Vec::new(), + num_attempted_splits: 1, + ..Default::default() + }) + }, + ); + mock_search_service.expect_fetch_docs().returning( + |fetch_docs_req: quickwit_proto::search::FetchDocsRequest| { + Ok(quickwit_proto::search::FetchDocsResponse { + hits: get_doc_for_fetch_req(fetch_docs_req), + }) + }, + ); + let searcher_pool = searcher_pool_for_test([("127.0.0.1:1001", mock_search_service)]); + let search_job_placer = SearchJobPlacer::new(searcher_pool); + let cluster_client = ClusterClient::new(search_job_placer.clone()); + + let searcher_context = SearcherContext::for_test(); + + // search with ignore_missing_indexes=true succeeds + let search_request = quickwit_proto::search::SearchRequest { + index_id_patterns: vec!["test-index1".to_string(), "test-index2".to_string()], + query_ast: qast_json_helper("test", &["body"]), + max_hits: 10, + ignore_missing_indexes: true, + ..Default::default() + }; + let search_response = root_search( + &searcher_context, + search_request, + mock_metastore_client.clone(), + &cluster_client, + ) + .await + .unwrap(); + assert_eq!(search_response.num_hits, 3); + assert_eq!(search_response.hits.len(), 3); + + // search with ignore_missing_indexes=false fails + let search_request = quickwit_proto::search::SearchRequest { + index_id_patterns: vec!["test-index1".to_string(), "test-index2".to_string()], + query_ast: qast_json_helper("test", &["body"]), + max_hits: 10, + ignore_missing_indexes: false, + ..Default::default() + }; + let search_error = root_search( + &searcher_context, + search_request, + mock_metastore_client, + &cluster_client, + ) + .await + .unwrap_err(); + if let SearchError::IndexesNotFound { index_ids } = search_error { + assert_eq!(index_ids, vec!["test-index2".to_string()]); + } else { + panic!("unexpected error type: {search_error}"); + } + Ok(()) + } + #[tokio::test] async fn test_root_search_multiple_splits_retry_on_other_node() -> anyhow::Result<()> { let search_request = quickwit_proto::search::SearchRequest { @@ -4112,6 +4250,69 @@ mod tests { Ok(()) } + #[tokio::test] + async fn test_search_plan_missing_index() -> anyhow::Result<()> { + let mut mock_metastore = MockMetastoreService::new(); + let index_metadata = IndexMetadata::for_test("test-index1", 
"ram:///test-index"); + let index_uid = index_metadata.index_uid.clone(); + mock_metastore + .expect_list_indexes_metadata() + .returning(move |_index_ids_query| { + Ok(ListIndexesMetadataResponse::for_test(vec![ + index_metadata.clone(), + ])) + }); + mock_metastore + .expect_list_splits() + .returning(move |_filter| { + let splits = vec![ + MockSplitBuilder::new("split1") + .with_index_uid(&index_uid) + .build(), + MockSplitBuilder::new("split2") + .with_index_uid(&index_uid) + .build(), + ]; + let splits_response = ListSplitsResponse::try_from_splits(splits).unwrap(); + Ok(ServiceStream::from(vec![Ok(splits_response)])) + }); + let mock_metastore_service = MetastoreServiceClient::from_mock(mock_metastore); + + // plan with ignore_missing_indexes=true succeeds + search_plan( + quickwit_proto::search::SearchRequest { + index_id_patterns: vec!["test-index1".to_string(), "test-index2".to_string()], + query_ast: qast_json_helper("test-query", &["body"]), + max_hits: 10, + ignore_missing_indexes: true, + ..Default::default() + }, + mock_metastore_service.clone(), + ) + .await + .unwrap(); + + // plan with ignore_missing_indexes=false fails + let search_error = search_plan( + quickwit_proto::search::SearchRequest { + index_id_patterns: vec!["test-index1".to_string(), "test-index2".to_string()], + query_ast: qast_json_helper("test-query", &["body"]), + max_hits: 10, + ignore_missing_indexes: false, + ..Default::default() + }, + mock_metastore_service.clone(), + ) + .await + .unwrap_err(); + if let SearchError::IndexesNotFound { index_ids } = search_error { + assert_eq!(index_ids, vec!["test-index2".to_string()]); + } else { + panic!("unexpected error type: {search_error}"); + } + Ok(()) + } + #[test] fn test_extract_timestamp_range_from_ast() { use std::ops::Bound; diff --git a/quickwit/quickwit-serve/src/elasticsearch_api/model/multi_search.rs b/quickwit/quickwit-serve/src/elasticsearch_api/model/multi_search.rs index 0dbc9dab5a2..758a35f7d17 100644 --- a/quickwit/quickwit-serve/src/elasticsearch_api/model/multi_search.rs +++ b/quickwit/quickwit-serve/src/elasticsearch_api/model/multi_search.rs @@ -25,6 +25,7 @@ use crate::simple_list::{from_simple_list, to_simple_list}; // Multi search doc: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html +#[serde_as] #[serde_with::skip_serializing_none] #[derive(Default, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] @@ -50,6 +51,10 @@ pub struct MultiSearchQueryParams { pub ignore_throttled: Option, #[serde(default)] pub ignore_unavailable: Option, + /// List of indexes to search. 
+ #[serde_as(deserialize_as = "OneOrMany<_, PreferMany>")] + #[serde(default, rename = "index")] + pub indexes: Vec<String>, #[serde(default)] pub max_concurrent_searches: Option<u64>, #[serde(default)] @@ -90,8 +95,8 @@ pub struct MultiSearchHeader { #[serde(default)] pub ignore_unavailable: Option<bool>, #[serde_as(deserialize_as = "OneOrMany<_, PreferMany>")] - #[serde(default)] - pub index: Vec<String>, + #[serde(default, rename = "index")] + pub indexes: Vec<String>, #[serde(default)] pub preference: Option<String>, #[serde(default)] @@ -100,6 +105,26 @@ pub struct MultiSearchHeader { pub routing: Option<Vec<String>>, } +impl MultiSearchHeader { + pub fn apply_query_param_defaults(&mut self, defaults: &MultiSearchQueryParams) { + if self.allow_no_indices.is_none() { + self.allow_no_indices = defaults.allow_no_indices; + } + if self.expand_wildcards.is_none() { + self.expand_wildcards = defaults.expand_wildcards.clone(); + } + if self.ignore_unavailable.is_none() { + self.ignore_unavailable = defaults.ignore_unavailable; + } + if self.indexes.is_empty() { + self.indexes = defaults.indexes.clone(); + } + if self.routing.is_none() { + self.routing = defaults.routing.clone(); + } + } +} + #[derive(Serialize)] pub struct MultiSearchResponse { pub responses: Vec<MultiSearchSingleResponse>, diff --git a/quickwit/quickwit-serve/src/elasticsearch_api/rest_handler.rs b/quickwit/quickwit-serve/src/elasticsearch_api/rest_handler.rs index 29c23d41a29..607bfd17b6c 100644 --- a/quickwit/quickwit-serve/src/elasticsearch_api/rest_handler.rs +++ b/quickwit/quickwit-serve/src/elasticsearch_api/rest_handler.rs @@ -358,6 +358,7 @@ fn build_request_for_es_api( let max_hits = search_params.size.or(search_body.size).unwrap_or(10); let start_offset = search_params.from.or(search_body.from).unwrap_or(0); + let ignore_missing_indexes = search_params.ignore_unavailable.unwrap_or(false); let count_hits = match search_params .track_total_hits .or(search_body.track_total_hits) @@ -410,6 +411,8 @@ fn build_request_for_es_api( scroll_ttl_secs, search_after, count_hits, + ignore_missing_indexes, + split_id: None, }, has_doc_id_field, )) @@ -814,26 +817,28 @@ async fn es_compat_index_multi_search( let mut payload_lines = str_lines(str_payload); while let Some(line) = payload_lines.next() { - let request_header = serde_json::from_str::<MultiSearchHeader>(line).map_err(|err| { - SearchError::InvalidArgument(format!( - "failed to parse request header `{}...`: {}", - truncate_str(line, 20), - err - )) - })?; - if request_header.index.is_empty() { + let mut request_header = + serde_json::from_str::<MultiSearchHeader>(line).map_err(|err| { + SearchError::InvalidArgument(format!( + "failed to parse request header `{}...`: {}", + truncate_str(line, 20), + err + )) + })?; + request_header.apply_query_param_defaults(&multi_search_params); + if request_header.indexes.is_empty() { return Err(ElasticsearchError::from(SearchError::InvalidArgument( "`_msearch` request header must define at least one index".to_string(), ))); } - for index in &request_header.index { + for index in &request_header.indexes { validate_index_id_pattern(index, true).map_err(|err| { SearchError::InvalidArgument(format!( "request header contains an invalid index: {err}" )) })?; } - let index_ids_patterns = request_header.index.clone(); + let index_ids_patterns = request_header.indexes.clone(); let search_body = payload_lines .next() .ok_or_else(|| { diff --git a/quickwit/quickwit-serve/src/search_api/rest_handler.rs b/quickwit/quickwit-serve/src/search_api/rest_handler.rs index e80db93e4b1..671d7a6c2fa 100644 --- a/quickwit/quickwit-serve/src/search_api/rest_handler.rs +++ 
b/quickwit/quickwit-serve/src/search_api/rest_handler.rs @@ -210,6 +210,9 @@ pub struct SearchRequestQueryString { #[schema(value_type = bool)] #[serde(default)] pub allow_failed_splits: bool, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default)] + pub split_id: Option, } mod count_hits_from_bool { @@ -264,6 +267,8 @@ pub fn search_request_from_api_request( scroll_ttl_secs: None, search_after: None, count_hits: search_request.count_all.into(), + ignore_missing_indexes: false, + split_id: search_request.split_id, }; Ok(search_request) } diff --git a/quickwit/quickwit-storage/src/lib.rs b/quickwit/quickwit-storage/src/lib.rs index f808ac83286..31bbddcdd89 100644 --- a/quickwit/quickwit-storage/src/lib.rs +++ b/quickwit/quickwit-storage/src/lib.rs @@ -29,6 +29,7 @@ mod cache; mod debouncer; mod file_descriptor_cache; mod metrics; +mod metrics_wrappers; mod storage; mod timeout_and_retry_storage; pub use debouncer::AsyncDebouncer; diff --git a/quickwit/quickwit-storage/src/metrics.rs b/quickwit/quickwit-storage/src/metrics.rs index 43ef588e192..064448e0270 100644 --- a/quickwit/quickwit-storage/src/metrics.rs +++ b/quickwit/quickwit-storage/src/metrics.rs @@ -16,7 +16,7 @@ use once_cell::sync::Lazy; use quickwit_common::metrics::{ - GaugeGuard, Histogram, IntCounter, IntCounterVec, IntGauge, new_counter, new_counter_vec, + GaugeGuard, HistogramVec, IntCounter, IntCounterVec, IntGauge, new_counter, new_counter_vec, new_gauge, new_histogram_vec, }; @@ -30,19 +30,13 @@ pub struct StorageMetrics { pub searcher_split_cache: CacheMetrics, pub get_slice_timeout_successes: [IntCounter; 3], pub get_slice_timeout_all_timeouts: IntCounter, - pub object_storage_get_total: IntCounter, - pub object_storage_get_errors_total: IntCounterVec<1>, + pub object_storage_requests_total: IntCounterVec<2>, + pub object_storage_request_duration: HistogramVec<2>, pub object_storage_get_slice_in_flight_count: IntGauge, pub object_storage_get_slice_in_flight_num_bytes: IntGauge, - pub object_storage_put_total: IntCounter, - pub object_storage_put_parts: IntCounter, - pub object_storage_download_num_bytes: IntCounter, - pub object_storage_upload_num_bytes: IntCounter, - - pub object_storage_delete_requests_total: IntCounter, - pub object_storage_bulk_delete_requests_total: IntCounter, - pub object_storage_delete_request_duration: Histogram, - pub object_storage_bulk_delete_request_duration: Histogram, + pub object_storage_download_num_bytes: IntCounterVec<1>, + pub object_storage_download_errors: IntCounterVec<1>, + pub object_storage_upload_num_bytes: IntCounterVec<1>, } impl Default for StorageMetrics { @@ -63,31 +57,6 @@ impl Default for StorageMetrics { let get_slice_timeout_all_timeouts = get_slice_timeout_outcome_total_vec.with_label_values(["all_timeouts"]); - let object_storage_requests_total = new_counter_vec( - "object_storage_requests_total", - "Total number of object storage requests performed.", - "storage", - &[], - ["action"], - ); - let object_storage_delete_requests_total = - object_storage_requests_total.with_label_values(["delete_object"]); - let object_storage_bulk_delete_requests_total = - object_storage_requests_total.with_label_values(["delete_objects"]); - - let object_storage_request_duration = new_histogram_vec( - "object_storage_request_duration_seconds", - "Duration of object storage requests in seconds.", - "storage", - &[], - ["action"], - vec![0.1, 0.5, 1.0, 5.0, 10.0, 30.0, 60.0], - ); - let object_storage_delete_request_duration = - 
object_storage_request_duration.with_label_values(["delete_object"]); - let object_storage_bulk_delete_request_duration = - object_storage_request_duration.with_label_values(["delete_objects"]); - StorageMetrics { fast_field_cache: CacheMetrics::for_component("fastfields"), fd_cache_metrics: CacheMetrics::for_component("fd"), @@ -97,62 +66,63 @@ impl Default for StorageMetrics { split_footer_cache: CacheMetrics::for_component("splitfooter"), get_slice_timeout_successes, get_slice_timeout_all_timeouts, - object_storage_get_total: new_counter( - "object_storage_gets_total", - "Number of objects fetched. Might be lower than get_slice_timeout_outcome if \ - queries are debounced.", + object_storage_requests_total: new_counter_vec( + "object_storage_requests_total", + "Number of requests to the object store, by action and status. Requests are \ + recorded when the response headers are returned, download failures will not \ + appear as errors.", "storage", &[], + ["action", "status"], ), - object_storage_get_errors_total: new_counter_vec::<1>( - "object_storage_get_errors_total", - "Number of GetObject errors.", + object_storage_request_duration: new_histogram_vec( + "object_storage_request_duration", + "Durations until the response headers are returned from the object store, by \ + action and status. This does not measure the download time for the body content.", "storage", &[], - ["code"], + ["action", "status"], + vec![0.1, 0.5, 1.0, 5.0, 10.0, 30.0, 60.0], ), object_storage_get_slice_in_flight_count: new_gauge( "object_storage_get_slice_in_flight_count", - "Number of GetObject for which the memory was allocated but the download is still \ - in progress.", + "Number of get_object for which the memory was allocated but the download is \ + still in progress.", "storage", &[], ), object_storage_get_slice_in_flight_num_bytes: new_gauge( "object_storage_get_slice_in_flight_num_bytes", - "Memory allocated for GetObject requests that are still in progress.", + "Memory allocated for get_object requests that are still in progress.", "storage", &[], ), - object_storage_put_total: new_counter( - "object_storage_puts_total", - "Number of objects uploaded. May differ from object_storage_requests_parts due to \ - multipart upload.", + object_storage_download_num_bytes: new_counter_vec( + "object_storage_download_num_bytes", + "Amount of data downloaded from object storage.", "storage", &[], + ["status"], ), - object_storage_put_parts: new_counter( - "object_storage_puts_parts", - "Number of object parts uploaded.", - "", - &[], - ), - object_storage_download_num_bytes: new_counter( - "object_storage_download_num_bytes", - "Amount of data downloaded from an object storage.", + object_storage_download_errors: new_counter_vec( + "object_storage_download_errors", + // Download errors are recorded separately because the associated + // get_object requests were already recorded as successful in + // object_storage_requests_total + "Number of download requests that received successful response headers but failed \ + during download.", "storage", &[], + ["status"], ), - object_storage_upload_num_bytes: new_counter( + object_storage_upload_num_bytes: new_counter_vec( "object_storage_upload_num_bytes", - "Amount of data uploaded to an object storage.", + "Amount of data uploaded to object storage. 
The value recorded for failed and \ + aborted uploads is the full payload size.", "storage", &[], + ["status"], ), - object_storage_delete_requests_total, - object_storage_bulk_delete_requests_total, - object_storage_delete_request_duration, - object_storage_bulk_delete_request_duration, } } } diff --git a/quickwit/quickwit-storage/src/metrics_wrappers.rs b/quickwit/quickwit-storage/src/metrics_wrappers.rs new file mode 100644 index 00000000000..f09d1f0d92d --- /dev/null +++ b/quickwit/quickwit-storage/src/metrics_wrappers.rs @@ -0,0 +1,482 @@ +// Copyright 2021-Present Datadog, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use std::io; +use std::marker::PhantomData; +use std::pin::Pin; +use std::task::{Context, Poll, ready}; +use std::time::Instant; + +use pin_project::{pin_project, pinned_drop}; +use tokio::io::{AsyncBufRead, AsyncWrite}; + +use crate::STORAGE_METRICS; + +#[derive(Clone, Copy, Debug)] +pub enum ActionLabel { + AbortMultipartUpload, + CompleteMultipartUpload, + CreateMultipartUpload, + DeleteObject, + DeleteObjects, + GetObject, + HeadObject, + ListObjects, + PutObject, + UploadPart, +} + +impl ActionLabel { + fn as_str(&self) -> &'static str { + match self { + ActionLabel::AbortMultipartUpload => "abort_multipart_upload", + ActionLabel::CompleteMultipartUpload => "complete_multipart_upload", + ActionLabel::CreateMultipartUpload => "create_multipart_upload", + ActionLabel::DeleteObject => "delete_object", + ActionLabel::DeleteObjects => "delete_objects", + ActionLabel::GetObject => "get_object", + ActionLabel::HeadObject => "head_object", + ActionLabel::ListObjects => "list_objects", + ActionLabel::PutObject => "put_object", + ActionLabel::UploadPart => "upload_part", + } + } +} + +pub enum RequestStatus { + Pending, + // only useful on feature="azure" + #[allow(dead_code)] + Done, + Ready(String), +} + +/// Converts an object store client SDK Result<> to the [Status] that should be +/// recorded in the metrics. +/// +/// The `Marker` type is necessary to avoid conflicting implementations of the +/// trait. +pub trait AsRequestStatus { + fn as_status(&self) -> RequestStatus; +} + +/// Wrapper around object store requests to record metrics, including cancellation. 
+#[pin_project(PinnedDrop)] +pub struct RequestMetricsWrapper<F, Marker> +where + F: Future, + F::Output: AsRequestStatus<Marker>, +{ + #[pin] + tracked: F, + action: ActionLabel, + start: Option<Instant>, + uploaded_bytes: Option<u64>, + status: RequestStatus, + _marker: PhantomData<Marker>, +} + +#[pinned_drop] +impl<F, Marker> PinnedDrop for RequestMetricsWrapper<F, Marker> +where + F: Future, + F::Output: AsRequestStatus<Marker>, +{ + fn drop(self: Pin<&mut Self>) { + let status = match &self.status { + RequestStatus::Pending => "cancelled", + RequestStatus::Done => return, + RequestStatus::Ready(s) => s.as_str(), + }; + let label_values = [self.action.as_str(), status]; + STORAGE_METRICS + .object_storage_requests_total + .with_label_values(label_values) + .inc(); + if let Some(start) = self.start { + STORAGE_METRICS + .object_storage_request_duration + .with_label_values(label_values) + .observe(start.elapsed().as_secs_f64()); + } + if let Some(bytes) = self.uploaded_bytes { + STORAGE_METRICS + .object_storage_upload_num_bytes + .with_label_values([status]) + .inc_by(bytes); + } + } +} + +impl<F, Marker> Future for RequestMetricsWrapper<F, Marker> +where + F: Future, + F::Output: AsRequestStatus<Marker>, +{ + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { + let this = self.project(); + let response = ready!(this.tracked.poll(cx)); + *this.status = response.as_status(); + + Poll::Ready(response) + } +} + +pub trait RequestMetricsWrapperExt<F, Marker> +where + F: Future, + F::Output: AsRequestStatus<Marker>, +{ + fn with_count_metric(self, action: ActionLabel) -> RequestMetricsWrapper<F, Marker>; + + fn with_count_and_duration_metrics( + self, + action: ActionLabel, + ) -> RequestMetricsWrapper<F, Marker>; + + fn with_count_and_upload_metrics( + self, + action: ActionLabel, + bytes: u64, + ) -> RequestMetricsWrapper<F, Marker>; +} + +impl<F, Marker> RequestMetricsWrapperExt<F, Marker> for F +where + F: Future, + F::Output: AsRequestStatus<Marker>, +{ + fn with_count_metric(self, action: ActionLabel) -> RequestMetricsWrapper<F, Marker> { + RequestMetricsWrapper { + tracked: self, + action, + status: RequestStatus::Pending, + start: None, + uploaded_bytes: None, + _marker: PhantomData, + } + } + + fn with_count_and_duration_metrics( + self, + action: ActionLabel, + ) -> RequestMetricsWrapper<F, Marker> { + RequestMetricsWrapper { + tracked: self, + action, + status: RequestStatus::Pending, + start: Some(Instant::now()), + uploaded_bytes: None, + _marker: PhantomData, + } + } + + fn with_count_and_upload_metrics( + self, + action: ActionLabel, + bytes: u64, + ) -> RequestMetricsWrapper<F, Marker> { + RequestMetricsWrapper { + tracked: self, + action, + status: RequestStatus::Pending, + start: None, + uploaded_bytes: Some(bytes), + _marker: PhantomData, + } + } +} + +mod s3_impls { + use aws_sdk_s3::error::{ProvideErrorMetadata, SdkError}; + + use super::{AsRequestStatus, RequestStatus}; + + pub struct S3Marker; + + impl<T, E: ProvideErrorMetadata> AsRequestStatus<S3Marker> for Result<T, SdkError<E>> { + fn as_status(&self) -> RequestStatus { + let status_str = match self { + Ok(_) => "success".to_string(), + Err(SdkError::ConstructionFailure(_)) => "construction_failure".to_string(), + Err(SdkError::TimeoutError(_)) => "timeout_error".to_string(), + Err(SdkError::DispatchFailure(_)) => "dispatch_failure".to_string(), + Err(SdkError::ResponseError(_)) => "response_error".to_string(), + Err(e @ SdkError::ServiceError(_)) => e + .meta() + .code() + .unwrap_or("unknown_service_error") + .to_string(), + Err(_) => "unknown".to_string(), + }; + RequestStatus::Ready(status_str) + } + } +} + +#[cfg(feature = "azure")] +mod azure_impl { + use super::{AsRequestStatus, RequestStatus}; + + pub struct AzureMarker; + + impl 
AsRequestStatus for Result { + fn as_status(&self) -> RequestStatus { + let Err(err) = self else { + return RequestStatus::Ready("success".to_string()); + }; + let err_status_str = match err.kind() { + azure_storage::ErrorKind::HttpResponse { status, .. } => status.to_string(), + azure_storage::ErrorKind::Credential => "credential".to_string(), + azure_storage::ErrorKind::Io => "io".to_string(), + azure_storage::ErrorKind::DataConversion => "data_conversion".to_string(), + _ => "unknown".to_string(), + }; + RequestStatus::Ready(err_status_str) + } + } + + // The Azure SDK get_blob request returns Option because it chunks + // the download into a stream of get requests. + impl AsRequestStatus for Option> { + fn as_status(&self) -> RequestStatus { + match self { + None => RequestStatus::Done, + Some(res) => res.as_status(), + } + } + } +} + +pub enum DownloadStatus { + InProgress, + Done, + Failed(&'static str), +} + +/// Track io errors during downloads. +/// +/// Downloads are a bit different from other requests because the request might +/// fail while getting the bytes from the response body, long after getting a +/// successful response header. +#[pin_project(PinnedDrop)] +struct DownloadMetricsWrapper<'a, R, W> +where + R: AsyncBufRead + Unpin + ?Sized, + W: AsyncWrite + Unpin + ?Sized, +{ + #[pin] + tracked: copy_buf::CopyBuf<'a, R, W>, + status: DownloadStatus, +} + +#[pinned_drop] +impl<'a, R, W> PinnedDrop for DownloadMetricsWrapper<'a, R, W> +where + R: AsyncBufRead + Unpin + ?Sized, + W: AsyncWrite + Unpin + ?Sized, +{ + fn drop(self: Pin<&mut Self>) { + let error_opt = match &self.status { + DownloadStatus::InProgress => Some("cancelled"), + DownloadStatus::Failed(e) => Some(*e), + DownloadStatus::Done => None, + }; + + STORAGE_METRICS + .object_storage_download_num_bytes + .with_label_values([error_opt.unwrap_or("success")]) + .inc_by(self.tracked.amt); + + if let Some(error) = error_opt { + STORAGE_METRICS + .object_storage_download_errors + .with_label_values([error]) + .inc(); + } + } +} + +impl<'a, R, W> Future for DownloadMetricsWrapper<'a, R, W> +where + R: AsyncBufRead + Unpin + ?Sized, + W: AsyncWrite + Unpin + ?Sized, +{ + type Output = io::Result; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.project(); + let response = ready!(this.tracked.poll(cx)); + *this.status = match &response { + Ok(_) => DownloadStatus::Done, + Err(e) => DownloadStatus::Failed(io_error_as_label(e.kind())), + }; + Poll::Ready(response) + } +} + +pub async fn copy_with_download_metrics<'a, R, W>( + reader: &'a mut R, + writer: &'a mut W, +) -> io::Result +where + R: AsyncBufRead + Unpin + ?Sized, + W: AsyncWrite + Unpin + ?Sized, +{ + DownloadMetricsWrapper { + tracked: copy_buf::CopyBuf { + reader, + writer, + amt: 0, + }, + status: DownloadStatus::InProgress, + } + .await +} + +/// This is a fork of `tokio::io::copy_buf` that enables tracking the number of +/// bytes transferred. This estimate should be accurate as long as the network +/// is the bottleneck. 
+mod copy_buf { + + use std::future::Future; + use std::io; + use std::pin::Pin; + use std::task::{Context, Poll, ready}; + + use tokio::io::{AsyncBufRead, AsyncWrite}; + + #[derive(Debug)] + #[must_use = "futures do nothing unless you `.await` or poll them"] + pub struct CopyBuf<'a, R: ?Sized, W: ?Sized> { + pub reader: &'a mut R, + pub writer: &'a mut W, + pub amt: u64, + } + + impl<R, W> Future for CopyBuf<'_, R, W> + where + R: AsyncBufRead + Unpin + ?Sized, + W: AsyncWrite + Unpin + ?Sized, + { + type Output = io::Result<u64>; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { + loop { + let me = &mut *self; + let buffer = ready!(Pin::new(&mut *me.reader).poll_fill_buf(cx))?; + if buffer.is_empty() { + ready!(Pin::new(&mut self.writer).poll_flush(cx))?; + return Poll::Ready(Ok(self.amt)); + } + + let i = ready!(Pin::new(&mut *me.writer).poll_write(cx, buffer))?; + if i == 0 { + return Poll::Ready(Err(std::io::ErrorKind::WriteZero.into())); + } + self.amt += i as u64; + Pin::new(&mut *self.reader).consume(i); + } + } + } +} + +fn io_error_as_label(error: io::ErrorKind) -> &'static str { + use io::ErrorKind::*; + // most of these variants are not expected to happen + match error { + AddrInUse => "addr_in_use", + AddrNotAvailable => "addr_not_available", + AlreadyExists => "already_exists", + ArgumentListTooLong => "argument_list_too_long", + BrokenPipe => "broken_pipe", + ConnectionAborted => "connection_aborted", + ConnectionRefused => "connection_refused", + ConnectionReset => "connection_reset", + CrossesDevices => "crosses_devices", + Deadlock => "deadlock", + DirectoryNotEmpty => "directory_not_empty", + ExecutableFileBusy => "executable_file_busy", + FileTooLarge => "file_too_large", + HostUnreachable => "host_unreachable", + Interrupted => "interrupted", + InvalidData => "invalid_data", + InvalidFilename => "invalid_filename", + InvalidInput => "invalid_input", + IsADirectory => "is_a_directory", + NetworkDown => "network_down", + NetworkUnreachable => "network_unreachable", + NotADirectory => "not_a_directory", + NotConnected => "not_connected", + NotFound => "not_found", + NotSeekable => "not_seekable", + Other => "other", + OutOfMemory => "out_of_memory", + PermissionDenied => "permission_denied", + QuotaExceeded => "quota_exceeded", + ReadOnlyFilesystem => "read_only_filesystem", + ResourceBusy => "resource_busy", + StaleNetworkFileHandle => "stale_network_file_handle", + StorageFull => "storage_full", + TimedOut => "timed_out", + TooManyLinks => "too_many_links", + UnexpectedEof => "unexpected_eof", + Unsupported => "unsupported", + WouldBlock => "would_block", + WriteZero => "write_zero", + _ => "uncategorized", + } +} + +#[cfg(feature = "gcs")] +pub mod opendal_helpers { + use quickwit_common::metrics::HistogramTimer; + + use super::*; + + /// Records a request occurrence for this action with unknown status. + pub fn record_request(action: ActionLabel) { + STORAGE_METRICS + .object_storage_requests_total + .with_label_values([action.as_str(), "unknown"]) + .inc(); + } + + /// Records an upload volume for this action with unknown status. + pub fn record_upload(bytes: u64) { + STORAGE_METRICS + .object_storage_upload_num_bytes + .with_label_values(["unknown"]) + .inc_by(bytes); + } + + /// Records a download volume for this action with unknown status. 
+ pub fn record_download(bytes: u64) { + STORAGE_METRICS + .object_storage_download_num_bytes + .with_label_values(["unknown"]) + .inc_by(bytes); + } + + /// Records a request occurrence for this action with unknown status. + pub fn record_request_with_timer(action: ActionLabel) -> HistogramTimer { + record_request(action); + STORAGE_METRICS + .object_storage_request_duration + .with_label_values([action.as_str(), "unknown"]) + .start_timer() + } +} diff --git a/quickwit/quickwit-storage/src/object_storage/azure_blob_storage.rs b/quickwit/quickwit-storage/src/object_storage/azure_blob_storage.rs index d4c9bd67d84..c92cd71e737 100644 --- a/quickwit/quickwit-storage/src/object_storage/azure_blob_storage.rs +++ b/quickwit/quickwit-storage/src/object_storage/azure_blob_storage.rs @@ -45,10 +45,11 @@ use tracing::{instrument, warn}; use crate::debouncer::DebouncedStorage; use crate::metrics::object_storage_get_slice_in_flight_guards; +use crate::metrics_wrappers::{ActionLabel, RequestMetricsWrapperExt, copy_with_download_metrics}; use crate::storage::SendableAsync; use crate::{ - BulkDeleteError, DeleteFailure, MultiPartPolicy, PutPayload, STORAGE_METRICS, Storage, - StorageError, StorageErrorKind, StorageFactory, StorageResolverError, StorageResult, + BulkDeleteError, DeleteFailure, MultiPartPolicy, PutPayload, Storage, StorageError, + StorageErrorKind, StorageFactory, StorageResolverError, StorageResult, }; /// Azure object storage resolver. @@ -242,10 +243,6 @@ impl AzureBlobStorage { name: &'a str, payload: Box, ) -> StorageResult<()> { - crate::STORAGE_METRICS.object_storage_put_parts.inc(); - crate::STORAGE_METRICS - .object_storage_upload_num_bytes - .inc_by(payload.len()); retry(&self.retry_params, || async { let data = Bytes::from(payload.read_all().await?.to_vec()); let hash = azure_storage_blobs::prelude::Hash::from(md5::compute(&data[..]).0); @@ -254,6 +251,7 @@ impl AzureBlobStorage { .put_block_blob(data) .hash(hash) .into_future() + .with_count_and_upload_metrics(ActionLabel::PutObject, payload.len()) .await?; Result::<(), AzureErrorWrapper>::Ok(()) }) @@ -278,10 +276,6 @@ impl AzureBlobStorage { .map(|(num, range)| { let moved_blob_client = blob_client.clone(); let moved_payload = payload.clone(); - crate::STORAGE_METRICS.object_storage_put_parts.inc(); - crate::STORAGE_METRICS - .object_storage_upload_num_bytes - .inc_by(range.end - range.start); async move { retry(&self.retry_params, || async { // zero pad block ids to make them sortable as strings @@ -294,6 +288,10 @@ impl AzureBlobStorage { .put_block(block_id.clone(), data) .hash(hash) .into_future() + .with_count_and_upload_metrics( + ActionLabel::UploadPart, + range.end - range.start, + ) .await?; Result::<_, AzureErrorWrapper>::Ok(block_id) }) @@ -323,6 +321,7 @@ impl AzureBlobStorage { blob_client .put_block_list(block_list) .into_future() + .with_count_metric(ActionLabel::CompleteMultipartUpload) .await .map_err(AzureErrorWrapper::from)?; @@ -339,6 +338,7 @@ impl Storage for AzureBlobStorage { .max_results(NonZeroU32::new(1u32).expect("1 is always non-zero.")) .into_stream() .next() + .with_count_metric(ActionLabel::ListObjects) .await { let _ = first_blob_result?; @@ -351,7 +351,6 @@ impl Storage for AzureBlobStorage { path: &Path, payload: Box, ) -> crate::StorageResult<()> { - crate::STORAGE_METRICS.object_storage_put_total.inc(); let name = self.blob_name(path); let total_len = payload.len(); let part_num_bytes = self.multipart_policy.part_num_bytes(total_len); @@ -369,7 +368,11 @@ impl Storage for AzureBlobStorage 
{ let name = self.blob_name(path); let mut output_stream = self.container_client.blob_client(name).get().into_stream(); - while let Some(chunk_result) = output_stream.next().await { + while let Some(chunk_result) = output_stream + .next() + .with_count_metric(ActionLabel::GetObject) + .await + { let chunk_response = chunk_result.map_err(AzureErrorWrapper::from)?; let chunk_response_body_stream = chunk_response .data @@ -377,10 +380,7 @@ impl Storage for AzureBlobStorage { .into_async_read() .compat(); let mut body_stream_reader = BufReader::new(chunk_response_body_stream); - let num_bytes_copied = tokio::io::copy_buf(&mut body_stream_reader, output).await?; - STORAGE_METRICS - .object_storage_download_num_bytes - .inc_by(num_bytes_copied); + copy_with_download_metrics(&mut body_stream_reader, output).await?; } output.flush().await?; Ok(()) @@ -393,6 +393,7 @@ impl Storage for AzureBlobStorage { .blob_client(blob_name) .delete() .into_future() + .with_count_metric(ActionLabel::DeleteObject) .await .map_err(|err| AzureErrorWrapper::from(err).into()); ignore_error_kind!(StorageErrorKind::NotFound, delete_res)?; @@ -515,6 +516,7 @@ impl Storage for AzureBlobStorage { .blob_client(name) .get_properties() .into_future() + .with_count_metric(ActionLabel::HeadObject) .await; match properties_result { Ok(response) => Ok(response.blob.properties.content_length), @@ -537,7 +539,7 @@ async fn extract_range_data_and_hash( .await? .into_async_read(); let mut buf: Vec = Vec::with_capacity(range.count()); - tokio::io::copy(&mut reader, &mut buf).await?; + tokio::io::copy_buf(&mut reader, &mut buf).await?; let data = Bytes::from(buf); let hash = md5::compute(&data[..]); Ok((data, hash)) @@ -568,7 +570,11 @@ async fn download_all( output: &mut Vec, ) -> Result<(), AzureErrorWrapper> { output.clear(); - while let Some(chunk_result) = chunk_stream.next().await { + while let Some(chunk_result) = chunk_stream + .next() + .with_count_metric(ActionLabel::GetObject) + .await + { let chunk_response = chunk_result?; let chunk_response_body_stream = chunk_response .data @@ -576,10 +582,7 @@ async fn download_all( .into_async_read() .compat(); let mut body_stream_reader = BufReader::new(chunk_response_body_stream); - let num_bytes_copied = tokio::io::copy_buf(&mut body_stream_reader, output).await?; - crate::STORAGE_METRICS - .object_storage_download_num_bytes - .inc_by(num_bytes_copied); + copy_with_download_metrics(&mut body_stream_reader, output).await?; } // When calling `get_all`, the Vec capacity is not properly set. output.shrink_to_fit(); diff --git a/quickwit/quickwit-storage/src/object_storage/error.rs b/quickwit/quickwit-storage/src/object_storage/error.rs index 5f60fe1f944..8a7efc13332 100644 --- a/quickwit/quickwit-storage/src/object_storage/error.rs +++ b/quickwit/quickwit-storage/src/object_storage/error.rs @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use aws_sdk_s3::error::{DisplayErrorContext, ProvideErrorMetadata, SdkError}; +use aws_sdk_s3::error::{DisplayErrorContext, SdkError}; use aws_sdk_s3::operation::abort_multipart_upload::AbortMultipartUploadError; use aws_sdk_s3::operation::complete_multipart_upload::CompleteMultipartUploadError; use aws_sdk_s3::operation::create_multipart_upload::CreateMultipartUploadError; @@ -62,11 +62,6 @@ pub trait ToStorageErrorKind { impl ToStorageErrorKind for GetObjectError { fn to_storage_error_kind(&self) -> StorageErrorKind { - let error_code = self.code().unwrap_or("unknown"); - crate::STORAGE_METRICS - .object_storage_get_errors_total - .with_label_values([error_code]) - .inc(); match self { GetObjectError::InvalidObjectState(_) => StorageErrorKind::Service, GetObjectError::NoSuchKey(_) => StorageErrorKind::NotFound, diff --git a/quickwit/quickwit-storage/src/object_storage/s3_compatible_storage.rs b/quickwit/quickwit-storage/src/object_storage/s3_compatible_storage.rs index e1cdfd16b70..7f68375b3fb 100644 --- a/quickwit/quickwit-storage/src/object_storage/s3_compatible_storage.rs +++ b/quickwit/quickwit-storage/src/object_storage/s3_compatible_storage.rs @@ -45,11 +45,12 @@ use tokio::sync::Semaphore; use tracing::{info, instrument, warn}; use crate::metrics::object_storage_get_slice_in_flight_guards; +use crate::metrics_wrappers::{ActionLabel, RequestMetricsWrapperExt, copy_with_download_metrics}; use crate::object_storage::MultiPartPolicy; use crate::storage::SendableAsync; use crate::{ - BulkDeleteError, DeleteFailure, OwnedBytes, STORAGE_METRICS, Storage, StorageError, - StorageErrorKind, StorageResolverError, StorageResult, + BulkDeleteError, DeleteFailure, OwnedBytes, Storage, StorageError, StorageErrorKind, + StorageResolverError, StorageResult, }; /// Semaphore to limit the number of concurrent requests to the object store. Some object stores @@ -288,11 +289,6 @@ impl S3CompatibleObjectStorage { .await .map_err(|io_error| Retry::Permanent(StorageError::from(io_error)))?; - crate::STORAGE_METRICS.object_storage_put_parts.inc(); - crate::STORAGE_METRICS - .object_storage_upload_num_bytes - .inc_by(len); - self.s3_client .put_object() .bucket(bucket) @@ -300,6 +296,7 @@ impl S3CompatibleObjectStorage { .body(body) .content_length(len as i64) .send() + .with_count_and_upload_metrics(ActionLabel::PutObject, len) .await .map_err(|sdk_error| { if sdk_error.is_retryable() { @@ -334,6 +331,7 @@ impl S3CompatibleObjectStorage { .bucket(self.bucket.clone()) .key(key) .send() + .with_count_metric(ActionLabel::CreateMultipartUpload) .await }) .await? 
@@ -423,11 +421,6 @@ impl S3CompatibleObjectStorage { .map_err(Retry::Permanent)?; let md5 = BASE64_STANDARD.encode(part.md5.0); - crate::STORAGE_METRICS.object_storage_put_parts.inc(); - crate::STORAGE_METRICS - .object_storage_upload_num_bytes - .inc_by(part.len()); - let upload_part_output = self .s3_client .upload_part() @@ -439,6 +432,7 @@ impl S3CompatibleObjectStorage { .part_number(part.part_number as i32) .upload_id(upload_id.0) .send() + .with_count_and_upload_metrics(ActionLabel::UploadPart, part.len()) .await .map_err(|s3_err| { if s3_err.is_retryable() { @@ -518,6 +512,7 @@ impl S3CompatibleObjectStorage { .multipart_upload(completed_upload.clone()) .upload_id(upload_id) .send() + .with_count_metric(ActionLabel::CompleteMultipartUpload) .await }) .await?; @@ -532,6 +527,7 @@ impl S3CompatibleObjectStorage { .key(key) .upload_id(upload_id) .send() + .with_count_metric(ActionLabel::AbortMultipartUpload) .await }) .await?; @@ -546,8 +542,6 @@ impl S3CompatibleObjectStorage { let key = self.key(path); let range_str = range_opt.map(|range| format!("bytes={}-{}", range.start, range.end - 1)); - crate::STORAGE_METRICS.object_storage_get_total.inc(); - let get_object_output = self .s3_client .get_object() @@ -555,6 +549,7 @@ impl S3CompatibleObjectStorage { .key(key) .set_range(range_str) .send() + .with_count_and_duration_metrics(ActionLabel::GetObject) .await?; Ok(get_object_output) } @@ -642,17 +637,12 @@ impl S3CompatibleObjectStorage { for (path_chunk, delete) in &mut delete_requests_it { let delete_objects_res: StorageResult = aws_retry(&self.retry_params, || async { - crate::STORAGE_METRICS - .object_storage_bulk_delete_requests_total - .inc(); - let _timer = crate::STORAGE_METRICS - .object_storage_bulk_delete_request_duration - .start_timer(); self.s3_client .delete_objects() .bucket(self.bucket.clone()) .delete(delete.clone()) .send() + .with_count_and_duration_metrics(ActionLabel::DeleteObjects) .await }) .await @@ -718,10 +708,7 @@ impl S3CompatibleObjectStorage { async fn download_all(byte_stream: ByteStream, output: &mut Vec) -> io::Result<()> { output.clear(); let mut body_stream_reader = BufReader::new(byte_stream.into_async_read()); - let num_bytes_copied = tokio::io::copy_buf(&mut body_stream_reader, output).await?; - STORAGE_METRICS - .object_storage_download_num_bytes - .inc_by(num_bytes_copied); + copy_with_download_metrics(&mut body_stream_reader, output).await?; // When calling `get_all`, the Vec capacity is not properly set. 
output.shrink_to_fit(); Ok(()) @@ -737,6 +724,7 @@ impl Storage for S3CompatibleObjectStorage { .bucket(self.bucket.clone()) .max_keys(1) .send() + .with_count_metric(ActionLabel::ListObjects) .await?; Ok(()) } @@ -746,7 +734,6 @@ impl Storage for S3CompatibleObjectStorage { path: &Path, payload: Box, ) -> crate::StorageResult<()> { - crate::STORAGE_METRICS.object_storage_put_total.inc(); let _permit = REQUEST_SEMAPHORE.acquire().await; let key = self.key(path); let total_len = payload.len(); @@ -765,10 +752,7 @@ impl Storage for S3CompatibleObjectStorage { let get_object_output = aws_retry(&self.retry_params, || self.get_object(path, None)).await?; let mut body_read = BufReader::new(get_object_output.body.into_async_read()); - let num_bytes_copied = tokio::io::copy_buf(&mut body_read, output).await?; - STORAGE_METRICS - .object_storage_download_num_bytes - .inc_by(num_bytes_copied); + copy_with_download_metrics(&mut body_read, output).await?; output.flush().await?; Ok(()) } @@ -778,17 +762,12 @@ impl Storage for S3CompatibleObjectStorage { let bucket = self.bucket.clone(); let key = self.key(path); let delete_res = aws_retry(&self.retry_params, || async { - crate::STORAGE_METRICS - .object_storage_delete_requests_total - .inc(); - let _timer = crate::STORAGE_METRICS - .object_storage_delete_request_duration - .start_timer(); self.s3_client .delete_object() .bucket(&bucket) .key(&key) .send() + .with_count_and_duration_metrics(ActionLabel::DeleteObject) .await }) .await; @@ -869,6 +848,7 @@ impl Storage for S3CompatibleObjectStorage { .bucket(&bucket) .key(&key) .send() + .with_count_metric(ActionLabel::HeadObject) .await }) .await?; diff --git a/quickwit/quickwit-storage/src/opendal_storage/base.rs b/quickwit/quickwit-storage/src/opendal_storage/base.rs index 0466a42d1d6..e84ba4afcc0 100644 --- a/quickwit/quickwit-storage/src/opendal_storage/base.rs +++ b/quickwit/quickwit-storage/src/opendal_storage/base.rs @@ -24,6 +24,7 @@ use tokio::io::{AsyncRead, AsyncWriteExt as TokioAsyncWriteExt}; use tokio_util::compat::{FuturesAsyncReadCompatExt, FuturesAsyncWriteCompatExt}; use crate::metrics::object_storage_get_slice_in_flight_guards; +use crate::metrics_wrappers::{ActionLabel, opendal_helpers}; use crate::storage::SendableAsync; use crate::{ BulkDeleteError, MultiPartPolicy, OwnedBytes, PutPayload, Storage, StorageError, @@ -79,7 +80,8 @@ impl Storage for OpendalStorage { } async fn put(&self, path: &Path, payload: Box) -> StorageResult<()> { - crate::STORAGE_METRICS.object_storage_put_total.inc(); + // we record one put even though it may involve multiple parts + opendal_helpers::record_request(ActionLabel::PutObject); let path = path.as_os_str().to_string_lossy(); let mut payload_reader = payload.byte_stream().await?.into_async_read(); @@ -92,9 +94,7 @@ impl Storage for OpendalStorage { .compat_write(); tokio::io::copy(&mut payload_reader, &mut storage_writer).await?; storage_writer.get_mut().close().await?; - crate::STORAGE_METRICS - .object_storage_upload_num_bytes - .inc_by(payload.len()); + opendal_helpers::record_upload(payload.len()); Ok(()) } @@ -108,9 +108,7 @@ impl Storage for OpendalStorage { .await? 
.compat(); let num_bytes_copied = tokio::io::copy(&mut storage_reader, output).await?; - crate::STORAGE_METRICS - .object_storage_download_num_bytes - .inc_by(num_bytes_copied); + opendal_helpers::record_download(num_bytes_copied); output.flush().await?; Ok(()) } @@ -122,7 +120,7 @@ impl Storage for OpendalStorage { // Unlike other object store implementations, in flight requests are // recorded before issuing the query to the object store. let _inflight_guards = object_storage_get_slice_in_flight_guards(size); - crate::STORAGE_METRICS.object_storage_get_total.inc(); + opendal_helpers::record_request(ActionLabel::GetObject); let storage_content = self.op.read_with(&path).range(range).await?.to_vec(); Ok(OwnedBytes::new(storage_content)) } @@ -152,12 +150,7 @@ impl Storage for OpendalStorage { async fn delete(&self, path: &Path) -> StorageResult<()> { let path = path.as_os_str().to_string_lossy(); - crate::STORAGE_METRICS - .object_storage_delete_requests_total - .inc(); - let _timer = crate::STORAGE_METRICS - .object_storage_delete_request_duration - .start_timer(); + let _timer = opendal_helpers::record_request_with_timer(ActionLabel::DeleteObject); self.op.delete(&path).await?; Ok(()) } @@ -173,12 +166,8 @@ impl Storage for OpendalStorage { { let mut bulk_error = BulkDeleteError::default(); for (index, path) in paths.iter().enumerate() { - crate::STORAGE_METRICS - .object_storage_bulk_delete_requests_total - .inc(); - let _timer = crate::STORAGE_METRICS - .object_storage_bulk_delete_request_duration - .start_timer(); + let _timer = + opendal_helpers::record_request_with_timer(ActionLabel::DeleteObjects); let result = self.op.delete(&path.as_os_str().to_string_lossy()).await; if let Err(err) = result { let storage_error_kind = err.kind(); diff --git a/quickwit/quickwit-ui/.prettierignore b/quickwit/quickwit-ui/.prettierignore deleted file mode 100644 index 378eac25d31..00000000000 --- a/quickwit/quickwit-ui/.prettierignore +++ /dev/null @@ -1 +0,0 @@ -build diff --git a/quickwit/quickwit-ui/.prettierrc.json b/quickwit/quickwit-ui/.prettierrc.json deleted file mode 100644 index 0967ef424bc..00000000000 --- a/quickwit/quickwit-ui/.prettierrc.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/quickwit/quickwit-ui/biome.json b/quickwit/quickwit-ui/biome.json new file mode 100644 index 00000000000..7ecccbb98de --- /dev/null +++ b/quickwit/quickwit-ui/biome.json @@ -0,0 +1,9 @@ +{ + "$schema": "./node_modules/@biomejs/biome/configuration_schema.json", + "formatter": { + "enabled": true, + "indentStyle": "space", + "includes": ["**", "!**/build"] + }, + "linter": { "enabled": false } +} diff --git a/quickwit/quickwit-ui/config-overrides.js b/quickwit/quickwit-ui/config-overrides.js index 0c838793447..bcb725a4165 100644 --- a/quickwit/quickwit-ui/config-overrides.js +++ b/quickwit/quickwit-ui/config-overrides.js @@ -12,12 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-const MonacoWebpackPlugin = require('monaco-editor-webpack-plugin'); +const MonacoWebpackPlugin = require("monaco-editor-webpack-plugin"); module.exports = function override(config, env) { - config.plugins.push(new MonacoWebpackPlugin({ - languages: ['json'] - })); + config.plugins.push( + new MonacoWebpackPlugin({ + languages: ["json"], + }), + ); return config; -} - +}; diff --git a/quickwit/quickwit-ui/cypress/e2e/homepage.spec.cy.js b/quickwit/quickwit-ui/cypress/e2e/homepage.spec.cy.js index cc31c6fef27..f61352f94bd 100644 --- a/quickwit/quickwit-ui/cypress/e2e/homepage.spec.cy.js +++ b/quickwit/quickwit-ui/cypress/e2e/homepage.spec.cy.js @@ -12,25 +12,23 @@ // See the License for the specific language governing permissions and // limitations under the License. -describe('Home navigation', () => { - it('Should display sidebar links', () => { - cy.visit('http://127.0.0.1:7280/ui'); - cy.get('a') - .should('be.visible') - .should('contain.text', 'Query editor') - .should('contain.text', 'Indexes') - .should('contain.text', 'Cluster'); +describe("Home navigation", () => { + it("Should display sidebar links", () => { + cy.visit("http://127.0.0.1:7280/ui"); + cy.get("a") + .should("be.visible") + .should("contain.text", "Query editor") + .should("contain.text", "Indexes") + .should("contain.text", "Cluster"); }); - it('Should navigate to cluster state', () => { - cy.visit('http://127.0.0.1:7280/ui'); - cy.get('a').contains('Cluster').click(); - cy.get('p').should('contain.text', 'Cluster'); - cy.get('span').should('contain.text', 'cluster_id'); + it("Should navigate to cluster state", () => { + cy.visit("http://127.0.0.1:7280/ui"); + cy.get("a").contains("Cluster").click(); + cy.get("p").should("contain.text", "Cluster"); + cy.get("span").should("contain.text", "cluster_id"); }); - it('Should display otel logs index page', () => { - cy.visit('http://127.0.0.1:7280/ui/indexes/otel-logs-v0_7'); - cy.get('a') - .should('be.visible') - .should('contain.text', 'Indexes') + it("Should display otel logs index page", () => { + cy.visit("http://127.0.0.1:7280/ui/indexes/otel-logs-v0_7"); + cy.get("a").should("be.visible").should("contain.text", "Indexes"); }); -}) +}); diff --git a/quickwit/quickwit-ui/cypress/plugins/index.js b/quickwit/quickwit-ui/cypress/plugins/index.js index 59b2bab6e4e..8229063adc1 100644 --- a/quickwit/quickwit-ui/cypress/plugins/index.js +++ b/quickwit/quickwit-ui/cypress/plugins/index.js @@ -19,4 +19,4 @@ module.exports = (on, config) => { // `on` is used to hook into various events Cypress emits // `config` is the resolved Cypress config -} +}; diff --git a/quickwit/quickwit-ui/cypress/support/e2e.js b/quickwit/quickwit-ui/cypress/support/e2e.js index 0e7290a13d9..3a252243880 100644 --- a/quickwit/quickwit-ui/cypress/support/e2e.js +++ b/quickwit/quickwit-ui/cypress/support/e2e.js @@ -14,7 +14,7 @@ // *********************************************************** // Import commands.js using ES2015 syntax: -import './commands' +import "./commands"; // Alternatively you can use CommonJS syntax: -// require('./commands') \ No newline at end of file +// require('./commands') diff --git a/quickwit/quickwit-ui/cypress/support/index.js b/quickwit/quickwit-ui/cypress/support/index.js index 239169708c4..7c3a7f398dc 100644 --- a/quickwit/quickwit-ui/cypress/support/index.js +++ b/quickwit/quickwit-ui/cypress/support/index.js @@ -14,17 +14,17 @@ // *********************************************************** // Import commands.js using ES2015 syntax: -import './commands' +import 
"./commands"; // Alternatively you can use CommonJS syntax: // require('./commands') -Cypress.on('uncaught:exception', (err, runnable) => { +Cypress.on("uncaught:exception", (err, runnable) => { // we expect a 3rd party library error with message 'list not defined' // and don't want to fail the test so we return false - if (err.message.includes('monaco-editor')) { - return false + if (err.message.includes("monaco-editor")) { + return false; } // we still want to ensure there are no other unexpected // errors, so we let them fail the test -}) \ No newline at end of file +}); diff --git a/quickwit/quickwit-ui/mocks/monacoMock.js b/quickwit/quickwit-ui/mocks/monacoMock.js index 728c7c1c205..b1915e441b1 100644 --- a/quickwit/quickwit-ui/mocks/monacoMock.js +++ b/quickwit/quickwit-ui/mocks/monacoMock.js @@ -12,11 +12,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -import * as React from 'react'; +import * as React from "react"; // Mock MonocoEditor as the current jest setup does not work when Monaco JS files // are loaded. export default function MonacoEditor(props) { return
{props.value}
; } - diff --git a/quickwit/quickwit-ui/mocks/swaggerUIMock.js b/quickwit/quickwit-ui/mocks/swaggerUIMock.js index 77db2a196a2..50d9495839c 100644 --- a/quickwit/quickwit-ui/mocks/swaggerUIMock.js +++ b/quickwit/quickwit-ui/mocks/swaggerUIMock.js @@ -12,11 +12,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -import * as React from 'react'; +import * as React from "react"; // Mock SwaggerUI as the current jest setup does not work when Monaco JS files // are loaded. export default function SwaggerUI(props) { return
{props.url}
; } - diff --git a/quickwit/quickwit-ui/mocks/x-charts.js b/quickwit/quickwit-ui/mocks/x-charts.js index 4a777a1ce84..4ee4fca0e43 100644 --- a/quickwit/quickwit-ui/mocks/x-charts.js +++ b/quickwit/quickwit-ui/mocks/x-charts.js @@ -1 +1 @@ -export const LineChart = ({ children }) => children; +export const LineChart = ({ children }) => children; diff --git a/quickwit/quickwit-ui/package.json b/quickwit/quickwit-ui/package.json index ce7fa384b0c..236dd281116 100644 --- a/quickwit/quickwit-ui/package.json +++ b/quickwit/quickwit-ui/package.json @@ -6,6 +6,7 @@ "proxy": "http://127.0.0.1:7280", "homepage": "/ui/", "dependencies": { + "@biomejs/biome": "2.3.5", "@emotion/react": "11.11.1", "@emotion/styled": "11.11.0", "@mui/icons-material": "5.11.0", @@ -24,7 +25,6 @@ "dayjs": "1.11.7", "monaco-editor": "0.34.1", "monaco-editor-webpack-plugin": "7.1.0", - "prettier": "2.8.1", "react": "17.0.2", "react-app-rewired": "2.2.1", "react-dom": "17.0.2", @@ -49,6 +49,8 @@ "eject": "react-app-rewired eject", "postbuild": "cp .gitignore_for_build_directory build/.gitignore", "lint": "eslint . --ext .ts", + "check-formatting": "biome check", + "format": "biome check --write", "e2e-test": "cypress run" }, "eslintConfig": { diff --git a/quickwit/quickwit-ui/src/components/ApiUrlFooter.tsx b/quickwit/quickwit-ui/src/components/ApiUrlFooter.tsx index eaa979bef05..52687ac42c7 100644 --- a/quickwit/quickwit-ui/src/components/ApiUrlFooter.tsx +++ b/quickwit/quickwit-ui/src/components/ApiUrlFooter.tsx @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { Box, styled, Typography, Button } from '@mui/material'; -import ContentCopyIcon from '@mui/icons-material/ContentCopy'; -import { QUICKWIT_LIGHT_GREY } from '../utils/theme'; +import ContentCopyIcon from "@mui/icons-material/ContentCopy"; +import { Box, Button, styled, Typography } from "@mui/material"; +import { QUICKWIT_LIGHT_GREY } from "../utils/theme"; const Footer = styled(Box)` display: flex; @@ -25,30 +25,43 @@ bottom: 0px; font-size: 0.90em; background-color: ${QUICKWIT_LIGHT_GREY}; opacity: 0.7; -` +`; export default function ApiUrlFooter(url: string) { const urlMaxLength = 80; - const origin = process.env.NODE_ENV === 'development' ? 'http://localhost:7280' : window.location.origin; + const origin = + process.env.NODE_ENV === "development" + ? "http://localhost:7280" + : window.location.origin; const completeUrl = `${origin}/${url}`; const isTooLong = completeUrl.length > urlMaxLength; // TODO show generated aggregation - return
- - API URL: - - -
+ return ( +
+ + API URL: + + +
+ ); } diff --git a/quickwit/quickwit-ui/src/components/IndexSideBar.tsx b/quickwit/quickwit-ui/src/components/IndexSideBar.tsx index 69cbb294c97..d50de353805 100644 --- a/quickwit/quickwit-ui/src/components/IndexSideBar.tsx +++ b/quickwit/quickwit-ui/src/components/IndexSideBar.tsx @@ -12,22 +12,33 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { Autocomplete, Box, Chip, CircularProgress, IconButton, List, ListItem, ListItemText, TextField, Typography } from '@mui/material'; -import React, { useEffect, useMemo, useState } from 'react'; -import styled from '@emotion/styled'; -import { FieldMapping, getAllFields, IndexMetadata } from '../utils/models'; -import { ChevronRight, KeyboardArrowDown } from '@mui/icons-material'; -import Tooltip from '@mui/material/Tooltip'; -import { Client } from '../services/client'; +import styled from "@emotion/styled"; +import { ChevronRight, KeyboardArrowDown } from "@mui/icons-material"; +import { + Autocomplete, + Box, + Chip, + CircularProgress, + IconButton, + List, + ListItem, + ListItemText, + TextField, + Typography, +} from "@mui/material"; +import Tooltip from "@mui/material/Tooltip"; +import React, { useEffect, useMemo, useState } from "react"; +import { Client } from "../services/client"; +import { FieldMapping, getAllFields, IndexMetadata } from "../utils/models"; -const IndexBarWrapper = styled('div')({ - display: 'flex', - height: '100%', - flex: '0 0 260px', - maxWidth: '260px', - flexDirection: 'column', - borderRight: '1px solid rgba(0, 0, 0, 0.12)', - overflow: 'auto', +const IndexBarWrapper = styled("div")({ + display: "flex", + height: "100%", + flex: "0 0 260px", + maxWidth: "260px", + flexDirection: "column", + borderRight: "1px solid rgba(0, 0, 0, 0.12)", + overflow: "auto", }); function IndexAutocomplete(props: IndexMetadataProps) { @@ -53,7 +64,7 @@ function IndexAutocomplete(props: IndexMetadataProps) { (error) => { console.log("Index autocomplete error", error); setLoading(false); - } + }, ); }, [quickwitClient, open]); @@ -66,7 +77,7 @@ function IndexAutocomplete(props: IndexMetadataProps) { }, [open, props.indexMetadata, options.length]); useEffect(() => { - setValue(props.indexMetadata); + setValue(props.indexMetadata); }, [props.indexMetadata]); return ( @@ -78,7 +89,10 @@ function IndexAutocomplete(props: IndexMetadataProps) { onChange={(_, updatedValue) => { setValue(updatedValue); - if (updatedValue == null || updatedValue.index_config.index_id == null) { + if ( + updatedValue == null || + updatedValue.index_config.index_id == null + ) { props.onIndexMetadataUpdate(null); } else { props.onIndexMetadataUpdate(updatedValue); @@ -91,7 +105,9 @@ function IndexAutocomplete(props: IndexMetadataProps) { setOpen(false); setLoading(false); }} - isOptionEqualToValue={(option, value) => option.index_config.index_id === value.index_config.index_id} + isOptionEqualToValue={(option, value) => + option.index_config.index_id === value.index_config.index_id + } getOptionLabel={(option) => option.index_config.index_id} options={options} noOptionsText="No indexes." @@ -99,12 +115,14 @@ function IndexAutocomplete(props: IndexMetadataProps) { renderInput={(params) => ( - {showLoading ? : null} + {showLoading ? 
( + + ) : null} {params.InputProps.endAdornment} ), @@ -116,14 +134,13 @@ function IndexAutocomplete(props: IndexMetadataProps) { } export interface IndexMetadataProps { - indexMetadata: null | IndexMetadata, + indexMetadata: null | IndexMetadata; onIndexMetadataUpdate(indexMetadata: IndexMetadata | null): void; } function fieldTypeLabel(fieldMapping: FieldMapping): string { if (fieldMapping.type[0] !== undefined) { return fieldMapping.type[0].toUpperCase(); - } else { return ""; } @@ -131,41 +148,64 @@ function fieldTypeLabel(fieldMapping: FieldMapping): string { export function IndexSideBar(props: IndexMetadataProps) { const [open, setOpen] = useState(true); - const fields = (props.indexMetadata === null) ? [] : getAllFields(props.indexMetadata.index_config.doc_mapping.field_mappings); + const fields = + props.indexMetadata === null + ? [] + : getAllFields( + props.indexMetadata.index_config.doc_mapping.field_mappings, + ); return ( - - + + Index ID - + - + setOpen(!open)} - > - {open ? : } + aria-label="expand row" + size="small" + onClick={() => setOpen(!open)} + > + {open ? : } Fields - { open && - { fields.map(function(field) { - return - } - sx={{paddingLeft: '10px'}} - > - - - - - - })} - - } + {open && ( + + {fields.map(function (field) { + return ( + + } + sx={{ paddingLeft: "10px" }} + > + + + + + + ); + })} + + )} ); diff --git a/quickwit/quickwit-ui/src/components/IndexSummary.tsx b/quickwit/quickwit-ui/src/components/IndexSummary.tsx index 61657b712e8..6ee578e8243 100644 --- a/quickwit/quickwit-ui/src/components/IndexSummary.tsx +++ b/quickwit/quickwit-ui/src/components/IndexSummary.tsx @@ -15,26 +15,27 @@ import styled from "@emotion/styled"; import { Paper } from "@mui/material"; import dayjs from "dayjs"; -import utc from "dayjs/plugin/utc" +import utc from "dayjs/plugin/utc"; import { FC, ReactNode } from "react"; import NumberFormat from "react-number-format"; import { Index } from "../utils/models"; + dayjs.extend(utc); const ItemContainer = styled.div` padding: 10px; display: flex; flex-direction: column; -` +`; const Row = styled.div` padding: 5px; display: flex; flex-direction: row; &:nth-of-type(odd){ background: rgba(0,0,0,0.05) } -` +`; const RowKey = styled.div` width: 350px; -` +`; const IndexRow: FC<{ title: string; children: ReactNode }> = ({ title, children, @@ -47,24 +48,30 @@ const IndexRow: FC<{ title: string; children: ReactNode }> = ({ export function IndexSummary({ index }: { index: Index }) { const all_splits = index.splits; - const published_splits = all_splits.filter(split => split.split_state == "Published"); - const num_of_staged_splits = all_splits.filter(split => split.split_state == "Staged").length; - const num_of_marked_for_delete_splits = all_splits.filter(split => split.split_state == "MarkedForDeletion").length; + const published_splits = all_splits.filter( + (split) => split.split_state == "Published", + ); + const num_of_staged_splits = all_splits.filter( + (split) => split.split_state == "Staged", + ).length; + const num_of_marked_for_delete_splits = all_splits.filter( + (split) => split.split_state == "MarkedForDeletion", + ).length; const total_num_docs = published_splits - .map(split => split.num_docs) + .map((split) => split.num_docs) .reduce((sum, current) => sum + current, 0); const total_num_bytes = published_splits - .map(split => { - return split.footer_offsets.end + .map((split) => { + return split.footer_offsets.end; }) .reduce((sum, current) => sum + current, 0); const total_uncompressed_num_bytes = published_splits - 
.map(split => { - return split.uncompressed_docs_size_in_bytes + .map((split) => { + return split.uncompressed_docs_size_in_bytes; }) .reduce((sum, current) => sum + current, 0); return ( - + {dayjs @@ -72,7 +79,9 @@ export function IndexSummary({ index }: { index: Index }) { .utc() .format("YYYY/MM/DD HH:mm")} - {index.metadata.index_config.index_uri} + + {index.metadata.index_config.index_uri} + - {published_splits.length} + + {published_splits.length} + - {num_of_staged_splits} - {num_of_marked_for_delete_splits} + + {num_of_staged_splits} + + + {num_of_marked_for_delete_splits} + - ) + ); } diff --git a/quickwit/quickwit-ui/src/components/IndexesTable.tsx b/quickwit/quickwit-ui/src/components/IndexesTable.tsx index e8618c7f1ef..ae8fedf8f14 100644 --- a/quickwit/quickwit-ui/src/components/IndexesTable.tsx +++ b/quickwit/quickwit-ui/src/components/IndexesTable.tsx @@ -12,19 +12,29 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { Paper, Table, TableBody, TableCell, TableContainer, TableHead, TableRow } from "@mui/material"; -import dayjs from 'dayjs'; -import utc from "dayjs/plugin/utc" -import { IndexMetadata } from "../utils/models"; +import { + Paper, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, +} from "@mui/material"; +import dayjs from "dayjs"; +import utc from "dayjs/plugin/utc"; import { useNavigate } from "react-router-dom"; -dayjs.extend(utc); +import { IndexMetadata } from "../utils/models"; +dayjs.extend(utc); -const IndexesTable = ({ indexesMetadata }: Readonly<{indexesMetadata: IndexMetadata[]}>) => { +const IndexesTable = ({ + indexesMetadata, +}: Readonly<{ indexesMetadata: IndexMetadata[] }>) => { const navigate = useNavigate(); - const handleClick = function(indexId: string) { + const handleClick = function (indexId: string) { navigate(`/indexes/${indexId}`); - } + }; return ( @@ -41,16 +51,28 @@ const IndexesTable = ({ indexesMetadata }: Readonly<{indexesMetadata: IndexMetad {indexesMetadata.map((indexMetadata) => ( handleClick(indexMetadata.index_config.index_id)} > {indexMetadata.index_config.index_id} - {indexMetadata.index_config.index_uri} - { dayjs.unix(indexMetadata.create_timestamp).utc().format("YYYY/MM/DD HH:mm") } - { indexMetadata.sources?.length || 'None'} + + {indexMetadata.index_config.index_uri} + + + {dayjs + .unix(indexMetadata.create_timestamp) + .utc() + .format("YYYY/MM/DD HH:mm")} + + + {indexMetadata.sources?.length || "None"} + ))} diff --git a/quickwit/quickwit-ui/src/components/JsonEditor.tsx b/quickwit/quickwit-ui/src/components/JsonEditor.tsx index a5ba510f5ee..fe41366ced9 100644 --- a/quickwit/quickwit-ui/src/components/JsonEditor.tsx +++ b/quickwit/quickwit-ui/src/components/JsonEditor.tsx @@ -12,47 +12,55 @@ // See the License for the specific language governing permissions and // limitations under the License. -import MonacoEditor from 'react-monaco-editor'; import { useCallback } from "react"; +import MonacoEditor from "react-monaco-editor"; import { EDITOR_THEME } from "../utils/theme"; -export function JsonEditor({content, resizeOnMount}: {content: unknown, resizeOnMount: boolean}) { +export function JsonEditor({ + content, + resizeOnMount, +}: { + content: unknown; + resizeOnMount: boolean; +}) { // Setting editor height based on lines height and count to stretch and fit its content. 
- const onMount = useCallback((editor) => { - if (!resizeOnMount) { - return; - } - const editorElement = editor.getDomNode(); + const onMount = useCallback( + (editor) => { + if (!resizeOnMount) { + return; + } + const editorElement = editor.getDomNode(); - if (!editorElement) { - return; - } + if (!editorElement) { + return; + } - // Weirdly enough, we have to wait a few ms to get the right height - // from `editor.getContentHeight()`. If not, we sometimes end up with - // a height > 7000px... and I don't know why. - setTimeout(() => { - const height = Math.min(800, editor.getContentHeight()); - editorElement.style.height = `${height}px`; - editor.layout(); - }, 10); - - }, [resizeOnMount]); + // Weirdly enough, we have to wait a few ms to get the right height + // from `editor.getContentHeight()`. If not, we sometimes end up with + // a height > 7000px... and I don't know why. + setTimeout(() => { + const height = Math.min(800, editor.getContentHeight()); + editorElement.style.height = `${height}px`; + editor.layout(); + }, 10); + }, + [resizeOnMount], + ); /* eslint-disable @typescript-eslint/no-explicit-any */ function beforeMount(monaco: any) { - monaco.editor.defineTheme('quickwit-light', EDITOR_THEME); + monaco.editor.defineTheme("quickwit-light", EDITOR_THEME); } return ( - ) + ); } diff --git a/quickwit/quickwit-ui/src/components/LayoutUtils.tsx b/quickwit/quickwit-ui/src/components/LayoutUtils.tsx index c1ccddb70aa..b193c760cbf 100644 --- a/quickwit/quickwit-ui/src/components/LayoutUtils.tsx +++ b/quickwit/quickwit-ui/src/components/LayoutUtils.tsx @@ -14,7 +14,7 @@ import { Box, Breadcrumbs, styled } from "@mui/material"; -export const APP_BAR_HEIGHT_PX = '48px'; +export const APP_BAR_HEIGHT_PX = "48px"; export const ViewUnderAppBarBox = styled(Box)` display: flex; flex-direction: column; @@ -31,4 +31,4 @@ padding: 16px 24px; `; export const QBreadcrumbs = styled(Breadcrumbs)` padding-bottom: 8px; -` +`; diff --git a/quickwit/quickwit-ui/src/components/Loader.tsx b/quickwit/quickwit-ui/src/components/Loader.tsx index 6a06ed0de6c..fbdefc41efe 100644 --- a/quickwit/quickwit-ui/src/components/Loader.tsx +++ b/quickwit/quickwit-ui/src/components/Loader.tsx @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { Box, styled, keyframes } from '@mui/material'; -import { ReactComponent as Logo } from '../assets/img/logo.svg'; +import { Box, keyframes, styled } from "@mui/material"; +import { ReactComponent as Logo } from "../assets/img/logo.svg"; const spin = keyframes` from { @@ -22,22 +22,24 @@ from { to { transform: rotate(360deg); } -` +`; const SpinningLogo = styled(Logo)` height: 10vmin; pointer-events: none; fill: #CBD1DD; animation: ${spin} infinite 5s linear; -` +`; export default function Loader() { - return - - + + + ); } diff --git a/quickwit/quickwit-ui/src/components/QueryActionBar.tsx b/quickwit/quickwit-ui/src/components/QueryActionBar.tsx index 791525526b5..ca06edb8dd2 100644 --- a/quickwit/quickwit-ui/src/components/QueryActionBar.tsx +++ b/quickwit/quickwit-ui/src/components/QueryActionBar.tsx @@ -12,57 +12,63 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { Box, Button, Tabs, Tab } from "@mui/material"; -import { TimeRangeSelect } from './TimeRangeSelect'; -import PlayArrowIcon from '@mui/icons-material/PlayArrow'; +import PlayArrowIcon from "@mui/icons-material/PlayArrow"; +import { Box, Button, Tab, Tabs } from "@mui/material"; import { SearchComponentProps } from "../utils/SearchComponentProps"; +import { TimeRangeSelect } from "./TimeRangeSelect"; export function QueryEditorActionBar(props: SearchComponentProps) { - const timestamp_field_name = props.index?.metadata.index_config.doc_mapping.timestamp_field; + const timestamp_field_name = + props.index?.metadata.index_config.doc_mapping.timestamp_field; const shouldDisplayTimeRangeSelect = timestamp_field_name ?? false; const handleChange = (_event: React.SyntheticEvent, newTab: number) => { - const updatedSearchRequest = {...props.searchRequest, aggregation: newTab != 0}; + const updatedSearchRequest = { + ...props.searchRequest, + aggregation: newTab != 0, + }; props.onSearchRequestUpdate(updatedSearchRequest); - props.runSearch(updatedSearchRequest) + props.runSearch(updatedSearchRequest); }; return ( - - + + - - - - + + + + - - - { shouldDisplayTimeRangeSelect && + {shouldDisplayTimeRangeSelect && ( + { + props.runSearch({ ...props.searchRequest, ...timeRange }); }} - onUpdate={ - (timeRange)=>{ - props.runSearch({...props.searchRequest, ...timeRange}); - } - } disabled={props.queryRunning || !props.searchRequest.indexId} - /> - } + /> + )} - ) + ); } diff --git a/quickwit/quickwit-ui/src/components/QueryEditor/AggregationEditor.tsx b/quickwit/quickwit-ui/src/components/QueryEditor/AggregationEditor.tsx index 47301efd919..5604e467808 100644 --- a/quickwit/quickwit-ui/src/components/QueryEditor/AggregationEditor.tsx +++ b/quickwit/quickwit-ui/src/components/QueryEditor/AggregationEditor.tsx @@ -12,14 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { useRef, useEffect, useState } from 'react'; -import { SearchComponentProps } from '../../utils/SearchComponentProps'; -import { TermAgg, HistogramAgg } from '../../utils/models'; -import { Box } from '@mui/material'; -import MenuItem from '@mui/material/MenuItem'; -import FormControl from '@mui/material/FormControl'; -import Select, { SelectChangeEvent } from '@mui/material/Select'; -import TextField from '@mui/material/TextField'; +import { Box } from "@mui/material"; +import FormControl from "@mui/material/FormControl"; +import MenuItem from "@mui/material/MenuItem"; +import Select, { SelectChangeEvent } from "@mui/material/Select"; +import TextField from "@mui/material/TextField"; +import { useEffect, useRef, useState } from "react"; +import { HistogramAgg, TermAgg } from "../../utils/models"; +import { SearchComponentProps } from "../../utils/SearchComponentProps"; export function AggregationEditor(props: SearchComponentProps) { return ( @@ -29,15 +29,17 @@ export function AggregationEditor(props: SearchComponentProps) { onSearchRequestUpdate={props.onSearchRequestUpdate} runSearch={props.runSearch} index={props.index} - queryRunning={props.queryRunning} /> + queryRunning={props.queryRunning} + /> + queryRunning={props.queryRunning} + /> - ) + ); } export function MetricKind(props: SearchComponentProps) { @@ -46,9 +48,16 @@ export function MetricKind(props: SearchComponentProps) { const handleTypeChange = (event: SelectChangeEvent) => { const value = event.target.value; - const updatedMetric = value != "count" ? 
{...metricRef.current!, type: value} : null; - const updatedAggregation = {...props.searchRequest.aggregationConfig, metric: updatedMetric}; - const updatedSearchRequest = {...props.searchRequest, aggregationConfig: updatedAggregation}; + const updatedMetric = + value != "count" ? { ...metricRef.current!, type: value } : null; + const updatedAggregation = { + ...props.searchRequest.aggregationConfig, + metric: updatedMetric, + }; + const updatedSearchRequest = { + ...props.searchRequest, + aggregationConfig: updatedAggregation, + }; props.onSearchRequestUpdate(updatedSearchRequest); metricRef.current = updatedMetric; }; @@ -58,20 +67,26 @@ export function MetricKind(props: SearchComponentProps) { if (metricRef.current == null) { return; } - const updatedMetric = {...metricRef.current!, field: value}; - const updatedAggregation = {...props.searchRequest.aggregationConfig, metric: updatedMetric}; - const updatedSearchRequest = {...props.searchRequest, aggregationConfig: updatedAggregation}; + const updatedMetric = { ...metricRef.current!, field: value }; + const updatedAggregation = { + ...props.searchRequest.aggregationConfig, + metric: updatedMetric, + }; + const updatedSearchRequest = { + ...props.searchRequest, + aggregationConfig: updatedAggregation, + }; props.onSearchRequestUpdate(updatedSearchRequest); metricRef.current = updatedMetric; }; return ( - + handleHistogramChange(pos, e)} - sx={{ "marginLeft": "10px", "minHeight": "44px" }} + sx={{ marginLeft: "10px", minHeight: "44px" }} > 10 seconds 1 minute @@ -252,13 +313,14 @@ export function AggregationKind(props: SearchComponentProps) { ); } if (isTerm(agg)) { - return (<> + return ( + <> handleTermFieldChange(pos, e)} - sx={{ "marginLeft": "10px" }} + sx={{ marginLeft: "10px" }} /> @@ -268,40 +330,44 @@ export function AggregationKind(props: SearchComponentProps) { type="number" onChange={(e) => handleTermCountChange(pos, e)} value={agg.term.size} - sx={{ "marginLeft": "10px" }} + sx={{ marginLeft: "10px" }} /> - ) + + ); } - return (null); - } + return null; + }; return ( <> - + {drawAdditional(0, aggregations)} - - + + {drawAdditional(1, aggregations)} - ) + ); } diff --git a/quickwit/quickwit-ui/src/components/QueryEditor/QueryEditor.tsx b/quickwit/quickwit-ui/src/components/QueryEditor/QueryEditor.tsx index 6f8209efa56..923e9cda60b 100644 --- a/quickwit/quickwit-ui/src/components/QueryEditor/QueryEditor.tsx +++ b/quickwit/quickwit-ui/src/components/QueryEditor/QueryEditor.tsx @@ -12,36 +12,43 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { useEffect, useRef, useState } from 'react'; -import MonacoEditor from 'react-monaco-editor'; -import * as monacoEditor from 'monaco-editor/esm/vs/editor/editor.api'; -import { LANGUAGE_CONFIG, LanguageFeatures, createIndexCompletionProvider } from './config'; -import { SearchComponentProps } from '../../utils/SearchComponentProps'; -import { EDITOR_THEME } from '../../utils/theme'; -import { Box } from '@mui/material'; +import { Box } from "@mui/material"; +import * as monacoEditor from "monaco-editor/esm/vs/editor/editor.api"; +import { useEffect, useRef, useState } from "react"; +import MonacoEditor from "react-monaco-editor"; +import { SearchComponentProps } from "../../utils/SearchComponentProps"; +import { EDITOR_THEME } from "../../utils/theme"; +import { + createIndexCompletionProvider, + LANGUAGE_CONFIG, + LanguageFeatures, +} from "./config"; -const QUICKWIT_EDITOR_THEME_ID = 'quickwit-light'; +const QUICKWIT_EDITOR_THEME_ID = "quickwit-light"; function getLanguageId(indexId: string | null): string { if (indexId === null) { - return ''; + return ""; } return `${indexId}-query-language`; } export function QueryEditor(props: SearchComponentProps) { const monacoRef = useRef(null); - const [languageId, setLanguageId] = useState(''); + const [languageId, setLanguageId] = useState(""); const runSearchRef = useRef(props.runSearch); const searchRequestRef = useRef(props.searchRequest); - const defaultValue = props.searchRequest.query === null ? `// Select an index and type your query. Example: field_name:"phrase query"` : props.searchRequest.query; + const defaultValue = + props.searchRequest.query === null + ? `// Select an index and type your query. Example: field_name:"phrase query"` + : props.searchRequest.query; let resize: () => void; /* eslint-disable @typescript-eslint/no-explicit-any */ function handleEditorDidMount(editor: any, monaco: any) { monacoRef.current = monaco; editor.addAction({ - id: 'SEARCH', + id: "SEARCH", label: "Run search", keybindings: [ monaco.KeyCode.F9, @@ -50,27 +57,44 @@ export function QueryEditor(props: SearchComponentProps) { run: () => { runSearchRef.current(searchRequestRef.current); }, - }) + }); resize = () => { - editor.layout({width: Math.max(window.innerWidth - (260+180+2*24), 200), height: 84}); - } - window.addEventListener('resize', resize); + editor.layout({ + width: Math.max(window.innerWidth - (260 + 180 + 2 * 24), 200), + height: 84, + }); + }; + window.addEventListener("resize", resize); } function handleEditorWillUnmount() { - window.removeEventListener('resize', resize); + window.removeEventListener("resize", resize); } useEffect(() => { const updatedLanguageId = getLanguageId(props.searchRequest.indexId); - if (monacoRef.current !== null && updatedLanguageId !== '' && props.index !== null) { + if ( + monacoRef.current !== null && + updatedLanguageId !== "" && + props.index !== null + ) { const monaco = monacoRef.current; - if (!monaco.languages.getLanguages().some(({ id }: {id :string }) => id === updatedLanguageId)) { - console.log('register language', updatedLanguageId); - monaco.languages.register({'id': updatedLanguageId}); - monaco.languages.setMonarchTokensProvider(updatedLanguageId, LanguageFeatures()) + if ( + !monaco.languages + .getLanguages() + .some(({ id }: { id: string }) => id === updatedLanguageId) + ) { + console.log("register language", updatedLanguageId); + monaco.languages.register({ id: updatedLanguageId }); + monaco.languages.setMonarchTokensProvider( + updatedLanguageId, + LanguageFeatures(), + ); if 
(props.index != null) { - monaco.languages.registerCompletionItemProvider(updatedLanguageId, createIndexCompletionProvider(props.index.metadata)); + monaco.languages.registerCompletionItemProvider( + updatedLanguageId, + createIndexCompletionProvider(props.index.metadata), + ); monaco.languages.setLanguageConfiguration( updatedLanguageId, LANGUAGE_CONFIG, @@ -88,7 +112,9 @@ export function QueryEditor(props: SearchComponentProps) { }, [monacoRef, props.runSearch]); function handleEditorChange(value: any) { - const updatedSearchRequest = Object.assign({}, props.searchRequest, {query: value}); + const updatedSearchRequest = Object.assign({}, props.searchRequest, { + query: value, + }); searchRequestRef.current = updatedSearchRequest; props.onSearchRequestUpdate(updatedSearchRequest); } @@ -98,7 +124,7 @@ export function QueryEditor(props: SearchComponentProps) { } return ( - + ); diff --git a/quickwit/quickwit-ui/src/components/QueryEditor/config.ts b/quickwit/quickwit-ui/src/components/QueryEditor/config.ts index 38353e69222..2ff4d105550 100644 --- a/quickwit/quickwit-ui/src/components/QueryEditor/config.ts +++ b/quickwit/quickwit-ui/src/components/QueryEditor/config.ts @@ -19,15 +19,15 @@ export enum CompletionItemKind { Operator = 11, } -const BRACES: [string, string] = ['{', '}']; -const BRACKETS: [string, string] = ['[', ']']; -const PARENTHESES: [string, string] = ['(', ')']; +const BRACES: [string, string] = ["{", "}"]; +const BRACKETS: [string, string] = ["[", "]"]; +const PARENTHESES: [string, string] = ["(", ")"]; export const LANGUAGE_CONFIG = { comments: { lineComment: "//", }, - brackets: [ BRACES, BRACKETS, PARENTHESES ], + brackets: [BRACES, BRACKETS, PARENTHESES], autoClosingPairs: [ { open: "{", close: "}" }, { open: "[", close: "]" }, @@ -50,123 +50,136 @@ export function LanguageFeatures(): any { return { defaultToken: "invalid", //wordDefinition: /(-?\d*\.\d\w*)|([^\`\~\!\#\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)/g, - operators: ['+', '-'], - brackets: [ - { open: "(", close: ")", token: "delimiter.parenthesis" }, - ], - keywords: [ - 'AND', 'OR', - ], - symbols: /[=>](?!@symbols)/, '@brackets'], - [/@symbols/, { cases: { '@operators': 'operator', - '@default' : '' } } ], + [/[{}()[]]/, "@brackets"], + [/[<>](?!@symbols)/, "@brackets"], + [/@symbols/, { cases: { "@operators": "operator", "@default": "" } }], // @ annotations. // As an example, we emit a debugging log message on these tokens. // Note: message are suppressed during the first load -- change some lines to see them. 
- [/@\s*[a-zA-Z_$][\w$]*/, { token: 'annotation', log: 'annotation token: $0' }], + [ + /@\s*[a-zA-Z_$][\w$]*/, + { token: "annotation", log: "annotation token: $0" }, + ], // numbers - [/\d*\.\d+([eE][-+]?\d+)?/, 'number.float'], - [/0[xX][0-9a-fA-F]+/, 'number.hex'], - [/\d+/, 'number'], + [/\d*\.\d+([eE][-+]?\d+)?/, "number.float"], + [/0[xX][0-9a-fA-F]+/, "number.hex"], + [/\d+/, "number"], // delimiter: after number because of .\d floats - [/[;,.]/, 'delimiter'], + [/[;,.]/, "delimiter"], // strings - [/"([^"\\]|\\.)*$/, 'string.invalid' ], // non-terminated string - [/"/, { token: 'string.quote', bracket: '@open', next: '@string' } ], + [/"([^"\\]|\\.)*$/, "string.invalid"], // non-terminated string + [/"/, { token: "string.quote", bracket: "@open", next: "@string" }], // characters - [/'[^\\']'/, 'string'], - [/(')(@escapes)(')/, ['string','string.escape','string']], - [/'/, 'string.invalid'] + [/'[^\\']'/, "string"], + [/(')(@escapes)(')/, ["string", "string.escape", "string"]], + [/'/, "string.invalid"], ], comment: [ - [/[^/*]+/, 'comment' ], - [/\/\*/, 'comment', '@push' ], // nested comment - ["\\*/", 'comment', '@pop' ], - [/[/*]/, 'comment' ] + [/[^/*]+/, "comment"], + [/\/\*/, "comment", "@push"], // nested comment + ["\\*/", "comment", "@pop"], + [/[/*]/, "comment"], ], string: [ - [/[^\\"]+/, 'string'], - [/@escapes/, 'string.escape'], - [/\\./, 'string.escape.invalid'], - [/"/, { token: 'string.quote', bracket: '@close', next: '@pop' } ] + [/[^\\"]+/, "string"], + [/@escapes/, "string.escape"], + [/\\./, "string.escape.invalid"], + [/"/, { token: "string.quote", bracket: "@close", next: "@pop" }], ], whitespace: [ - [/[ \t\r\n]+/, 'white'], - [/\/\*/, 'comment', '@comment' ], - [/\/\/.*$/, 'comment'], + [/[ \t\r\n]+/, "white"], + [/\/\*/, "comment", "@comment"], + [/\/\/.*$/, "comment"], ], }, }; } export const createIndexCompletionProvider = (indexMetadata: IndexMetadata) => { - const fields = getAllFields(indexMetadata.index_config.doc_mapping.field_mappings); + const fields = getAllFields( + indexMetadata.index_config.doc_mapping.field_mappings, + ); const completionProvider = { provideCompletionItems(model: any, position: any) { - const word = model.getWordUntilPosition(position) + const word = model.getWordUntilPosition(position); const range = { startLineNumber: position.lineNumber, endLineNumber: position.lineNumber, startColumn: word.startColumn, endColumn: word.endColumn, - } + }; // We want to auto complete all fields except timestamp that is handled with `TimeRangeSelect` component. const fieldSuggestions = fields - .filter(field => field.json_path !== indexMetadata.index_config.doc_mapping.timestamp_field) - .map(field => { + .filter( + (field) => + field.json_path !== + indexMetadata.index_config.doc_mapping.timestamp_field, + ) + .map((field) => { return { label: field.json_path, kind: CompletionItemKind.Field, - insertText: field.field_mapping.type == 'json' ? field.json_path + '.' : field.json_path + ':', + insertText: + field.field_mapping.type == "json" + ? field.json_path + "." 
+ : field.json_path + ":", range: range, - } + }; }); return { suggestions: fieldSuggestions.concat([ { - label: 'OR', + label: "OR", kind: CompletionItemKind.Operator, - insertText: 'OR ', + insertText: "OR ", range: range, }, { - label: 'AND', + label: "AND", kind: CompletionItemKind.Operator, - insertText: 'AND ', + insertText: "AND ", range: range, - } + }, ]), - } + }; }, - } - - return completionProvider -} + }; + return completionProvider; +}; export const setErrorMarker = ( monaco: any, @@ -175,7 +188,7 @@ export const setErrorMarker = ( startColumnNumber: number, message: string, ) => { - const model = editor.getModel() + const model = editor.getModel(); if (model) { monaco.editor.setModelMarkers(model, "QuestDBLanguageName", [ @@ -187,6 +200,6 @@ export const setErrorMarker = ( startColumn: startColumnNumber, endColumn: startColumnNumber, }, - ]) + ]); } -} +}; diff --git a/quickwit/quickwit-ui/src/components/ResponseErrorDisplay.tsx b/quickwit/quickwit-ui/src/components/ResponseErrorDisplay.tsx index 183c929b3ac..b43103b6ac3 100644 --- a/quickwit/quickwit-ui/src/components/ResponseErrorDisplay.tsx +++ b/quickwit/quickwit-ui/src/components/ResponseErrorDisplay.tsx @@ -12,30 +12,47 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { Box } from '@mui/material'; -import { ResponseError } from '../utils/models'; -import SentimentVeryDissatisfiedIcon from '@mui/icons-material/SentimentVeryDissatisfied'; +import SentimentVeryDissatisfiedIcon from "@mui/icons-material/SentimentVeryDissatisfied"; +import { Box } from "@mui/material"; +import { ResponseError } from "../utils/models"; function renderMessage(error: ResponseError) { - if (error.message !== null && error.message.includes('No search node available.')) { - return - Your cluster does not contain any search node. You need at least one search node. + if ( + error.message !== null && + error.message.includes("No search node available.") + ) { + return ( + + Your cluster does not contain any search node. You need at least one + search node. + ); } else { - return <> - - {error.status && Status: {error.status}} - - - Error: {error.message} - - + return ( + <> + + {error.status && Status: {error.status}} + + + Error: {error.message} + + + ); } } export default function ErrorResponseDisplay(error: ResponseError) { - return - - {renderMessage(error)} - + return ( + + + {renderMessage(error)} + + ); } diff --git a/quickwit/quickwit-ui/src/components/SearchResult/AggregationResult.tsx b/quickwit/quickwit-ui/src/components/SearchResult/AggregationResult.tsx index 21138dd70e7..b4ef8cea66c 100644 --- a/quickwit/quickwit-ui/src/components/SearchResult/AggregationResult.tsx +++ b/quickwit/quickwit-ui/src/components/SearchResult/AggregationResult.tsx @@ -12,10 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { SearchResponse, HistogramResult, TermResult, ParsedAggregationResult, extractAggregationResults } from "../../utils/models"; -import { LineChart } from '@mui/x-charts/LineChart'; -import { BarChart } from '@mui/x-charts/BarChart'; -import { CurveType } from '@mui/x-charts/models/seriesType/line'; +import { BarChart } from "@mui/x-charts/BarChart"; +import { LineChart } from "@mui/x-charts/LineChart"; +import { CurveType } from "@mui/x-charts/models/seriesType/line"; +import { + extractAggregationResults, + HistogramResult, + ParsedAggregationResult, + SearchResponse, + TermResult, +} from "../../utils/models"; function isHistogram(agg: ParsedAggregationResult): agg is HistogramResult { return agg != null && "timestamps" in agg; @@ -25,38 +31,42 @@ function isTerm(agg: ParsedAggregationResult): agg is TermResult { return Array.isArray(agg); } -export function AggregationResult({searchResponse}: {searchResponse: SearchResponse}) { +export function AggregationResult({ + searchResponse, +}: { + searchResponse: SearchResponse; +}) { const result = extractAggregationResults(searchResponse.aggregations); if (isHistogram(result)) { - const xAxis = [{ - data: result.timestamps, - valueFormatter: (date: number) => { - return new Date(date).toISOString() + const xAxis = [ + { + data: result.timestamps, + valueFormatter: (date: number) => { + return new Date(date).toISOString(); + }, }, - }]; + ]; const series = result.data.map((line) => { - const curve: CurveType = "monotoneX"; - return { - curve, - label: line.name, - data: line.value, - }; + const curve: CurveType = "monotoneX"; + return { + curve, + label: line.name, + data: line.value, + }; }); // we don't customize colors because we would need a full palette. + return ; + } else if (isTerm(result)) { return ( - entry.value), color: "#004BD9A5" }, + ]} + xAxis={[{ data: result.map((entry) => entry.term), scaleType: "band" }]} + margin={{ top: 10, bottom: 30, left: 40, right: 10 }} /> - ) - } else if (isTerm(result)) { - return ( entry.value), color: "#004BD9A5"}]} - xAxis={[{ data: result.map(entry => entry.term), scaleType: 'band' }]} - margin={{ top: 10, bottom: 30, left: 40, right: 10 }} - />) + ); } else { - return (

no result to display

); + return

no result to display

; } } diff --git a/quickwit/quickwit-ui/src/components/SearchResult/ResultTable.tsx b/quickwit/quickwit-ui/src/components/SearchResult/ResultTable.tsx index 7f79037b51c..d966c9ef2f9 100644 --- a/quickwit/quickwit-ui/src/components/SearchResult/ResultTable.tsx +++ b/quickwit/quickwit-ui/src/components/SearchResult/ResultTable.tsx @@ -12,8 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { Table, TableBody, TableContainer, Box, styled } from "@mui/material"; -import { Field as Field, getAllFields, Index, SearchResponse} from "../../utils/models"; +import { Box, styled, Table, TableBody, TableContainer } from "@mui/material"; +import { + Field as Field, + getAllFields, + Index, + SearchResponse, +} from "../../utils/models"; import { Row } from "./Row"; const TableBox = styled(Box)` @@ -22,22 +27,24 @@ flex-direction: column; overflow: auto; flex: 1 1 100%; height: 100%; -` +`; -export function ResultTable({searchResponse, index}: {searchResponse: SearchResponse, index: Index}) { +export function ResultTable({ + searchResponse, + index, +}: { + searchResponse: SearchResponse; + index: Index; +}) { const timestampField = getTimestampField(index); return ( - +
- { searchResponse.hits.map((hit, idx) => - - )} + {searchResponse.hits.map((hit, idx) => ( + + ))}
@@ -46,8 +53,13 @@ export function ResultTable({searchResponse, index}: {searchResponse: SearchResp } function getTimestampField(index: Index): Field | null { - const fields = getAllFields(index.metadata.index_config.doc_mapping.field_mappings); - const timestamp_field_name = index.metadata.index_config.doc_mapping.timestamp_field; - const timestamp_field = fields.filter(field => field.field_mapping.name === timestamp_field_name)[0]; + const fields = getAllFields( + index.metadata.index_config.doc_mapping.field_mappings, + ); + const timestamp_field_name = + index.metadata.index_config.doc_mapping.timestamp_field; + const timestamp_field = fields.filter( + (field) => field.field_mapping.name === timestamp_field_name, + )[0]; return timestamp_field ?? null; } diff --git a/quickwit/quickwit-ui/src/components/SearchResult/Row.tsx b/quickwit/quickwit-ui/src/components/SearchResult/Row.tsx index 4df7cf5de99..846b1b0f860 100644 --- a/quickwit/quickwit-ui/src/components/SearchResult/Row.tsx +++ b/quickwit/quickwit-ui/src/components/SearchResult/Row.tsx @@ -16,10 +16,16 @@ import { KeyboardArrowDown } from "@mui/icons-material"; import ChevronRight from "@mui/icons-material/ChevronRight"; import { Box, IconButton, styled, TableCell, TableRow } from "@mui/material"; import dayjs from "dayjs"; -import relativeTime from "dayjs/plugin/relativeTime" -import utc from "dayjs/plugin/utc" +import relativeTime from "dayjs/plugin/relativeTime"; +import utc from "dayjs/plugin/utc"; import React, { useState } from "react"; -import { DATE_TIME_WITH_SECONDS_FORMAT as DATE_TIME_WITH_MILLISECONDS_FORMAT, DATE_TIME_WITH_SECONDS_FORMAT, Entry, Field, RawDoc } from "../../utils/models"; +import { + DATE_TIME_WITH_SECONDS_FORMAT as DATE_TIME_WITH_MILLISECONDS_FORMAT, + DATE_TIME_WITH_SECONDS_FORMAT, + Entry, + Field, + RawDoc, +} from "../../utils/models"; import { QUICKWIT_INTERMEDIATE_GREY } from "../../utils/theme"; import { JsonEditor } from "../JsonEditor"; @@ -31,7 +37,7 @@ interface RowProps { row: RawDoc; } -const EntryName = styled('dt')` +const EntryName = styled("dt")` display: inline; background-color: ${QUICKWIT_INTERMEDIATE_GREY}; color: #343741; @@ -41,7 +47,7 @@ word-break: normal; border-radius: 3px; `; -const EntryValue = styled('dd')` +const EntryValue = styled("dd")` display: inline; margin: 0; padding: 0; @@ -50,72 +56,101 @@ margin-inline-end: 5px; function EntryFormatter(entry: Entry) { // Some field can contains objects, stringify them to render them otherwise React will crash. - const value = typeof entry.value === 'object' ? JSON.stringify(entry.value) : entry.value; + const value = + typeof entry.value === "object" ? JSON.stringify(entry.value) : entry.value; return ( <> {entry.key}: {value} - ) + ); } // Display the timestamp value if found in a `TableCell`. 
function DisplayTimestampValue(row: RawDoc, timestampField: Field | null) { - if (timestampField === null || timestampField.field_mapping.output_format === null) { + if ( + timestampField === null || + timestampField.field_mapping.output_format === null + ) { return <>; } let field_value = row; for (const path_segment of timestampField.path_segments) { - field_value = field_value[path_segment] + field_value = field_value[path_segment]; } if (!field_value) { - return <> + return <>; } - return - - {formatDateTime(field_value, timestampField.field_mapping.output_format)} - - + return ( + + + {formatDateTime( + field_value, + timestampField.field_mapping.output_format, + )} + + + ); } /* eslint-disable @typescript-eslint/no-explicit-any */ function formatDateTime(field_value: any, timestampOutputFormat: string): any { // A unix timestamp can be in secs/millis/micros/nanos and need to be converted properly. - if (timestampOutputFormat === 'unix_timestamp_secs' && typeof field_value === 'number') { - return dayjs(field_value * 1000).utc().format(DATE_TIME_WITH_SECONDS_FORMAT); - } else if (timestampOutputFormat === 'unix_timestamp_millis' && typeof field_value === 'number') { + if ( + timestampOutputFormat === "unix_timestamp_secs" && + typeof field_value === "number" + ) { + return dayjs(field_value * 1000) + .utc() + .format(DATE_TIME_WITH_SECONDS_FORMAT); + } else if ( + timestampOutputFormat === "unix_timestamp_millis" && + typeof field_value === "number" + ) { return dayjs(field_value).utc().format(DATE_TIME_WITH_MILLISECONDS_FORMAT); - } else if (timestampOutputFormat === 'unix_timestamp_micros' && typeof field_value === 'number') { - return dayjs(field_value / 1000).utc().format(DATE_TIME_WITH_MILLISECONDS_FORMAT); - } else if (timestampOutputFormat === 'unix_timestamp_nanos' && typeof field_value === 'number') { - return dayjs(field_value/ 1000000).utc().format(DATE_TIME_WITH_MILLISECONDS_FORMAT); + } else if ( + timestampOutputFormat === "unix_timestamp_micros" && + typeof field_value === "number" + ) { + return dayjs(field_value / 1000) + .utc() + .format(DATE_TIME_WITH_MILLISECONDS_FORMAT); + } else if ( + timestampOutputFormat === "unix_timestamp_nanos" && + typeof field_value === "number" + ) { + return dayjs(field_value / 1000000) + .utc() + .format(DATE_TIME_WITH_MILLISECONDS_FORMAT); } else { // Other formats are string values and we can just display it as is. 
return field_value; } } -const BreakWordBox = styled('dl')({ - verticalAlign: 'top', - display: 'inline-block', - color: '#464646', - wordBreak: 'break-all', - wordWrap: 'break-word', +const BreakWordBox = styled("dl")({ + verticalAlign: "top", + display: "inline-block", + color: "#464646", + wordBreak: "break-all", + wordWrap: "break-word", margin: 1, - overflow: 'hidden', - lineHeight: '1.8em', + overflow: "hidden", + lineHeight: "1.8em", }); export function Row(props: RowProps) { const [open, setOpen] = useState(false); const entries: Entry[] = []; for (const [key, value] of Object.entries(props.row)) { - entries.push({key: key, value: value}); + entries.push({ key: key, value: value }); } return ( <> - + {DisplayTimestampValue(props.row, props.timestampField)} - - {!open && - { entries.map((entry) => {EntryFormatter(entry)}) } + + {!open && ( + + {entries.map((entry) => ( + + {EntryFormatter(entry)} + + ))} - } - {open && - - } + )} + {open && } diff --git a/quickwit/quickwit-ui/src/components/SearchResult/SearchResult.tsx b/quickwit/quickwit-ui/src/components/SearchResult/SearchResult.tsx index 7002787f94c..7178aa3b712 100644 --- a/quickwit/quickwit-ui/src/components/SearchResult/SearchResult.tsx +++ b/quickwit/quickwit-ui/src/components/SearchResult/SearchResult.tsx @@ -12,16 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { useMemo } from 'react'; import { Box, Typography } from "@mui/material"; +import { useMemo } from "react"; import NumberFormat from "react-number-format"; import { Index, ResponseError, SearchResponse } from "../../utils/models"; import Loader from "../Loader"; -import { ResultTable } from "./ResultTable"; -import { AggregationResult } from "./AggregationResult"; import ErrorResponseDisplay from "../ResponseErrorDisplay"; +import { AggregationResult } from "./AggregationResult"; +import { ResultTable } from "./ResultTable"; -function HitCount({searchResponse}: {searchResponse: SearchResponse}) { +function HitCount({ searchResponse }: { searchResponse: SearchResponse }) { return ( @@ -40,7 +40,7 @@ function HitCount({searchResponse}: {searchResponse: SearchResponse}) { seconds - ) + ); } interface SearchResultProps { @@ -55,7 +55,12 @@ export default function SearchResult(props: SearchResultProps) { if (props.searchResponse == null || props.index == null) { return null; } else if (props.searchResponse.aggregations === undefined) { - return ; + return ( + + ); } else { return ; } @@ -64,7 +69,7 @@ export default function SearchResult(props: SearchResultProps) { if (props.queryRunning) { return ; } - + if (props.searchError !== null) { return ErrorResponseDisplay(props.searchError); } @@ -74,12 +79,35 @@ export default function SearchResult(props: SearchResultProps) { } return ( - - - + + + - + {result} diff --git a/quickwit/quickwit-ui/src/components/SideBar.tsx b/quickwit/quickwit-ui/src/components/SideBar.tsx index e0866fa02f5..3600a32dfd1 100644 --- a/quickwit/quickwit-ui/src/components/SideBar.tsx +++ b/quickwit/quickwit-ui/src/components/SideBar.tsx @@ -12,23 +12,23 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import * as React from 'react'; -import List from '@mui/material/List'; -import ListItem from '@mui/material/ListItem'; -import ListItemIcon from '@mui/material/ListItemIcon'; -import ListItemText from '@mui/material/ListItemText'; +import { ListSubheader, styled, Typography } from "@mui/material"; +import List from "@mui/material/List"; +import ListItem from "@mui/material/ListItem"; +import ListItemIcon from "@mui/material/ListItemIcon"; +import ListItemText from "@mui/material/ListItemText"; +import { Database } from "@styled-icons/feather/Database"; +import { Settings } from "@styled-icons/feather/Settings"; +import { GroupWork } from "@styled-icons/material-outlined/GroupWork"; +import { CodeSSlash } from "@styled-icons/remix-line/CodeSSlash"; +import * as React from "react"; import { Link as RouterLink, LinkProps as RouterLinkProps, -} from 'react-router-dom'; -import { ListSubheader, styled, Typography } from '@mui/material'; -import { CodeSSlash } from "@styled-icons/remix-line/CodeSSlash" -import { GroupWork } from '@styled-icons/material-outlined/GroupWork'; -import { Database } from '@styled-icons/feather/Database'; -import { Settings } from '@styled-icons/feather/Settings'; -import { useLocalStorage } from '../providers/LocalStorageProvider'; -import { toUrlSearchRequestParams } from '../utils/urls'; -import { APP_BAR_HEIGHT_PX } from './LayoutUtils'; +} from "react-router-dom"; +import { useLocalStorage } from "../providers/LocalStorageProvider"; +import { toUrlSearchRequestParams } from "../utils/urls"; +import { APP_BAR_HEIGHT_PX } from "./LayoutUtils"; interface ListItemLinkProps { icon?: React.ReactElement; @@ -41,52 +41,76 @@ function ListItemLink(props: ListItemLinkProps) { const renderLink = React.useMemo( () => - React.forwardRef>(function Link( - itemProps, - ref, - ) { - return ; - }), + React.forwardRef>( + function Link(itemProps, ref) { + return ( + + ); + }, + ), [to], ); return ( - {icon ? {icon} : null} + {icon ? ( + {icon} + ) : null} ); } -const SideBarWrapper = styled('div')({ - display: 'flex', +const SideBarWrapper = styled("div")({ + display: "flex", marginTop: `${APP_BAR_HEIGHT_PX}`, height: `calc(100% - ${APP_BAR_HEIGHT_PX})`, - flex: '0 0 180px', - flexDirection: 'column', - borderRight: '1px solid rgba(0, 0, 0, 0.12)', + flex: "0 0 180px", + flexDirection: "column", + borderRight: "1px solid rgba(0, 0, 0, 0.12)", }); const SideBar = () => { const lastSearchRequest = useLocalStorage().lastSearchRequest; - let searchUrl = '/search'; + let searchUrl = "/search"; if (lastSearchRequest.indexId || lastSearchRequest.query) { - searchUrl = '/search?' + toUrlSearchRequestParams(lastSearchRequest).toString(); + searchUrl = + "/search?" + toUrlSearchRequestParams(lastSearchRequest).toString(); } return ( - - + + Discover - Query editor} icon={} /> - + Query editor} + icon={} + /> + Admin - Indexes} icon={} /> - Cluster} icon={} /> - Node info} icon={} /> - API } icon={} /> + Indexes} + icon={} + /> + Cluster} + icon={} + /> + Node info} + icon={} + /> + API } + icon={} + /> ); diff --git a/quickwit/quickwit-ui/src/components/TimeRangeSelect.tsx b/quickwit/quickwit-ui/src/components/TimeRangeSelect.tsx index 88c8b88d26f..2f8126376b0 100644 --- a/quickwit/quickwit-ui/src/components/TimeRangeSelect.tsx +++ b/quickwit/quickwit-ui/src/components/TimeRangeSelect.tsx @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import React, { useEffect, useMemo, useState } from "react"; +import { AccessTime, ChevronRight, DateRange } from "@mui/icons-material"; import { Box, Button, @@ -25,13 +25,12 @@ import { TextField, TextFieldProps, } from "@mui/material"; -import { AccessTime, ChevronRight, DateRange } from "@mui/icons-material"; -import { Dayjs, default as dayjs } from 'dayjs'; -import relativeTime from "dayjs/plugin/relativeTime" -import utc from "dayjs/plugin/utc" -import { DateTimePicker } from '@mui/x-date-pickers'; -import { AdapterDayjs, } from '@mui/x-date-pickers/AdapterDayjs'; -import { LocalizationProvider } from '@mui/x-date-pickers'; +import { DateTimePicker, LocalizationProvider } from "@mui/x-date-pickers"; +import { AdapterDayjs } from "@mui/x-date-pickers/AdapterDayjs"; +import { Dayjs, default as dayjs } from "dayjs"; +import relativeTime from "dayjs/plugin/relativeTime"; +import utc from "dayjs/plugin/utc"; +import React, { useEffect, useMemo, useState } from "react"; import { DATE_TIME_WITH_SECONDS_FORMAT } from "../utils/models"; dayjs.extend(relativeTime); @@ -48,14 +47,14 @@ const TIME_RANGE_CHOICES = [ ]; type TimeRange = { - startTimestamp: number | null, - endTimestamp: number | null, -} + startTimestamp: number | null; + endTimestamp: number | null; +}; export interface TimeRangeSelectProps { timeRange: TimeRange; disabled?: boolean; - onUpdate(newTimeRange:TimeRange): void; + onUpdate(newTimeRange: TimeRange): void; } interface TimeRangeSelectState { @@ -65,8 +64,12 @@ interface TimeRangeSelectState { } export function TimeRangeSelect(props: TimeRangeSelectProps): JSX.Element { - const getInitialState = () => {return {width: 220, anchor: null, customDatesPanelOpen: false}}; - const initialState = useMemo(() => {return getInitialState(); }, []); + const getInitialState = () => { + return { width: 220, anchor: null, customDatesPanelOpen: false }; + }; + const initialState = useMemo(() => { + return getInitialState(); + }, []); const [state, setState] = useState(initialState); const handleOpenClick = (event: React.MouseEvent) => { @@ -83,13 +86,15 @@ export function TimeRangeSelect(props: TimeRangeSelectProps): JSX.Element { useEffect(() => { setState(initialState); - }, [props.disabled, initialState]) + }, [props.disabled, initialState]); const handleClose = () => { setState(initialState); }; - const handleTimeRangeChoiceClick = (secondsBeforeNow: number | string | undefined) => { + const handleTimeRangeChoiceClick = ( + secondsBeforeNow: number | string | undefined, + ) => { if (secondsBeforeNow === undefined) { return; } @@ -97,18 +102,18 @@ export function TimeRangeSelect(props: TimeRangeSelectProps): JSX.Element { secondsBeforeNow = +secondsBeforeNow; setState(initialState); const startTimestamp = Math.trunc(Date.now() / 1000) - secondsBeforeNow; - props.onUpdate({startTimestamp, endTimestamp:null}) + props.onUpdate({ startTimestamp, endTimestamp: null }); }; const handleReset = () => { - props.onUpdate({startTimestamp:null, endTimestamp:null}) + props.onUpdate({ startTimestamp: null, endTimestamp: null }); }; const open = Boolean(state.anchor); const id = open ? 
"time-range-select-popover" : undefined; return ( - + {TIME_RANGE_CHOICES.map((value, idx) => { - return handleTimeRangeChoiceClick(value[1])} + return ( + handleTimeRangeChoiceClick(value[1])} > - - + + + ); })} - + - - + + {state.anchor !== null && state.customDatesPanelOpen && ( - + )} @@ -183,25 +200,38 @@ function CustomDatesPanel(props: TimeRangeSelectProps): JSX.Element { const [endDate, setEndDate] = useState(null); useEffect(() => { - setStartDate(props.timeRange.startTimestamp ? convertTimestampSecsIntoDateUtc(props.timeRange.startTimestamp) : null); - setEndDate(props.timeRange.endTimestamp ? convertTimestampSecsIntoDateUtc(props.timeRange.endTimestamp) : null); + setStartDate( + props.timeRange.startTimestamp + ? convertTimestampSecsIntoDateUtc(props.timeRange.startTimestamp) + : null, + ); + setEndDate( + props.timeRange.endTimestamp + ? convertTimestampSecsIntoDateUtc(props.timeRange.endTimestamp) + : null, + ); }, [props.timeRange.startTimestamp, props.timeRange.endTimestamp]); const handleReset = (event: React.MouseEvent) => { event.preventDefault(); setStartDate(null); - setEndDate(null) - props.onUpdate({startTimestamp:null, endTimestamp:null}); + setEndDate(null); + props.onUpdate({ startTimestamp: null, endTimestamp: null }); }; const handleApply = (event: React.MouseEvent) => { event.preventDefault(); const startTimestamp = startDate ? startDate.valueOf() / 1000 : null; const endTimestamp = endDate ? endDate.valueOf() / 1000 : null; - props.onUpdate({startTimestamp, endTimestamp}); + props.onUpdate({ startTimestamp, endTimestamp }); }; return ( - + { // By default, newValue is a datetime defined on the local time zone and for now we consider // input/output only in UTC. - setStartDate(newValue ? dayjs(newValue.valueOf() + newValue.utcOffset() * 60 * 1000).utc() : null); + setStartDate( + newValue + ? dayjs( + newValue.valueOf() + newValue.utcOffset() * 60 * 1000, + ).utc() + : null, + ); }} - renderInput={(props: TextFieldProps) => } + renderInput={(props: TextFieldProps) => ( + + )} /> @@ -224,9 +262,17 @@ function CustomDatesPanel(props: TimeRangeSelectProps): JSX.Element { onChange={(newValue: null | Dayjs) => { // By default, newValue is a datetime defined on the local time zone and for now we consider // input/output only in UTC. - setEndDate(newValue ? dayjs(newValue.valueOf() + newValue.utcOffset() * 60 * 1000).utc() : null); + setEndDate( + newValue + ? 
dayjs( + newValue.valueOf() + newValue.utcOffset() * 60 * 1000, + ).utc() + : null, + ); }} - renderInput={(props: TextFieldProps) => } + renderInput={(props: TextFieldProps) => ( + + )} /> @@ -236,7 +282,7 @@ function CustomDatesPanel(props: TimeRangeSelectProps): JSX.Element { color="primary" onClick={handleReset} disableElevation - style={{marginRight: 10}} + style={{ marginRight: 10 }} > Reset @@ -262,20 +308,39 @@ interface DateTimeRangeLabelProps { function DateTimeRangeLabel(props: DateTimeRangeLabelProps): JSX.Element { function Label() { if (props.startTimestamp !== null && props.endTimestamp !== null) { - return <> - {convertTimestampSecsIntoDateUtc(props.startTimestamp).format(DATE_TIME_WITH_SECONDS_FORMAT)} -{" "} - {convertTimestampSecsIntoDateUtc(props.endTimestamp).format(DATE_TIME_WITH_SECONDS_FORMAT)} - + return ( + <> + {convertTimestampSecsIntoDateUtc(props.startTimestamp).format( + DATE_TIME_WITH_SECONDS_FORMAT, + )}{" "} + -{" "} + {convertTimestampSecsIntoDateUtc(props.endTimestamp).format( + DATE_TIME_WITH_SECONDS_FORMAT, + )} + + ); } else if (props.startTimestamp !== null && props.endTimestamp === null) { - return <>Since {convertTimestampSecsIntoDateUtc(props.startTimestamp).fromNow(true)} + return ( + <> + Since{" "} + {convertTimestampSecsIntoDateUtc(props.startTimestamp).fromNow(true)} + + ); } else if (props.startTimestamp == null && props.endTimestamp != null) { - return <>Before {convertTimestampSecsIntoDateUtc(props.endTimestamp).format(DATE_TIME_WITH_SECONDS_FORMAT)} + return ( + <> + Before{" "} + {convertTimestampSecsIntoDateUtc(props.endTimestamp).format( + DATE_TIME_WITH_SECONDS_FORMAT, + )} + + ); } - return <>No date range + return <>No date range; } return ( - + ); diff --git a/quickwit/quickwit-ui/src/components/TopBar.tsx b/quickwit/quickwit-ui/src/components/TopBar.tsx index 6aa03be421d..f9339979b94 100644 --- a/quickwit/quickwit-ui/src/components/TopBar.tsx +++ b/quickwit/quickwit-ui/src/components/TopBar.tsx @@ -12,21 +12,29 @@ // See the License for the specific language governing permissions and // limitations under the License. 
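Note on the CustomDatesPanel change above: the comment in the onChange handlers explains that MUI's DateTimePicker hands back a Dayjs value in the local time zone while the component treats input/output as UTC, so the handlers shift the value by the local UTC offset before calling .utc(), reinterpreting the wall-clock time the user picked as UTC. A minimal standalone sketch of that conversion (the helper name is illustrative, not part of the component):

    import dayjs, { Dayjs } from "dayjs";
    import utc from "dayjs/plugin/utc";

    dayjs.extend(utc);

    // Reinterpret a local-time Dayjs as the same wall-clock time in UTC,
    // mirroring the onChange logic in CustomDatesPanel.
    function reinterpretLocalAsUtc(value: Dayjs | null): Dayjs | null {
      return value
        ? dayjs(value.valueOf() + value.utcOffset() * 60 * 1000).utc()
        : null;
    }

    // Example: picking 2024-01-01 10:00 in a UTC+2 browser yields a Dayjs
    // whose UTC reading is 2024-01-01T10:00:00Z.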
-import AppBar from '@mui/material/AppBar'; -import Toolbar from '@mui/material/Toolbar'; -import GitHubIcon from '@mui/icons-material/GitHub'; -import { Box, IconButton, Link, styled, SvgIcon, Tooltip, Typography } from '@mui/material'; -import { Discord } from '@styled-icons/fa-brands/Discord'; -import { ReactComponent as Logo } from '../assets/img/quickwit-logo.svg'; -import { Client } from '../services/client'; -import { useEffect, useMemo, useState } from 'react'; +import GitHubIcon from "@mui/icons-material/GitHub"; +import { + Box, + IconButton, + Link, + SvgIcon, + styled, + Tooltip, + Typography, +} from "@mui/material"; +import AppBar from "@mui/material/AppBar"; +import Toolbar from "@mui/material/Toolbar"; +import { Discord } from "@styled-icons/fa-brands/Discord"; +import { useEffect, useMemo, useState } from "react"; +import { ReactComponent as Logo } from "../assets/img/quickwit-logo.svg"; +import { Client } from "../services/client"; -const StyledAppBar = styled(AppBar)(({ theme })=>({ +const StyledAppBar = styled(AppBar)(({ theme }) => ({ zIndex: theme.zIndex.drawer + 1, })); // Update the Button's color prop options -declare module '@mui/material/AppBar' { +declare module "@mui/material/AppBar" { interface AppBarPropsColorOverrides { neutral: true; } @@ -37,24 +45,30 @@ const TopBar = () => { const quickwitClient = useMemo(() => new Client(), []); useEffect(() => { - quickwitClient.cluster().then(cluster => { + quickwitClient.cluster().then((cluster) => { setClusterId(cluster.cluster_id); }); - }, []) + }, []); return ( - - + + - - {clusterId} - + {clusterId} - Docs + Docs diff --git a/quickwit/quickwit-ui/src/index.css b/quickwit/quickwit-ui/src/index.css index d57d07f0dd8..f04f8d80804 100644 --- a/quickwit/quickwit-ui/src/index.css +++ b/quickwit/quickwit-ui/src/index.css @@ -23,5 +23,5 @@ body { } div.swagger-ui div.information-container { - display: none; + display: none; } diff --git a/quickwit/quickwit-ui/src/index.test.js b/quickwit/quickwit-ui/src/index.test.js index 98803c46e93..32990483fa3 100644 --- a/quickwit/quickwit-ui/src/index.test.js +++ b/quickwit/quickwit-ui/src/index.test.js @@ -12,15 +12,19 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { render, screen } from '@testing-library/react'; -import { BrowserRouter } from 'react-router-dom'; -import App from './views/App'; +import { render, screen } from "@testing-library/react"; +import { BrowserRouter } from "react-router-dom"; +import App from "./views/App"; -describe('App', function () { - it('Should display side bar links', function () { - render(); - expect(screen.getByText(/Discover/)).toBeInTheDocument(); - expect(screen.getByText(/Query editor/)).toBeInTheDocument(); - expect(screen.getByText(/Admin/)).toBeInTheDocument(); +describe("App", function () { + it("Should display side bar links", function () { + render( + + + , + ); + expect(screen.getByText(/Discover/)).toBeInTheDocument(); + expect(screen.getByText(/Query editor/)).toBeInTheDocument(); + expect(screen.getByText(/Admin/)).toBeInTheDocument(); }); }); diff --git a/quickwit/quickwit-ui/src/index.tsx b/quickwit/quickwit-ui/src/index.tsx index 6c8e7f19012..6061e2547db 100644 --- a/quickwit/quickwit-ui/src/index.tsx +++ b/quickwit/quickwit-ui/src/index.tsx @@ -12,12 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import React from 'react'; -import ReactDOM from 'react-dom'; -import './index.css'; -import App from './views/App'; -import reportWebVitals from './reportWebVitals'; -import { BrowserRouter } from 'react-router-dom'; +import React from "react"; +import ReactDOM from "react-dom"; +import "./index.css"; +import { BrowserRouter } from "react-router-dom"; +import reportWebVitals from "./reportWebVitals"; +import App from "./views/App"; ReactDOM.render( @@ -25,7 +25,7 @@ ReactDOM.render( , - document.getElementById('root') + document.getElementById("root"), ); // If you want to start measuring performance in your app, pass a function diff --git a/quickwit/quickwit-ui/src/providers/EditorProvider.tsx b/quickwit/quickwit-ui/src/providers/EditorProvider.tsx index 92423c237a0..faa571c9741 100644 --- a/quickwit/quickwit-ui/src/providers/EditorProvider.tsx +++ b/quickwit/quickwit-ui/src/providers/EditorProvider.tsx @@ -12,24 +12,30 @@ // See the License for the specific language governing permissions and // limitations under the License. -import * as monacoEditor from 'monaco-editor/esm/vs/editor/editor.api'; -import { createContext, MutableRefObject, PropsWithChildren, useContext, useRef } from "react" +import * as monacoEditor from "monaco-editor/esm/vs/editor/editor.api"; +import { + createContext, + MutableRefObject, + PropsWithChildren, + useContext, + useRef, +} from "react"; type ContextProps = { - editorRef: MutableRefObject | null - monacoRef: MutableRefObject | null -} + editorRef: MutableRefObject | null; + monacoRef: MutableRefObject | null; +}; const defaultValues = { editorRef: null, monacoRef: null, -} +}; const EditorContext = createContext(defaultValues); export const EditorProvider = ({ children }: PropsWithChildren<{}>) => { - const editorRef = useRef(null) - const monacoRef = useRef(null) + const editorRef = useRef(null); + const monacoRef = useRef(null); return ( ) => { > {children} - ) -} + ); +}; export const useEditor = () => { - return useContext(EditorContext) -} + return useContext(EditorContext); +}; diff --git a/quickwit/quickwit-ui/src/providers/LocalStorageProvider.tsx b/quickwit/quickwit-ui/src/providers/LocalStorageProvider.tsx index 8cc65b75305..75c36b75f65 100644 --- a/quickwit/quickwit-ui/src/providers/LocalStorageProvider.tsx +++ b/quickwit/quickwit-ui/src/providers/LocalStorageProvider.tsx @@ -12,8 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { createContext, PropsWithChildren, useContext, useEffect, useState } from "react" -import { EMPTY_SEARCH_REQUEST, SearchRequest } from "../utils/models" +import { + createContext, + PropsWithChildren, + useContext, + useEffect, + useState, +} from "react"; +import { EMPTY_SEARCH_REQUEST, SearchRequest } from "../utils/models"; type Props = Record; @@ -25,7 +31,7 @@ type ContextProps = { const defaultValues = { lastSearchRequest: EMPTY_SEARCH_REQUEST, updateLastSearchRequest: () => undefined, -} +}; function parseSearchRequest(value: string | null): SearchRequest { if (value === null) { @@ -39,17 +45,23 @@ export const LocalStorageContext = createContext(defaultValues); export const LocalStorageProvider = ({ children, }: PropsWithChildren) => { - const [lastSearchRequest, setLastSearchRequest] = useState(EMPTY_SEARCH_REQUEST); + const [lastSearchRequest, setLastSearchRequest] = + useState(EMPTY_SEARCH_REQUEST); useEffect(() => { - if (localStorage.getItem('lastSearchRequest') !== null) { - const lastSearchRequest = parseSearchRequest(localStorage.getItem('lastSearchRequest')); + if (localStorage.getItem("lastSearchRequest") !== null) { + const lastSearchRequest = parseSearchRequest( + localStorage.getItem("lastSearchRequest"), + ); setLastSearchRequest(lastSearchRequest); } }, []); useEffect(() => { - localStorage.setItem('lastSearchRequest', JSON.stringify(lastSearchRequest)); + localStorage.setItem( + "lastSearchRequest", + JSON.stringify(lastSearchRequest), + ); }, [lastSearchRequest]); function updateLastSearchRequest(searchRequest: SearchRequest) { @@ -61,13 +73,13 @@ export const LocalStorageProvider = ({ value={{ lastSearchRequest, updateLastSearchRequest, - }} + }} > {children} - ) -} + ); +}; export const useLocalStorage = () => { - return useContext(LocalStorageContext) -} + return useContext(LocalStorageContext); +}; diff --git a/quickwit/quickwit-ui/src/reportWebVitals.ts b/quickwit/quickwit-ui/src/reportWebVitals.ts index 8124164f404..e98fd45dfde 100644 --- a/quickwit/quickwit-ui/src/reportWebVitals.ts +++ b/quickwit/quickwit-ui/src/reportWebVitals.ts @@ -12,11 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { ReportHandler } from 'web-vitals'; +import { ReportHandler } from "web-vitals"; const reportWebVitals = (onPerfEntry?: ReportHandler) => { if (onPerfEntry && onPerfEntry instanceof Function) { - import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { + import("web-vitals").then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { getCLS(onPerfEntry); getFID(onPerfEntry); getFCP(onPerfEntry); diff --git a/quickwit/quickwit-ui/src/services/client.test.ts b/quickwit/quickwit-ui/src/services/client.test.ts index b393aabc335..2031c0d95b5 100644 --- a/quickwit/quickwit-ui/src/services/client.test.ts +++ b/quickwit/quickwit-ui/src/services/client.test.ts @@ -12,39 +12,43 @@ // See the License for the specific language governing permissions and // limitations under the License. 
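Note on the LocalStorageProvider change above: the provider persists the last search request as JSON under the "lastSearchRequest" key and reads it back on mount, with the state defaulting to EMPTY_SEARCH_REQUEST when nothing is stored. A minimal sketch of that persistence contract, with hypothetical helper names (the provider itself does this inside two useEffect hooks):

    import { EMPTY_SEARCH_REQUEST, SearchRequest } from "../utils/models";

    // Hypothetical standalone equivalents of the provider's load/save steps.
    function loadLastSearchRequest(): SearchRequest {
      const raw = localStorage.getItem("lastSearchRequest");
      return raw === null ? EMPTY_SEARCH_REQUEST : (JSON.parse(raw) as SearchRequest);
    }

    function saveLastSearchRequest(request: SearchRequest): void {
      localStorage.setItem("lastSearchRequest", JSON.stringify(request));
    }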
-import { SearchRequest } from '../utils/models'; -import { Client } from './client'; +import { SearchRequest } from "../utils/models"; +import { Client } from "./client"; -describe('Client unit test', () => { - it('Should construct correct search URL', async () => { - // Mocking the fetch function to simulate network requests - const mockFetch = jest.fn(() => Promise.resolve({ ok: true, json: () => Promise.resolve({}) })); - (global as any).fetch = mockFetch; // eslint-disable-line @typescript-eslint/no-explicit-any +describe("Client unit test", () => { + it("Should construct correct search URL", async () => { + // Mocking the fetch function to simulate network requests + const mockFetch = jest.fn(() => + Promise.resolve({ ok: true, json: () => Promise.resolve({}) }), + ); + (global as any).fetch = mockFetch; // eslint-disable-line @typescript-eslint/no-explicit-any - const searchRequest: SearchRequest = { - indexId: 'my-new-fresh-index-id', - query: 'severity_error:ERROR', - startTimestamp: 100, - endTimestamp: 200, - maxHits: 20, - sortByField: { - field_name: 'timestamp', - order: 'Desc', - }, - aggregation: false, - aggregationConfig: { - metric: null, - term: null, - histogram: null, - }, - }; + const searchRequest: SearchRequest = { + indexId: "my-new-fresh-index-id", + query: "severity_error:ERROR", + startTimestamp: 100, + endTimestamp: 200, + maxHits: 20, + sortByField: { + field_name: "timestamp", + order: "Desc", + }, + aggregation: false, + aggregationConfig: { + metric: null, + term: null, + histogram: null, + }, + }; - const client = new Client(); - expect(client.buildSearchBody(searchRequest, null)).toBe('{"query":"severity_error:ERROR","max_hits":20,"start_timestamp":100,"end_timestamp":200,"sort_by_field":"+timestamp"}'); + const client = new Client(); + expect(client.buildSearchBody(searchRequest, null)).toBe( + '{"query":"severity_error:ERROR","max_hits":20,"start_timestamp":100,"end_timestamp":200,"sort_by_field":"+timestamp"}', + ); - await client.search(searchRequest, null); - const expectedUrl = `${client.apiRoot()}my-new-fresh-index-id/search`; - expect(mockFetch).toHaveBeenCalledTimes(1); - expect(mockFetch).toHaveBeenCalledWith(expectedUrl, expect.any(Object)); - }); + await client.search(searchRequest, null); + const expectedUrl = `${client.apiRoot()}my-new-fresh-index-id/search`; + expect(mockFetch).toHaveBeenCalledTimes(1); + expect(mockFetch).toHaveBeenCalledWith(expectedUrl, expect.any(Object)); + }); }); diff --git a/quickwit/quickwit-ui/src/services/client.ts b/quickwit/quickwit-ui/src/services/client.ts index 6e8e57e8388..2daa01840c1 100644 --- a/quickwit/quickwit-ui/src/services/client.ts +++ b/quickwit/quickwit-ui/src/services/client.ts @@ -12,17 +12,25 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { Cluster, Index, IndexMetadata, QuickwitBuildInfo, SearchRequest, SearchResponse, SplitMetadata } from "../utils/models"; +import { + Cluster, + Index, + IndexMetadata, + QuickwitBuildInfo, + SearchRequest, + SearchResponse, + SplitMetadata, +} from "../utils/models"; import { serializeSortByField } from "../utils/urls"; export class Client { - private readonly _host: string + private readonly _host: string; constructor(host?: string) { if (!host) { - this._host = window.location.origin + this._host = window.location.origin; } else { - this._host = host + this._host = host; } } @@ -30,10 +38,13 @@ export class Client { return this._host + "/api/v1/"; } - async search(request: SearchRequest, timestamp_field: string | null): Promise { + async search( + request: SearchRequest, + timestamp_field: string | null, + ): Promise { // TODO: improve validation of request. if (request.indexId === null || request.indexId === undefined) { - throw Error("Search request must have and index id.") + throw Error("Search request must have and index id."); } const url = `${this.apiRoot()}${request.indexId}/search`; const body = this.buildSearchBody(request, timestamp_field); @@ -41,16 +52,25 @@ export class Client { } async cluster(): Promise { - return await this.fetch(`${this.apiRoot()}cluster`, this.defaultGetRequestParams()); + return await this.fetch( + `${this.apiRoot()}cluster`, + this.defaultGetRequestParams(), + ); } async buildInfo(): Promise { - return await this.fetch(`${this.apiRoot()}version`, this.defaultGetRequestParams()); + return await this.fetch( + `${this.apiRoot()}version`, + this.defaultGetRequestParams(), + ); } // eslint-disable-next-line async config(): Promise> { - return await this.fetch(`${this.apiRoot()}config`, this.defaultGetRequestParams()); + return await this.fetch( + `${this.apiRoot()}config`, + this.defaultGetRequestParams(), + ); } // // Index management API @@ -58,12 +78,12 @@ export class Client { async getIndex(indexId: string): Promise { const [metadata, splits] = await Promise.all([ this.getIndexMetadata(indexId), - this.getAllSplits(indexId) + this.getAllSplits(indexId), ]); return { metadata: metadata, - splits: splits - } + splits: splits, + }; } async getIndexMetadata(indexId: string): Promise { @@ -72,20 +92,30 @@ export class Client { async getAllSplits(indexId: string): Promise> { // TODO: restrieve all the splits. 
- const results: {splits: Array} = await this.fetch(`${this.apiRoot()}indexes/${indexId}/splits?limit=10000`, {}); + const results: { splits: Array } = await this.fetch( + `${this.apiRoot()}indexes/${indexId}/splits?limit=10000`, + {}, + ); - return results['splits']; + return results["splits"]; } async listIndexes(): Promise> { return this.fetch(`${this.apiRoot()}indexes`, {}); } - async fetch(url: string, params: RequestInit, body: string|null = null): Promise { + async fetch( + url: string, + params: RequestInit, + body: string | null = null, + ): Promise { if (body !== null) { params.method = "POST"; params.body = body; - params.headers = {...params.headers, "content-type": "application/json"}; + params.headers = { + ...params.headers, + "content-type": "application/json", + }; } const response = await fetch(url, params); if (response.ok) { @@ -94,7 +124,7 @@ export class Client { const message = await response.text(); return await Promise.reject({ message: message, - status: response.status + status: response.status, }); } @@ -104,10 +134,13 @@ export class Client { headers: { Accept: "application/json" }, mode: "cors", cache: "default", - } + }; } - buildSearchBody(request: SearchRequest, timestamp_field: string | null): string { + buildSearchBody( + request: SearchRequest, + timestamp_field: string | null, + ): string { /* eslint-disable @typescript-eslint/no-explicit-any */ const body: any = { // TODO: the trim should be done in the backend. @@ -134,21 +167,24 @@ export class Client { } /* eslint-disable @typescript-eslint/no-explicit-any */ - buildAggregation(request: SearchRequest, timestamp_field: string | null): any { + buildAggregation( + request: SearchRequest, + timestamp_field: string | null, + ): any { let aggregation = undefined; if (request.aggregationConfig.metric) { const metric = request.aggregationConfig.metric; aggregation = { metric: { [metric.type]: { - field: metric.field - } - } - } + field: metric.field, + }, + }, + }; } if (request.aggregationConfig.histogram && timestamp_field) { const histogram = request.aggregationConfig.histogram; - const interval = histogram.interval + const interval = histogram.interval; let extended_bounds; if (request.startTimestamp && request.endTimestamp) { extended_bounds = { @@ -166,9 +202,9 @@ export class Client { fixed_interval: interval, min_doc_count: 0, extended_bounds: extended_bounds, - } - } - } + }, + }, + }; } if (request.aggregationConfig.term) { const term = request.aggregationConfig.term; @@ -179,12 +215,12 @@ export class Client { field: term.field, size: term.size, order: { - _count: "desc" + _count: "desc", }, min_doc_count: 1, - } - } - } + }, + }, + }; } return aggregation; } diff --git a/quickwit/quickwit-ui/src/setupTests.ts b/quickwit/quickwit-ui/src/setupTests.ts index 5e5df0fd200..c10cce53099 100644 --- a/quickwit/quickwit-ui/src/setupTests.ts +++ b/quickwit/quickwit-ui/src/setupTests.ts @@ -16,4 +16,4 @@ // allows you to do things like: // expect(element).toHaveTextContent(/react/i) // learn more: https://github.com/testing-library/jest-dom -import '@testing-library/jest-dom'; +import "@testing-library/jest-dom"; diff --git a/quickwit/quickwit-ui/src/utils/models.ts b/quickwit/quickwit-ui/src/utils/models.ts index 2d87598cb42..4fb18263b84 100644 --- a/quickwit/quickwit-ui/src/utils/models.ts +++ b/quickwit/quickwit-ui/src/utils/models.ts @@ -13,7 +13,7 @@ // limitations under the License. 
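Note on the Client.fetch wrapper above: it resolves with the parsed JSON body on a 2xx response and rejects with a { message, status } object otherwise, which is the ResponseError shape the view components store and render. A small usage sketch, assuming the same import paths the views use and an illustrative index id:

    import { Client } from "../services/client";
    import { ResponseError } from "../utils/models";

    const client = new Client();

    client.getIndexMetadata("my-index").then(
      (metadata) => {
        console.log("index uri:", metadata.index_config.index_uri);
      },
      (error: ResponseError) => {
        // Non-2xx responses reject with the response text and HTTP status.
        console.log(`request failed (${error.status}): ${error.message}`);
      },
    );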
/* eslint-disable @typescript-eslint/no-explicit-any */ -export type RawDoc = Record +export type RawDoc = Record; export type FieldMapping = { description: string | null; @@ -25,7 +25,7 @@ export type FieldMapping = { // Specific datetime field attributes. output_format: string | null; field_mappings?: FieldMapping[]; -} +}; export type Field = { // Json path (path segments concatenated as a string with dots between segments). @@ -33,12 +33,12 @@ export type Field = { // Json path of the field. path_segments: string[]; field_mapping: FieldMapping; -} +}; export type Entry = { key: string; value: any; -} +}; export const DATE_TIME_WITH_SECONDS_FORMAT = "YYYY/MM/DD HH:mm:ss"; export const DATE_TIME_WITH_MILLISECONDS_FORMAT = "YYYY/MM/DD HH:mm:ss.SSS"; @@ -47,12 +47,27 @@ export const DATE_TIME_WITH_MILLISECONDS_FORMAT = "YYYY/MM/DD HH:mm:ss.SSS"; export function getAllFields(field_mappings: Array): Field[] { const fields: Field[] = []; for (const field_mapping of field_mappings) { - if (field_mapping.type === 'object' && field_mapping.field_mappings !== undefined) { - for (const child_field_mapping of getAllFields(field_mapping.field_mappings)) { - fields.push({json_path: field_mapping.name + '.' + child_field_mapping.json_path, path_segments: [field_mapping.name].concat(child_field_mapping.path_segments), field_mapping: child_field_mapping.field_mapping}) + if ( + field_mapping.type === "object" && + field_mapping.field_mappings !== undefined + ) { + for (const child_field_mapping of getAllFields( + field_mapping.field_mappings, + )) { + fields.push({ + json_path: field_mapping.name + "." + child_field_mapping.json_path, + path_segments: [field_mapping.name].concat( + child_field_mapping.path_segments, + ), + field_mapping: child_field_mapping.field_mapping, + }); } } else { - fields.push({json_path: field_mapping.name, path_segments: [field_mapping.name], field_mapping: field_mapping}); + fields.push({ + json_path: field_mapping.name, + path_segments: [field_mapping.name], + field_mapping: field_mapping, + }); } } @@ -65,14 +80,14 @@ export type DocMapping = { store: boolean; dynamic_mapping: boolean; timestamp_field: string | null; -} +}; -export type SortOrder = 'Asc' | 'Desc'; +export type SortOrder = "Asc" | "Desc"; export type SortByField = { - field_name: string, - order: SortOrder -} + field_name: string; + order: SortOrder; +}; export type SearchRequest = { indexId: string | null; @@ -83,38 +98,40 @@ export type SearchRequest = { sortByField: SortByField | null; aggregation: boolean; aggregationConfig: Aggregation; -} +}; export type Aggregation = { metric: Metric | null; term: TermAgg | null; histogram: HistogramAgg | null; -} +}; export type Metric = { type: string; field: string; -} +}; export type TermAgg = { field: string; size: number; -} +}; export type HistogramAgg = { interval: string; -} +}; -export type ParsedAggregationResult = TermResult | HistogramResult| null; +export type ParsedAggregationResult = TermResult | HistogramResult | null; -export type TermResult = {term: string, value: number}[]; +export type TermResult = { term: string; value: number }[]; export type HistogramResult = { - timestamps: Date[], - data: {name: string | undefined, value: number[]}[], -} + timestamps: Date[]; + data: { name: string | undefined; value: number[] }[]; +}; -export function extractAggregationResults(aggregation: any): ParsedAggregationResult { +export function extractAggregationResults( + aggregation: any, +): ParsedAggregationResult { const extract_value = (entry: any) => { if 
("metric" in entry) { return entry.metric.value || 0; @@ -129,8 +146,8 @@ export function extractAggregationResults(aggregation: any): ParsedAggregationRe // we are in the "simple histogram" case return { timestamps, - data: [{name: undefined, value }] - } + data: [{ name: undefined, value }], + }; } else if ("term_agg" in aggregation) { // we have a term aggregation, but maybe there is an histogram inside const term_buckets = aggregation.term_agg.buckets; @@ -140,10 +157,12 @@ export function extractAggregationResults(aggregation: any): ParsedAggregationRe if (term_buckets.length > 0 && "histo_agg" in term_buckets[0]) { // we have a term+histo aggregation const timestamps_set: Set = new Set(); - term_buckets.forEach((bucket: any) => bucket.histo_agg.buckets.forEach( - (entry: any) => timestamps_set.add(entry.key) - )); - const timestamps = [... timestamps_set]; + term_buckets.forEach((bucket: any) => + bucket.histo_agg.buckets.forEach((entry: any) => + timestamps_set.add(entry.key), + ), + ); + const timestamps = [...timestamps_set]; timestamps.sort(); const data = term_buckets.map((bucket: any) => { @@ -151,24 +170,24 @@ export function extractAggregationResults(aggregation: any): ParsedAggregationRe const first_elem_key = histo_buckets[0].key; const last_elem_key = histo_buckets[histo_buckets.length - 1].key; const prefix_len = timestamps.indexOf(first_elem_key); - const suffix_len = timestamps.length - timestamps.indexOf(last_elem_key) - 1; - const value = Array(prefix_len).fill(0).concat( - histo_buckets.map(extract_value), - Array(suffix_len).fill(0), - ); - - return {name: bucket.key, value, } - }) + const suffix_len = + timestamps.length - timestamps.indexOf(last_elem_key) - 1; + const value = Array(prefix_len) + .fill(0) + .concat(histo_buckets.map(extract_value), Array(suffix_len).fill(0)); + + return { name: bucket.key, value }; + }); return { timestamps: timestamps.map((date) => new Date(date)), data, - } + }; } else { return term_buckets.map((bucket: any) => { return { term: bucket.key, value: extract_value(bucket), - } + }; }); } } @@ -177,8 +196,8 @@ export function extractAggregationResults(aggregation: any): ParsedAggregationRe } export const EMPTY_SEARCH_REQUEST: SearchRequest = { - indexId: '', - query: '', + indexId: "", + query: "", startTimestamp: null, endTimestamp: null, maxHits: 100, @@ -189,12 +208,12 @@ export const EMPTY_SEARCH_REQUEST: SearchRequest = { term: null, histogram: null, }, -} +}; export type ResponseError = { status: number | null; message: string | null; -} +}; export type SearchResponse = { num_hits: number; @@ -202,7 +221,7 @@ export type SearchResponse = { elapsed_time_micros: number; errors: Array | undefined; aggregations: any | undefined; -} +}; export type IndexConfig = { version: string; @@ -212,26 +231,26 @@ export type IndexConfig = { indexing_settings: object; search_settings: object; retention: object; -} +}; export type IndexMetadata = { index_config: IndexConfig; checkpoint: object; sources: object[] | undefined; create_timestamp: number; -} +}; export const EMPTY_INDEX_METADATA: IndexMetadata = { index_config: { - version: '', - index_uri: '', - index_id: '', + version: "", + index_uri: "", + index_id: "", doc_mapping: { field_mappings: [], tag_fields: [], store: false, dynamic_mapping: false, - timestamp_field: null + timestamp_field: null, }, indexing_settings: {}, search_settings: {}, @@ -254,62 +273,62 @@ export type SplitMetadata = { tags: string[]; demux_num_ops: number; footer_offsets: Range; -} +}; export type Range = { 
start: number; end: number; -} +}; export type Index = { metadata: IndexMetadata; splits: SplitMetadata[]; -} +}; export type Cluster = { - node_id: string, - cluster_id: string, - state: ClusterState, -} + node_id: string; + cluster_id: string; + state: ClusterState; +}; export type ClusterState = { state: ClusterStateSnapshot; live_nodes: any[]; dead_nodes: any[]; -} +}; export type ClusterStateSnapshot = { - seed_addrs: string[], - node_states: Record, -} + seed_addrs: string[]; + node_states: Record; +}; export type NodeState = { - key_values: KeyValues, - max_version: number, -} + key_values: KeyValues; + max_version: number; +}; export type KeyValues = { - available_services: KeyValue, - grpc_address: KeyValue, - heartbeat: KeyValue, -} + available_services: KeyValue; + grpc_address: KeyValue; + heartbeat: KeyValue; +}; export type KeyValue = { - value: any, - version: number, -} + value: any; + version: number; +}; export type QuickwitBuildInfo = { - commit_version_tag: string, - cargo_pkg_version: string, - cargo_build_target: string, - commit_short_hash: string, - commit_date: string, - version: string, -} + commit_version_tag: string; + cargo_pkg_version: string; + cargo_build_target: string; + commit_short_hash: string; + commit_date: string; + version: string; +}; export type NodeId = { - id: string, - grpc_address: string, - self: boolean, -} + id: string; + grpc_address: string; + self: boolean; +}; diff --git a/quickwit/quickwit-ui/src/utils/theme.ts b/quickwit/quickwit-ui/src/utils/theme.ts index cec9f8438fa..9e7328997ff 100644 --- a/quickwit/quickwit-ui/src/utils/theme.ts +++ b/quickwit/quickwit-ui/src/utils/theme.ts @@ -13,22 +13,21 @@ // limitations under the License. import { createTheme } from "@mui/material"; -import SoehneMonoKraftigWoff2 from "./../assets/fonts/soehne-mono-web-kraftig.woff2"; import SoehneMonoDreiviertelfettWoff2 from "./../assets/fonts/soehne-mono-web-dreiviertelfett.woff2"; -import SoehneHalbfettWoff2 from "./../assets/fonts/soehne-web-halbfett.woff2"; +import SoehneMonoKraftigWoff2 from "./../assets/fonts/soehne-mono-web-kraftig.woff2"; import SoehneBuchWoff2 from "./../assets/fonts/soehne-web-buch.woff2"; +import SoehneHalbfettWoff2 from "./../assets/fonts/soehne-web-halbfett.woff2"; -export const QUICKWIT_BLUE = '#004BD9'; -export const QUICKWIT_RED = '#FF0026'; -export const QUICKWIT_GREEN = '#00D588'; -export const QUICKWIT_GREY = '#CBD1DE'; -export const QUICKWIT_INTERMEDIATE_GREY = 'rgba(203,209,222,0.5)'; -export const QUICKWIT_LIGHT_GREY = '#F8F9FB'; -export const QUICKWIT_BLACK = '#1F232A'; - +export const QUICKWIT_BLUE = "#004BD9"; +export const QUICKWIT_RED = "#FF0026"; +export const QUICKWIT_GREEN = "#00D588"; +export const QUICKWIT_GREY = "#CBD1DE"; +export const QUICKWIT_INTERMEDIATE_GREY = "rgba(203,209,222,0.5)"; +export const QUICKWIT_LIGHT_GREY = "#F8F9FB"; +export const QUICKWIT_BLACK = "#1F232A"; // Update the Typography's var@iant prop options -declare module '@mui/material/Typography' { +declare module "@mui/material/Typography" { interface TypographyPropsVariantOverrides { fontSize: true; poster: true; @@ -36,22 +35,22 @@ declare module '@mui/material/Typography' { } } -declare module '@mui/material/styles' { +declare module "@mui/material/styles" { interface Theme { status: { - danger: React.CSSProperties['color']; + danger: React.CSSProperties["color"]; }; } interface PaletteOptions { - neutral: PaletteOptions['primary']; + neutral: PaletteOptions["primary"]; } interface Palette { - primary: Palette['primary']; - 
secondary: Palette['secondary']; - text: Palette['text']; - neutral: Palette['primary']; + primary: Palette["primary"]; + secondary: Palette["secondary"]; + text: Palette["text"]; + neutral: Palette["primary"]; } } @@ -59,24 +58,24 @@ export const theme = createTheme({ palette: { primary: { main: "#000000", - contrastText: '#ffffff' + contrastText: "#ffffff", }, secondary: { - main: '#000000', + main: "#000000", }, text: { - primary: '#000000', + primary: "#000000", }, neutral: { - main: '#F8F9FB', - contrastText: '#000000', + main: "#F8F9FB", + contrastText: "#000000", }, }, typography: { fontSize: 12, - fontFamily: 'SoehneMono, Arial', + fontFamily: "SoehneMono, Arial", body1: { - fontSize: '0.8rem', + fontSize: "0.8rem", }, }, components: { @@ -115,19 +114,18 @@ export const theme = createTheme({ }, }); - export const EDITOR_THEME = { - base: 'vs', + base: "vs", inherit: true, rules: [ - { token: 'comment', foreground: '#1F232A', fontStyle: 'italic' }, - { token: 'keyword', foreground: QUICKWIT_BLUE } + { token: "comment", foreground: "#1F232A", fontStyle: "italic" }, + { token: "keyword", foreground: QUICKWIT_BLUE }, ], colors: { - 'editor.comment.foreground': '#CBD1DE', - 'editor.foreground': '#000000', - 'editor.background': QUICKWIT_LIGHT_GREY, - 'editorLineNumber.foreground': 'black', - 'editor.lineHighlightBackground': '#DFE0E1', + "editor.comment.foreground": "#CBD1DE", + "editor.foreground": "#000000", + "editor.background": QUICKWIT_LIGHT_GREY, + "editorLineNumber.foreground": "black", + "editor.lineHighlightBackground": "#DFE0E1", }, }; diff --git a/quickwit/quickwit-ui/src/utils/urls.ts b/quickwit/quickwit-ui/src/utils/urls.ts index cd85b9c77af..8080e19c713 100644 --- a/quickwit/quickwit-ui/src/utils/urls.ts +++ b/quickwit/quickwit-ui/src/utils/urls.ts @@ -12,13 +12,17 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { SearchRequest, SortByField, SortOrder, Aggregation } from "./models"; +import { Aggregation, SearchRequest, SortByField, SortOrder } from "./models"; export function hasSearchParams(historySearch: string): boolean { const searchParams = new URLSearchParams(historySearch); - return searchParams.has('index_id') || searchParams.has('query') - || searchParams.has('start_timestamp') || searchParams.has('end_timestamp'); + return ( + searchParams.has("index_id") || + searchParams.has("query") || + searchParams.has("start_timestamp") || + searchParams.has("end_timestamp") + ); } export function parseSearchUrl(historySearch: string): SearchRequest { @@ -27,13 +31,13 @@ export function parseSearchUrl(historySearch: string): SearchRequest { let startTimestamp = null; const startTimeStampParsedInt = parseInt(startTimestampString || ""); if (!isNaN(startTimeStampParsedInt)) { - startTimestamp = startTimeStampParsedInt + startTimestamp = startTimeStampParsedInt; } let endTimestamp = null; const endTimestampString = searchParams.get("end_timestamp"); const endTimestampParsedInt = parseInt(endTimestampString || ""); if (!isNaN(endTimestampParsedInt)) { - endTimestamp = endTimestampParsedInt + endTimestamp = endTimestampParsedInt; } let indexId = null; const indexIdParam = searchParams.get("index_id"); @@ -43,15 +47,15 @@ export function parseSearchUrl(historySearch: string): SearchRequest { let sortByField = null; const sortByFieldParam = searchParams.get("sort_by_field"); if (sortByFieldParam !== null) { - if (sortByFieldParam.startsWith('+')) { - const order: SortOrder = 'Desc'; - sortByField = {field_name: sortByFieldParam.substring(1), order: order}; - } else if (sortByFieldParam.startsWith('-')) { - const order: SortOrder = 'Asc'; - sortByField = {field_name: sortByFieldParam.substring(1), order: order}; + if (sortByFieldParam.startsWith("+")) { + const order: SortOrder = "Desc"; + sortByField = { field_name: sortByFieldParam.substring(1), order: order }; + } else if (sortByFieldParam.startsWith("-")) { + const order: SortOrder = "Asc"; + sortByField = { field_name: sortByFieldParam.substring(1), order: order }; } else { - const order: SortOrder = 'Desc'; - sortByField = {field_name: sortByFieldParam, order: order}; + const order: SortOrder = "Desc"; + sortByField = { field_name: sortByFieldParam, order: order }; } } const aggregationParam = searchParams.get("aggregation"); @@ -68,12 +72,12 @@ export function parseSearchUrl(historySearch: string): SearchRequest { }; } -function parseAggregation(param: string|null): Aggregation { +function parseAggregation(param: string | null): Aggregation { const empty: Aggregation = { metric: null, term: null, histogram: null, - } + }; if (param !== null) { try { const aggregation: Aggregation = JSON.parse(param); @@ -85,9 +89,11 @@ function parseAggregation(param: string|null): Aggregation { return empty; } -export function toUrlSearchRequestParams(request: SearchRequest): URLSearchParams { +export function toUrlSearchRequestParams( + request: SearchRequest, +): URLSearchParams { const params = new URLSearchParams(); - params.append("query", request.query || '*'); + params.append("query", request.query || "*"); // We have to set the index ID in url params as it's not present in the UI path params. // This enables the react app to be able to get index ID from url params // if the user enter directly the UI url. 
@@ -96,10 +102,7 @@ export function toUrlSearchRequestParams(request: SearchRequest): URLSearchParam params.append("max_hits", request.maxHits.toString()); } if (request.startTimestamp) { - params.append( - "start_timestamp", - request.startTimestamp.toString() - ); + params.append("start_timestamp", request.startTimestamp.toString()); } if (request.endTimestamp) { params.append("end_timestamp", request.endTimestamp.toString()); @@ -108,18 +111,21 @@ export function toUrlSearchRequestParams(request: SearchRequest): URLSearchParam params.append("sort_by_field", serializeSortByField(request.sortByField)); } if (request.aggregation) { - params.append("aggregation", JSON.stringify(request.aggregationConfig, (_, val) => { - if (val == null) { - return undefined; - } else { - return val; - } - })) + params.append( + "aggregation", + JSON.stringify(request.aggregationConfig, (_, val) => { + if (val == null) { + return undefined; + } else { + return val; + } + }), + ); } return params; } export function serializeSortByField(sortByField: SortByField): string { - const order = sortByField.order === 'Desc' ? '+' : '-'; + const order = sortByField.order === "Desc" ? "+" : "-"; return `${order}${sortByField.field_name}`; } diff --git a/quickwit/quickwit-ui/src/views/ApiView.tsx b/quickwit/quickwit-ui/src/views/ApiView.tsx index c87c445a8b6..49cdfe1a15c 100644 --- a/quickwit/quickwit-ui/src/views/ApiView.tsx +++ b/quickwit/quickwit-ui/src/views/ApiView.tsx @@ -12,19 +12,25 @@ // See the License for the specific language governing permissions and // limitations under the License. -import "swagger-ui-react/swagger-ui.css" -import SwaggerUI from 'swagger-ui-react' -import { ViewUnderAppBarBox, FullBoxContainer } from '../components/LayoutUtils'; +import "swagger-ui-react/swagger-ui.css"; +import SwaggerUI from "swagger-ui-react"; +import { + FullBoxContainer, + ViewUnderAppBarBox, +} from "../components/LayoutUtils"; function ApiView() { - return + return ( + + layout="BaseLayout" + defaultModelsExpandDepth={-1} + url="/openapi.json" + /> + ); } export default ApiView; diff --git a/quickwit/quickwit-ui/src/views/App.tsx b/quickwit/quickwit-ui/src/views/App.tsx index bb073770664..573b9aea628 100644 --- a/quickwit/quickwit-ui/src/views/App.tsx +++ b/quickwit/quickwit-ui/src/views/App.tsx @@ -12,25 +12,25 @@ // See the License for the specific language governing permissions and // limitations under the License. 
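Note on the urls.ts change above: serializeSortByField encodes the sort order as a sign prefix on the field name, "Desc" becoming "+" and "Asc" becoming "-", and parseSearchUrl applies the inverse mapping when it reads sort_by_field back out of the URL. A short sketch of the round trip, assuming the same relative import paths as client.ts (the field name is illustrative):

    import { SortByField } from "../utils/models";
    import { serializeSortByField } from "../utils/urls";

    const sort: SortByField = { field_name: "timestamp", order: "Desc" };

    // "Desc" serializes with a leading "+", matching the "+timestamp" value
    // asserted on buildSearchBody in client.test.ts.
    console.log(serializeSortByField(sort)); // "+timestamp"

    // Going the other way, parseSearchUrl treats a leading "+" as order "Desc",
    // a leading "-" as "Asc", and no prefix as "Desc".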
-import TopBar from '../components/TopBar'; -import { CssBaseline, ThemeProvider } from '@mui/material'; -import SideBar from '../components/SideBar'; -import { Navigate, Route, Routes } from 'react-router-dom'; -import SearchView from './SearchView'; -import IndexesView from './IndexesView'; -import { theme } from '../utils/theme'; -import IndexView from './IndexView'; -import { FullBoxContainer } from '../components/LayoutUtils'; -import { LocalStorageProvider } from '../providers/LocalStorageProvider'; -import ClusterView from './ClusterView'; -import NodeInfoView from './NodeInfoView'; -import ApiView from './ApiView'; +import { CssBaseline, ThemeProvider } from "@mui/material"; +import { Navigate, Route, Routes } from "react-router-dom"; +import { FullBoxContainer } from "../components/LayoutUtils"; +import SideBar from "../components/SideBar"; +import TopBar from "../components/TopBar"; +import { LocalStorageProvider } from "../providers/LocalStorageProvider"; +import { theme } from "../utils/theme"; +import ApiView from "./ApiView"; +import ClusterView from "./ClusterView"; +import IndexesView from "./IndexesView"; +import IndexView from "./IndexView"; +import NodeInfoView from "./NodeInfoView"; +import SearchView from "./SearchView"; function App() { return ( - + diff --git a/quickwit/quickwit-ui/src/views/ClusterView.test.jsx b/quickwit/quickwit-ui/src/views/ClusterView.test.jsx index 332af9d75ad..e4989b7194e 100644 --- a/quickwit/quickwit-ui/src/views/ClusterView.test.jsx +++ b/quickwit/quickwit-ui/src/views/ClusterView.test.jsx @@ -12,17 +12,17 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { render, unmountComponentAtNode } from "react-dom"; +import { screen } from "@testing-library/dom"; import { waitFor } from "@testing-library/react"; -import { screen } from '@testing-library/dom'; -import ClusterView from './ClusterView'; +import { render, unmountComponentAtNode } from "react-dom"; import { act } from "react-dom/test-utils"; import { Client } from "../services/client"; +import ClusterView from "./ClusterView"; -jest.mock('../services/client'); +jest.mock("../services/client"); const mockedUsedNavigate = jest.fn(); -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), +jest.mock("react-router-dom", () => ({ + ...jest.requireActual("react-router-dom"), useNavigate: () => mockedUsedNavigate, })); @@ -40,38 +40,42 @@ afterEach(() => { container = null; }); -test('renders ClusterStateView', async () => { +test("renders ClusterStateView", async () => { const clusterState = { - "state": { - "seed_addrs": [], - "node_states": { - "node-green-uCdq/1656700092": { - "key_values": { - "available_services": { - "value": "searcher", - "version": 3 - }, - "grpc_address": { - "value": "127.0.0.1:7281", - "version": 2 - }, - "heartbeat": { - "value": "24", - "version": 27 - } + state: { + seed_addrs: [], + node_states: { + "node-green-uCdq/1656700092": { + key_values: { + available_services: { + value: "searcher", + version: 3, + }, + grpc_address: { + value: "127.0.0.1:7281", + version: 2, + }, + heartbeat: { + value: "24", + version: 27, }, - "max_version": 27 - } - } + }, + max_version: 27, + }, }, - "live_nodes": [], - "dead_nodes": [] + }, + live_nodes: [], + dead_nodes: [], }; - Client.prototype.cluster.mockImplementation(() => Promise.resolve(clusterState)); + Client.prototype.cluster.mockImplementation(() => + Promise.resolve(clusterState), + ); await act(async () => { render(, 
container); }); - await waitFor(() => expect(screen.getByText(/node-green-uCdq/)).toBeInTheDocument()); + await waitFor(() => + expect(screen.getByText(/node-green-uCdq/)).toBeInTheDocument(), + ); }); diff --git a/quickwit/quickwit-ui/src/views/ClusterView.tsx b/quickwit/quickwit-ui/src/views/ClusterView.tsx index 48ba6245aaa..f3c6146cf29 100644 --- a/quickwit/quickwit-ui/src/views/ClusterView.tsx +++ b/quickwit/quickwit-ui/src/views/ClusterView.tsx @@ -12,21 +12,26 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { Typography } from '@mui/material'; -import { useEffect, useMemo, useState } from 'react'; -import ApiUrlFooter from '../components/ApiUrlFooter'; -import { JsonEditor } from '../components/JsonEditor'; -import { ViewUnderAppBarBox, FullBoxContainer, QBreadcrumbs } from '../components/LayoutUtils'; -import Loader from '../components/Loader'; -import ErrorResponseDisplay from '../components/ResponseErrorDisplay'; -import { Client } from '../services/client'; -import { Cluster, ResponseError } from '../utils/models'; - +import { Typography } from "@mui/material"; +import { useEffect, useMemo, useState } from "react"; +import ApiUrlFooter from "../components/ApiUrlFooter"; +import { JsonEditor } from "../components/JsonEditor"; +import { + FullBoxContainer, + QBreadcrumbs, + ViewUnderAppBarBox, +} from "../components/LayoutUtils"; +import Loader from "../components/Loader"; +import ErrorResponseDisplay from "../components/ResponseErrorDisplay"; +import { Client } from "../services/client"; +import { Cluster, ResponseError } from "../utils/models"; function ClusterView() { const [loading, setLoading] = useState(false); const [cluster, setCluster] = useState(null); - const [responseError, setResponseError] = useState(null); + const [responseError, setResponseError] = useState( + null, + ); const quickwitClient = useMemo(() => new Client(), []); useEffect(() => { @@ -40,7 +45,7 @@ function ClusterView() { (error) => { setLoading(false); setResponseError(error); - } + }, ); }, [quickwitClient]); @@ -51,8 +56,8 @@ function ClusterView() { if (loading || cluster == null) { return ; } - return - } + return ; + }; return ( @@ -60,11 +65,9 @@ function ClusterView() { Cluster - - { renderResult() } - + {renderResult()} - { ApiUrlFooter('api/v1/cluster') } + {ApiUrlFooter("api/v1/cluster")} ); } diff --git a/quickwit/quickwit-ui/src/views/IndexView.test.jsx b/quickwit/quickwit-ui/src/views/IndexView.test.jsx index a69cca95e7f..03a546ae8d4 100644 --- a/quickwit/quickwit-ui/src/views/IndexView.test.jsx +++ b/quickwit/quickwit-ui/src/views/IndexView.test.jsx @@ -12,36 +12,38 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+import { render, screen, waitFor } from "@testing-library/react"; import { unmountComponentAtNode } from "react-dom"; -import { render, waitFor, screen } from "@testing-library/react"; import { act } from "react-dom/test-utils"; +import { BrowserRouter } from "react-router-dom"; import { Client } from "../services/client"; import IndexView from "./IndexView"; -import { BrowserRouter } from "react-router-dom"; -jest.mock('../services/client'); +jest.mock("../services/client"); const mockedUsedNavigate = jest.fn(); -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), +jest.mock("react-router-dom", () => ({ + ...jest.requireActual("react-router-dom"), useParams: () => ({ - indexId: 'my-new-fresh-index-id' - }) + indexId: "my-new-fresh-index-id", + }), })); -test('renders IndexView', async () => { +test("renders IndexView", async () => { const index = { metadata: { index_config: { - index_uri: 'my-new-fresh-index-uri', - } + index_uri: "my-new-fresh-index-uri", + }, }, - splits: [] + splits: [], }; Client.prototype.getIndex.mockImplementation(() => Promise.resolve(index)); await act(async () => { - render( , {wrapper: BrowserRouter}); + render(, { wrapper: BrowserRouter }); }); - await waitFor(() => expect(screen.getByText(/my-new-fresh-index-uri/)).toBeInTheDocument()); + await waitFor(() => + expect(screen.getByText(/my-new-fresh-index-uri/)).toBeInTheDocument(), + ); }); diff --git a/quickwit/quickwit-ui/src/views/IndexView.tsx b/quickwit/quickwit-ui/src/views/IndexView.tsx index 20b658da11b..400ada8cbef 100644 --- a/quickwit/quickwit-ui/src/views/IndexView.tsx +++ b/quickwit/quickwit-ui/src/views/IndexView.tsx @@ -12,22 +12,26 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { Box, styled, Typography, Tab } from '@mui/material'; -import Link, { LinkProps } from '@mui/material/Link'; -import React, { useCallback, useEffect, useMemo, useState } from 'react'; -import { Client } from '../services/client'; -import Loader from '../components/Loader'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Index } from '../utils/models'; -import { TabContext, TabList, TabPanel } from '@mui/lab'; -import { IndexSummary } from '../components/IndexSummary'; -import { JsonEditor } from '../components/JsonEditor'; -import { ViewUnderAppBarBox, FullBoxContainer, QBreadcrumbs } from '../components/LayoutUtils'; -import ApiUrlFooter from '../components/ApiUrlFooter'; +import { TabContext, TabList, TabPanel } from "@mui/lab"; +import { Box, styled, Tab, Typography } from "@mui/material"; +import Link, { LinkProps } from "@mui/material/Link"; +import React, { useCallback, useEffect, useMemo, useState } from "react"; +import { Link as RouterLink, useParams } from "react-router-dom"; +import ApiUrlFooter from "../components/ApiUrlFooter"; +import { IndexSummary } from "../components/IndexSummary"; +import { JsonEditor } from "../components/JsonEditor"; +import { + FullBoxContainer, + QBreadcrumbs, + ViewUnderAppBarBox, +} from "../components/LayoutUtils"; +import Loader from "../components/Loader"; +import { Client } from "../services/client"; +import { Index } from "../utils/models"; export type ErrorResult = { error: string; -} +}; const CustomTabPanel = styled(TabPanel)` padding-left: 0; @@ -47,10 +51,10 @@ function LinkRouter(props: LinkRouterProps) { function IndexView() { const { indexId } = useParams(); - const [loading, setLoading] = useState(false) - const [, setLoadingError] = useState(null) - const [tabIndex, setTabIndex] = useState('1'); - const [index, setIndex] = useState() + const [loading, setLoading] = useState(false); + const [, setLoadingError] = useState(null); + const [tabIndex, setTabIndex] = useState("1"); + const [index, setIndex] = useState(); const quickwitClient = useMemo(() => new Client(), []); const handleTabIndexChange = (_: React.SyntheticEvent, newValue: string) => { @@ -71,8 +75,8 @@ function IndexView() { }, (error) => { setLoading(false); - setLoadingError({error: error}); - } + setLoadingError({ error: error }); + }, ); } }, [indexId, quickwitClient]); @@ -82,44 +86,67 @@ function IndexView() { return ; } else { // TODO: remove this css with magic number `48px`. - return - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); } - } + }; useEffect(() => { fetchIndex(); @@ -134,9 +161,9 @@ function IndexView() { {indexId} - { renderFetchIndexResult() } + {renderFetchIndexResult()} - { ApiUrlFooter('api/v1/indexes/' + indexId) } + {ApiUrlFooter("api/v1/indexes/" + indexId)} ); } diff --git a/quickwit/quickwit-ui/src/views/IndexesView.test.jsx b/quickwit/quickwit-ui/src/views/IndexesView.test.jsx index b0b586de0cc..db2403755aa 100644 --- a/quickwit/quickwit-ui/src/views/IndexesView.test.jsx +++ b/quickwit/quickwit-ui/src/views/IndexesView.test.jsx @@ -12,16 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+import { screen } from "@testing-library/dom"; import { render, unmountComponentAtNode } from "react-dom"; -import {screen} from '@testing-library/dom' -import IndexesView from './IndexesView'; import { act } from "react-dom/test-utils"; -import {Client} from "../services/client"; +import { Client } from "../services/client"; +import IndexesView from "./IndexesView"; -jest.mock('../services/client'); +jest.mock("../services/client"); const mockedUsedNavigate = jest.fn(); -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), +jest.mock("react-router-dom", () => ({ + ...jest.requireActual("react-router-dom"), useNavigate: () => mockedUsedNavigate, })); @@ -39,31 +39,35 @@ afterEach(() => { container = null; }); -test('renders IndexesView', async () => { - const indexes = [{ - index_config: { - index_id: 'my-new-fresh-index', - index_uri: 'my-uri', - indexing_settings: { - timestamp_field: 'timestamp' - }, - search_settings: {}, - doc_mapping: { - store: false, - field_mappings: [], - tag_fields: [], - dynamic_mapping: false, +test("renders IndexesView", async () => { + const indexes = [ + { + index_config: { + index_id: "my-new-fresh-index", + index_uri: "my-uri", + indexing_settings: { + timestamp_field: "timestamp", + }, + search_settings: {}, + doc_mapping: { + store: false, + field_mappings: [], + tag_fields: [], + dynamic_mapping: false, + }, }, + sources: [], + create_timestamp: 1000, + update_timestamp: 1000, }, - sources: [], - create_timestamp: 1000, - update_timestamp: 1000, - }]; + ]; Client.prototype.listIndexes.mockResolvedValueOnce(() => indexes); await act(async () => { render(, container); }); - expect(screen.getByText(indexes[0].index_config.index_id)).toBeInTheDocument(); + expect( + screen.getByText(indexes[0].index_config.index_id), + ).toBeInTheDocument(); }); diff --git a/quickwit/quickwit-ui/src/views/IndexesView.tsx b/quickwit/quickwit-ui/src/views/IndexesView.tsx index ab637040576..507f5b6291d 100644 --- a/quickwit/quickwit-ui/src/views/IndexesView.tsx +++ b/quickwit/quickwit-ui/src/views/IndexesView.tsx @@ -12,19 +12,25 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { Box, Typography } from '@mui/material'; -import { useEffect, useMemo, useState } from 'react'; -import IndexesTable from '../components/IndexesTable'; -import { Client } from '../services/client'; -import Loader from '../components/Loader'; -import { IndexMetadata, ResponseError } from '../utils/models'; -import { ViewUnderAppBarBox, FullBoxContainer, QBreadcrumbs } from '../components/LayoutUtils'; -import ApiUrlFooter from '../components/ApiUrlFooter'; -import ErrorResponseDisplay from '../components/ResponseErrorDisplay'; +import { Box, Typography } from "@mui/material"; +import { useEffect, useMemo, useState } from "react"; +import ApiUrlFooter from "../components/ApiUrlFooter"; +import IndexesTable from "../components/IndexesTable"; +import { + FullBoxContainer, + QBreadcrumbs, + ViewUnderAppBarBox, +} from "../components/LayoutUtils"; +import Loader from "../components/Loader"; +import ErrorResponseDisplay from "../components/ResponseErrorDisplay"; +import { Client } from "../services/client"; +import { IndexMetadata, ResponseError } from "../utils/models"; function IndexesView() { const [loading, setLoading] = useState(false); - const [responseError, setResponseError] = useState(null); + const [responseError, setResponseError] = useState( + null, + ); const [indexesMetadata, setIndexesMetadata] = useState(); const quickwitClient = useMemo(() => new Client(), []); @@ -36,14 +42,14 @@ function IndexesView() { return ; } if (indexesMetadata.length > 0) { - return + return ( + + ); } - return - You have no index registered in your metastore. - - } + return You have no index registered in your metastore.; + }; useEffect(() => { setLoading(true); @@ -56,7 +62,7 @@ function IndexesView() { (error) => { setLoading(false); setResponseError(error); - } + }, ); }, [quickwitClient]); @@ -66,9 +72,9 @@ function IndexesView() { Indexes - { renderFetchIndexesResult() } + {renderFetchIndexesResult()} - { ApiUrlFooter('api/v1/indexes') } + {ApiUrlFooter("api/v1/indexes")} ); } diff --git a/quickwit/quickwit-ui/src/views/NodeInfoView.test.jsx b/quickwit/quickwit-ui/src/views/NodeInfoView.test.jsx index b32e981fea3..ed3c240315e 100644 --- a/quickwit/quickwit-ui/src/views/NodeInfoView.test.jsx +++ b/quickwit/quickwit-ui/src/views/NodeInfoView.test.jsx @@ -12,20 +12,20 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { render, unmountComponentAtNode } from "react-dom"; +import { screen } from "@testing-library/dom"; import { waitFor } from "@testing-library/react"; -import { screen } from '@testing-library/dom'; +import { render, unmountComponentAtNode } from "react-dom"; import { act } from "react-dom/test-utils"; import { Client } from "../services/client"; import NodeInfoView from "./NodeInfoView"; -jest.mock('../services/client'); +jest.mock("../services/client"); const mockedUsedNavigate = jest.fn(); -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), +jest.mock("react-router-dom", () => ({ + ...jest.requireActual("react-router-dom"), useParams: () => ({ - indexId: 'my-new-fresh-index-id' - }) + indexId: "my-new-fresh-index-id", + }), })); let container = null; @@ -42,24 +42,28 @@ afterEach(() => { container = null; }); -test('renders NodeInfoView', async () => { +test("renders NodeInfoView", async () => { const cluster = { - cluster_id: 'my cluster id', + cluster_id: "my cluster id", }; Client.prototype.cluster.mockImplementation(() => Promise.resolve(cluster)); const config = { - node_id: 'my-node-id', + node_id: "my-node-id", }; Client.prototype.config.mockImplementation(() => Promise.resolve(config)); const buildInfo = { - version: '0.3.2', + version: "0.3.2", }; - Client.prototype.buildInfo.mockImplementation(() => Promise.resolve(buildInfo)); + Client.prototype.buildInfo.mockImplementation(() => + Promise.resolve(buildInfo), + ); await act(async () => { render(, container); }); - await waitFor(() => expect(screen.getByText(/my-node-id/)).toBeInTheDocument()); + await waitFor(() => + expect(screen.getByText(/my-node-id/)).toBeInTheDocument(), + ); }); diff --git a/quickwit/quickwit-ui/src/views/NodeInfoView.tsx b/quickwit/quickwit-ui/src/views/NodeInfoView.tsx index 76d8ba50b06..5b052fbd97d 100644 --- a/quickwit/quickwit-ui/src/views/NodeInfoView.tsx +++ b/quickwit/quickwit-ui/src/views/NodeInfoView.tsx @@ -12,15 +12,19 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import { TabContext, TabList, TabPanel } from '@mui/lab'; -import { Box, Tab, Typography, styled } from '@mui/material'; -import { useEffect, useMemo, useState } from 'react'; -import ApiUrlFooter from '../components/ApiUrlFooter'; -import { JsonEditor } from '../components/JsonEditor'; -import { ViewUnderAppBarBox, FullBoxContainer, QBreadcrumbs } from '../components/LayoutUtils'; -import Loader from '../components/Loader'; -import { Client } from '../services/client'; -import { QuickwitBuildInfo } from '../utils/models'; +import { TabContext, TabList, TabPanel } from "@mui/lab"; +import { Box, styled, Tab, Typography } from "@mui/material"; +import { useEffect, useMemo, useState } from "react"; +import ApiUrlFooter from "../components/ApiUrlFooter"; +import { JsonEditor } from "../components/JsonEditor"; +import { + FullBoxContainer, + QBreadcrumbs, + ViewUnderAppBarBox, +} from "../components/LayoutUtils"; +import Loader from "../components/Loader"; +import { Client } from "../services/client"; +import { QuickwitBuildInfo } from "../utils/models"; const CustomTabPanel = styled(TabPanel)` padding-left: 0; @@ -32,15 +36,17 @@ function NodeInfoView() { const [loadingCounter, setLoadingCounter] = useState(2); const [nodeId, setNodeId] = useState(""); // eslint-disable-next-line - const [nodeConfig, setNodeConfig] = useState>(null); + const [nodeConfig, setNodeConfig] = useState>( + null, + ); const [buildInfo, setBuildInfo] = useState(null); - const [tabIndex, setTabIndex] = useState('1'); + const [tabIndex, setTabIndex] = useState("1"); const quickwitClient = useMemo(() => new Client(), []); const urlByTab: Record = { - '1': 'api/v1/config', - '2': 'api/v1/version', - } + "1": "api/v1/config", + "2": "api/v1/version", + }; const handleTabIndexChange = (_: React.SyntheticEvent, newValue: string) => { setTabIndex(newValue); @@ -52,31 +58,31 @@ function NodeInfoView() { setNodeId(cluster.node_id); }, (error) => { - console.log('Error when fetching cluster info:', error); - } - ) + console.log("Error when fetching cluster info:", error); + }, + ); }); useEffect(() => { setLoadingCounter(2); quickwitClient.buildInfo().then( (fetchedBuildInfo) => { - setLoadingCounter(prevCounter => prevCounter - 1); + setLoadingCounter((prevCounter) => prevCounter - 1); setBuildInfo(fetchedBuildInfo); }, (error) => { - setLoadingCounter(prevCounter => prevCounter - 1); - console.log('Error when fetching build info: ', error); - } + setLoadingCounter((prevCounter) => prevCounter - 1); + console.log("Error when fetching build info: ", error); + }, ); quickwitClient.config().then( (fetchedConfig) => { - setLoadingCounter(prevCounter => prevCounter - 1); + setLoadingCounter((prevCounter) => prevCounter - 1); setNodeConfig(fetchedConfig); }, (error) => { - setLoadingCounter(prevCounter => prevCounter - 1); - console.log('Error when fetching node config: ', error); - } + setLoadingCounter((prevCounter) => prevCounter - 1); + console.log("Error when fetching node config: ", error); + }, ); }, [quickwitClient]); @@ -84,24 +90,26 @@ function NodeInfoView() { if (loadingCounter !== 0) { return ; } else { - return - - - - - - - - - - - - - - - + return ( + + + + + + + + + + + + + + + + + ); } - } + }; return ( @@ -109,9 +117,9 @@ function NodeInfoView() { Node ID: {nodeId} (self) - { renderResult() } + {renderResult()} - { ApiUrlFooter(urlByTab[tabIndex] || '') } + {ApiUrlFooter(urlByTab[tabIndex] || "")} ); } diff --git a/quickwit/quickwit-ui/src/views/SearchView.test.jsx 
b/quickwit/quickwit-ui/src/views/SearchView.test.jsx
index 2d5ddf9d4d0..2e0f25b10d4 100644
--- a/quickwit/quickwit-ui/src/views/SearchView.test.jsx
+++ b/quickwit/quickwit-ui/src/views/SearchView.test.jsx
@@ -12,22 +12,23 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-import { render, unmountComponentAtNode } from "react-dom";
+import { screen } from "@testing-library/dom";
 import { waitFor } from "@testing-library/react";
-import { screen } from '@testing-library/dom';
+import { render, unmountComponentAtNode } from "react-dom";
 import { act } from "react-dom/test-utils";
 import { Client } from "../services/client";
 import SearchView from "./SearchView";
-jest.mock('../services/client');
+jest.mock("../services/client");
 const mockedUsedNavigate = jest.fn();
-jest.mock('react-router-dom', () => ({
-  ...jest.requireActual('react-router-dom'),
+jest.mock("react-router-dom", () => ({
+  ...jest.requireActual("react-router-dom"),
   useLocation: () => ({
-    pathname: '/search',
-    search: 'index_id=my-new-fresh-index-idmax_hits=10&start_timestamp=1460554590&end_timestamp=1460554592&sort_by_field=-timestamp'
+    pathname: "/search",
+    search:
+      "index_id=my-new-fresh-index-idmax_hits=10&start_timestamp=1460554590&end_timestamp=1460554592&sort_by_field=-timestamp",
   }),
-  useNavigate: () => mockedUsedNavigate
+  useNavigate: () => mockedUsedNavigate,
 }));
 let container = null;
@@ -44,37 +45,48 @@ afterEach(() => {
   container = null;
 });
-test('renders SearchView', async () => {
+test("renders SearchView", async () => {
   const index = {
     metadata: {
       index_config: {
-        index_id: 'my-new-fresh-index-id',
-        index_uri: 'my-new-fresh-index-uri',
+        index_id: "my-new-fresh-index-id",
+        index_uri: "my-new-fresh-index-uri",
         indexing_settings: {},
         doc_mapping: {
-          field_mappings: [{
-            name: 'timestamp',
-            type: 'i64'
-          }]
-        }
-      }
+          field_mappings: [
+            {
+              name: "timestamp",
+              type: "i64",
+            },
+          ],
+        },
+      },
     },
-    splits: []
+    splits: [],
   };
   Client.prototype.getIndex.mockImplementation(() => Promise.resolve(index));
-  Client.prototype.listIndexes.mockImplementation(() => Promise.resolve([index.metadata]));
+  Client.prototype.listIndexes.mockImplementation(() =>
+    Promise.resolve([index.metadata]),
+  );
   const searchResponse = {
     num_hits: 2,
-    hits: [{body: 'INFO This is an info log'}, {body: 'WARN This is a warn log'}],
+    hits: [
+      { body: "INFO This is an info log" },
+      { body: "WARN This is a warn log" },
+    ],
     elapsed_time_micros: 10,
-    errors: []
-  }
-  Client.prototype.search.mockImplementation(() => Promise.resolve(searchResponse));
+    errors: [],
+  };
+  Client.prototype.search.mockImplementation(() =>
+    Promise.resolve(searchResponse),
+  );
   await act(async () => {
     render(<SearchView />, container);
   });
-  await waitFor(() => expect(screen.getByText(/This is an info log/)).toBeInTheDocument());
+  await waitFor(() =>
+    expect(screen.getByText(/This is an info log/)).toBeInTheDocument(),
+  );
 });
diff --git a/quickwit/quickwit-ui/src/views/SearchView.tsx b/quickwit/quickwit-ui/src/views/SearchView.tsx
index 070bd4db1f4..486885cf402 100644
--- a/quickwit/quickwit-ui/src/views/SearchView.tsx
+++ b/quickwit/quickwit-ui/src/views/SearchView.tsx
@@ -12,26 +12,43 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-import { useEffect, useMemo, useRef, useState } from 'react'; -import { useLocation, useNavigate } from 'react-router-dom'; -import ApiUrlFooter from '../components/ApiUrlFooter'; -import { IndexSideBar } from '../components/IndexSideBar'; -import { ViewUnderAppBarBox, FullBoxContainer } from '../components/LayoutUtils'; -import { QueryEditorActionBar } from '../components/QueryActionBar'; -import { QueryEditor } from '../components/QueryEditor/QueryEditor'; -import { AggregationEditor } from '../components/QueryEditor/AggregationEditor'; -import SearchResult from '../components/SearchResult/SearchResult'; -import { useLocalStorage } from '../providers/LocalStorageProvider'; -import { Client } from '../services/client'; -import { EMPTY_SEARCH_REQUEST, Index, IndexMetadata, ResponseError, SearchRequest, SearchResponse } from '../utils/models'; -import { hasSearchParams, parseSearchUrl, toUrlSearchRequestParams } from '../utils/urls'; +import { useEffect, useMemo, useRef, useState } from "react"; +import { useLocation, useNavigate } from "react-router-dom"; +import ApiUrlFooter from "../components/ApiUrlFooter"; +import { IndexSideBar } from "../components/IndexSideBar"; +import { + FullBoxContainer, + ViewUnderAppBarBox, +} from "../components/LayoutUtils"; +import { QueryEditorActionBar } from "../components/QueryActionBar"; +import { AggregationEditor } from "../components/QueryEditor/AggregationEditor"; +import { QueryEditor } from "../components/QueryEditor/QueryEditor"; +import SearchResult from "../components/SearchResult/SearchResult"; +import { useLocalStorage } from "../providers/LocalStorageProvider"; +import { Client } from "../services/client"; +import { + EMPTY_SEARCH_REQUEST, + Index, + IndexMetadata, + ResponseError, + SearchRequest, + SearchResponse, +} from "../utils/models"; +import { + hasSearchParams, + parseSearchUrl, + toUrlSearchRequestParams, +} from "../utils/urls"; -function updateSearchRequestWithIndex(index: Index | null, searchRequest: SearchRequest) { +function updateSearchRequestWithIndex( + index: Index | null, + searchRequest: SearchRequest, +) { // If we have a timestamp field, order by desc on the timestamp field. if (index?.metadata.index_config.doc_mapping.timestamp_field) { searchRequest.sortByField = { field_name: index?.metadata.index_config.doc_mapping.timestamp_field, - order: 'Desc' + order: "Desc", }; } else { searchRequest.sortByField = null; @@ -48,10 +65,16 @@ function SearchView() { const navigate = useNavigate(); const [index, setIndex] = useState(null); const prevIndexIdRef = useRef(); - const [searchResponse, setSearchResponse] = useState(null); + const [searchResponse, setSearchResponse] = useState( + null, + ); const [searchError, setSearchError] = useState(null); const [queryRunning, setQueryRunning] = useState(false); - const [searchRequest, setSearchRequest] = useState(hasSearchParams(location.search) ? parseSearchUrl(location.search) : EMPTY_SEARCH_REQUEST); + const [searchRequest, setSearchRequest] = useState( + hasSearchParams(location.search) + ? 
parseSearchUrl(location.search) + : EMPTY_SEARCH_REQUEST, + ); const updateLastSearchRequest = useLocalStorage().updateLastSearchRequest; const quickwitClient = useMemo(() => new Client(), []); @@ -60,32 +83,42 @@ function SearchView() { return; } - console.log('Run search...', updatedSearchRequest); + console.log("Run search...", updatedSearchRequest); updateSearchRequestWithIndex(index, updatedSearchRequest); setSearchRequest(updatedSearchRequest); setQueryRunning(true); setSearchError(null); - navigate('/search?' + toUrlSearchRequestParams(updatedSearchRequest).toString()); - const timestamp_field = index?.metadata.index_config.doc_mapping.timestamp_field || null; - quickwitClient.search(updatedSearchRequest, timestamp_field).then((response) => { - updateLastSearchRequest(updatedSearchRequest); - setSearchResponse(response); - setQueryRunning(false); - }, (error) => { - setQueryRunning(false); - setSearchError(error); - console.error('Error when running search request', error); - }); - } + navigate( + "/search?" + toUrlSearchRequestParams(updatedSearchRequest).toString(), + ); + const timestamp_field = + index?.metadata.index_config.doc_mapping.timestamp_field || null; + quickwitClient.search(updatedSearchRequest, timestamp_field).then( + (response) => { + updateLastSearchRequest(updatedSearchRequest); + setSearchResponse(response); + setQueryRunning(false); + }, + (error) => { + setQueryRunning(false); + setSearchError(error); + console.error("Error when running search request", error); + }, + ); + }; const onIndexMetadataUpdate = (indexMetadata: IndexMetadata | null) => { - setSearchRequest(previousRequest => { + setSearchRequest((previousRequest) => { updateSearchRequestWithIndex(index, previousRequest); - return {...previousRequest, indexId: indexMetadata === null ? null : indexMetadata.index_config.index_id}; + return { + ...previousRequest, + indexId: + indexMetadata === null ? null : indexMetadata.index_config.index_id, + }; }); - } + }; const onSearchRequestUpdate = (searchRequest: SearchRequest) => { setSearchRequest(searchRequest); - } + }; useEffect(() => { if (prevIndexIdRef.current !== index?.metadata.index_config.index_id) { setSearchResponse(null); @@ -94,19 +127,26 @@ function SearchView() { if (prevIndexIdRef.current === null) { runSearch(searchRequest); } - prevIndexIdRef.current = index === null ? null : index.metadata.index_config.index_id; + prevIndexIdRef.current = + index === null ? null : index.metadata.index_config.index_id; }, [index]); useEffect(() => { if (!searchRequest.indexId) { return; } - if (index !== null && index.metadata.index_config.index_id === searchRequest.indexId) { + if ( + index !== null && + index.metadata.index_config.index_id === searchRequest.indexId + ) { return; } // If index id is changing, it's better to reset timestamps as the time unit may be different // between indexes. - if (prevIndexIdRef.current !== null && prevIndexIdRef.current !== index?.metadata.index_config.index_id) { + if ( + prevIndexIdRef.current !== null && + prevIndexIdRef.current !== index?.metadata.index_config.index_id + ) { searchRequest.startTimestamp = null; searchRequest.endTimestamp = null; } @@ -118,39 +158,49 @@ function SearchView() { const searchParams = toUrlSearchRequestParams(searchRequest); // `toUrlSearchRequestParams` is used for the UI urls. We need to remove the `indexId` request parameter to generate // the correct API url, this is the only difference. 
- searchParams.delete('index_id'); + searchParams.delete("index_id"); return ( - - - - - - - - - - { index !== null && ApiUrlFooter(`api/v1/${index?.metadata.index_config.index_id}/search?${searchParams.toString()}`) } + + + + + + + + - + {index !== null && + ApiUrlFooter( + `api/v1/${index?.metadata.index_config.index_id}/search?${searchParams.toString()}`, + )} + + ); } diff --git a/quickwit/quickwit-ui/tsconfig.json b/quickwit/quickwit-ui/tsconfig.json index 294507dc6f1..37e696fc628 100644 --- a/quickwit/quickwit-ui/tsconfig.json +++ b/quickwit/quickwit-ui/tsconfig.json @@ -1,11 +1,7 @@ { "compilerOptions": { "target": "ESNext", - "lib": [ - "dom", - "dom.iterable", - "esnext" - ], + "lib": ["dom", "dom.iterable", "esnext"], "allowJs": true, "skipLibCheck": true, "esModuleInterop": true, @@ -22,11 +18,7 @@ "resolveJsonModule": true, "isolatedModules": true, "noEmit": true, - "jsx": "react-jsx", + "jsx": "react-jsx" }, - "include": [ - "src", "typings", - "tests", - "tests/integration/index.test.ts" - ] + "include": ["src", "typings", "tests", "tests/integration/index.test.ts"] } diff --git a/quickwit/quickwit-ui/typings/fonts.d.ts b/quickwit/quickwit-ui/typings/fonts.d.ts index 956c01c4725..5dffd39d7e4 100644 --- a/quickwit/quickwit-ui/typings/fonts.d.ts +++ b/quickwit/quickwit-ui/typings/fonts.d.ts @@ -1,3 +1,3 @@ -declare module '*.woff'; -declare module '*.woff2'; -declare module '*.svg'; \ No newline at end of file +declare module "*.woff"; +declare module "*.woff2"; +declare module "*.svg"; diff --git a/quickwit/quickwit-ui/yarn.lock b/quickwit/quickwit-ui/yarn.lock index 1c7c7a4f1b7..57e118af384 100644 --- a/quickwit/quickwit-ui/yarn.lock +++ b/quickwit/quickwit-ui/yarn.lock @@ -1295,6 +1295,60 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@biomejs/biome@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/biome/-/biome-2.3.5.tgz#fae9977584fc7fe28f6d4a140982871aa11ae8f6" + integrity sha512-HvLhNlIlBIbAV77VysRIBEwp55oM/QAjQEin74QQX9Xb259/XP/D5AGGnZMOyF1el4zcvlNYYR3AyTMUV3ILhg== + optionalDependencies: + "@biomejs/cli-darwin-arm64" "2.3.5" + "@biomejs/cli-darwin-x64" "2.3.5" + "@biomejs/cli-linux-arm64" "2.3.5" + "@biomejs/cli-linux-arm64-musl" "2.3.5" + "@biomejs/cli-linux-x64" "2.3.5" + "@biomejs/cli-linux-x64-musl" "2.3.5" + "@biomejs/cli-win32-arm64" "2.3.5" + "@biomejs/cli-win32-x64" "2.3.5" + +"@biomejs/cli-darwin-arm64@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.5.tgz#364de69c055851223d8bab37390ead748e4b208d" + integrity sha512-fLdTur8cJU33HxHUUsii3GLx/TR0BsfQx8FkeqIiW33cGMtUD56fAtrh+2Fx1uhiCsVZlFh6iLKUU3pniZREQw== + +"@biomejs/cli-darwin-x64@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.5.tgz#6ecfdfb9644e86278801081db88dc4fceb36a666" + integrity sha512-qpT8XDqeUlzrOW8zb4k3tjhT7rmvVRumhi2657I2aGcY4B+Ft5fNwDdZGACzn8zj7/K1fdWjgwYE3i2mSZ+vOA== + +"@biomejs/cli-linux-arm64-musl@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.5.tgz#cc064d50165687ec5917f0d04d258cc72f6b143f" + integrity sha512-eGUG7+hcLgGnMNl1KHVZUYxahYAhC462jF/wQolqu4qso2MSk32Q+QrpN7eN4jAHAg7FUMIo897muIhK4hXhqg== + +"@biomejs/cli-linux-arm64@2.3.5": + version "2.3.5" + resolved 
"https://registry.yarnpkg.com/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.5.tgz#b340d9b8b45f568fc719b9c00b3d725a38d2c6be" + integrity sha512-u/pybjTBPGBHB66ku4pK1gj+Dxgx7/+Z0jAriZISPX1ocTO8aHh8x8e7Kb1rB4Ms0nA/SzjtNOVJ4exVavQBCw== + +"@biomejs/cli-linux-x64-musl@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.5.tgz#097d46b6cac00bd58e41dfcc02afcfaff834e2ab" + integrity sha512-awVuycTPpVTH/+WDVnEEYSf6nbCBHf/4wB3lquwT7puhNg8R4XvonWNZzUsfHZrCkjkLhFH/vCZK5jHatD9FEg== + +"@biomejs/cli-linux-x64@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.5.tgz#0c02b06aca6ba905c674175bd6b24d88c862ae9f" + integrity sha512-XrIVi9YAW6ye0CGQ+yax0gLfx+BFOtKaNX74n+xHWla6Cl6huUmcKNO7HPx7BiKnJUzrxXY1qYlm7xMvi08X4g== + +"@biomejs/cli-win32-arm64@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.5.tgz#c06653bfc77c09aa1a5e714042d737984ea8c1e1" + integrity sha512-DlBiMlBZZ9eIq4H7RimDSGsYcOtfOIfZOaI5CqsWiSlbTfqbPVfWtCf92wNzx8GNMbu1s7/g3ZZESr6+GwM/SA== + +"@biomejs/cli-win32-x64@2.3.5": + version "2.3.5" + resolved "https://registry.yarnpkg.com/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.5.tgz#8594814adb902c996603c729dda9ca359eeec316" + integrity sha512-nUmR8gb6yvrKhtRgzwo/gDimPwnO5a4sCydf8ZS2kHIJhEmSmk+STsusr1LHTuM//wXppBawvSQi2xFXJCdgKQ== + "@braintree/sanitize-url@=6.0.4": version "6.0.4" resolved "https://registry.yarnpkg.com/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz#923ca57e173c6b232bbbb07347b1be982f03e783" @@ -10548,11 +10602,6 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== -prettier@2.8.1: - version "2.8.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.1.tgz#4e1fd11c34e2421bc1da9aea9bd8127cd0a35efc" - integrity sha512-lqGoSJBQNJidqCHE80vqZJHWHRFoNYsSpP9AjFhlhi9ODCJA541svILes/+/1GM3VaL/abZi7cpFzOpdR9UPKg== - pretty-bytes@^5.3.0, pretty-bytes@^5.4.1, pretty-bytes@^5.6.0: version "5.6.0" resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" diff --git a/quickwit/rest-api-tests/scenarii/aggregations/0001-aggregations.yaml b/quickwit/rest-api-tests/scenarii/aggregations/0001-aggregations.yaml index f81c2215f40..63daf92db06 100644 --- a/quickwit/rest-api-tests/scenarii/aggregations/0001-aggregations.yaml +++ b/quickwit/rest-api-tests/scenarii/aggregations/0001-aggregations.yaml @@ -375,6 +375,7 @@ expected: aggregations: response_stats: sum_of_squares: 55300.0 +--- # Test term aggs number precision method: [GET] engines: @@ -393,3 +394,86 @@ expected: buckets: - doc_count: 1 key: 1769070189829214200 +--- +# Test composite aggregation +method: [GET] +engines: + - quickwit +endpoint: _elastic/aggregations/_search +json: + size: 0 + aggs: + host_name_composite: + composite: + size: 5 + sources: + - host: + terms: + field: "host" + missing_bucket: true + - name: + terms: + field: "name" + - response: + histogram: + field: "response" + interval: 50 +expected: + aggregations: + host_name_composite: + buckets: + - key: { "host": null, "name": "Bernhard", "response": 100.0 } + doc_count: 1 + - key: { "host": null, "name": "Fritz", "response": 0.0 } + doc_count: 2 + - key: { "host": "192.168.0.1", "name": "Fred", "response": 100.0 } + doc_count: 1 + - 
key: { "host": "192.168.0.1", "name": "Fritz", "response": 0.0 } + doc_count: 1 + - key: { "host": "192.168.0.10", "name": "Albert", "response": 100.0 } + doc_count: 1 + after_key: + host: "192.168.0.10" + name: "Albert" + response: 100.0 + +--- +# Test composite aggregation paging +method: [GET] +engines: + - quickwit +endpoint: _elastic/aggregations/_search +json: + size: 0 + aggs: + host_name_composite: + composite: + size: 5 + sources: + - host: + terms: + field: "host" + missing_bucket: true + - name: + terms: + field: "name" + - response: + histogram: + field: "response" + interval: 50 + after: + host: "192.168.0.10" + name: "Albert" + response: 100.0 +expected: + aggregations: + host_name_composite: + buckets: + - key: { "host": "192.168.0.10", "name": "Holger", "response": 0.0 } + doc_count: 1 + # Horst is missing because his response field is missing + - key: { "host": "192.168.0.10", "name": "Werner", "response": 0.0 } + doc_count: 1 + - key: { "host": "192.168.0.11", "name": "Manfred", "response": 100.0 } + doc_count: 1 +--- \ No newline at end of file diff --git a/quickwit/rest-api-tests/scenarii/es_compatibility/0025-msearch.yaml b/quickwit/rest-api-tests/scenarii/es_compatibility/0025-msearch.yaml index 1a7fbf631ed..2ab8f12fa0c 100644 --- a/quickwit/rest-api-tests/scenarii/es_compatibility/0025-msearch.yaml +++ b/quickwit/rest-api-tests/scenarii/es_compatibility/0025-msearch.yaml @@ -104,3 +104,25 @@ expected: $expect: "len(val) == 1" # Contains only 'actor' actor: id: 5688 +--- +# test missing index +endpoint: "_msearch" +method: POST +ndjson: + - {"index":"idontexist"} + - {"query" : {"match" : { "type": "PushEvent"}}, "size": 0} +expected: + responses: + - status: 404 +--- +endpoint: "_msearch" +method: POST +ndjson: + - {"index":"idontexist", "ignore_unavailable": true} + - {"query" : {"match" : { "type": "PushEvent"}}, "size": 0} +expected: + responses: + - hits: + total: + value: 0 + status: 200 diff --git a/quickwit/rest-api-tests/scenarii/es_compatibility/multi-indices/0004-missing_index_query.yaml b/quickwit/rest-api-tests/scenarii/es_compatibility/multi-indices/0004-missing_index_query.yaml new file mode 100644 index 00000000000..096f3a8f8d3 --- /dev/null +++ b/quickwit/rest-api-tests/scenarii/es_compatibility/multi-indices/0004-missing_index_query.yaml @@ -0,0 +1,27 @@ +endpoint: "idontexist/_search" +params: + q: "*" +status_code: 404 +--- +endpoint: "idontexist/_search" +params: + q: "*" + ignore_unavailable: "true" +expected: + hits: + total: + value: 0 +--- +endpoint: "gharchive-*,idontexist/_search" +params: + q: "*" +status_code: 404 +--- +endpoint: "gharchive-*,idontexist/_search" +params: + q: "*" + ignore_unavailable: "true" +expected: + hits: + total: + value: 4 diff --git a/quickwit/rust-toolchain.toml b/quickwit/rust-toolchain.toml index a7a5e1afcc8..e54a09951e9 100644 --- a/quickwit/rust-toolchain.toml +++ b/quickwit/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.90" +channel = "1.91" components = ["cargo", "clippy", "rustfmt", "rust-docs"]