diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..36ded71 --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,18 @@ +{ + "permissions": { + "allow": [ + "Bash(deno test:*)", + "Bash(deno add:*)", + "Bash(deno task:*)", + "Bash(deno check:*)", + "Bash(find:*)", + "Bash(deno doc:*)", + "WebFetch(domain:docs.eventsourcingdb.io)", + "Bash(cat:*)", + "WebSearch", + "Bash(ls:*)" + ], + "deny": [], + "ask": [] + } +} diff --git a/.claudeignore b/.claudeignore new file mode 100644 index 0000000..cf6b454 --- /dev/null +++ b/.claudeignore @@ -0,0 +1,5 @@ +.env +.env.* +.otel_token +*.pem +*.key diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index 91e476a..6ac1c12 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -12,62 +12,34 @@ concurrency: jobs: deploy: - name: Build & Deploy + name: Format, Lint, Type Check & Test runs-on: ubuntu-latest env: DENO_DIR: deno_cache_dir steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Cache Deno dependencies - uses: actions/cache@v4 + uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 with: path: ${{ env.DENO_DIR }} - key: deno_cache + key: deno-${{ runner.os }}-${{ hashFiles('deno.lock') }} + restore-keys: | + deno-${{ runner.os }}- - - uses: denoland/setup-deno@v2 + - uses: denoland/setup-deno@e95548e56dfa95d4e1a28d6f422fafe75c4c26fb with: - deno-version: v2.3.x + deno-version: v2 - # Check if the code is formatted correctly. - - name: Check formatting (core) + - name: Check formatting run: deno fmt --check - working-directory: ./packages/core - - name: Check formatting (mongodb) - run: deno fmt --check - working-directory: ./packages/mongodb - - name: Check formatting (oak) - run: deno fmt --check - working-directory: ./packages/oak - - name: Check formatting (utils) - run: deno fmt --check - working-directory: ./packages/utils - # Scan the code for syntax errors and style issues. - - name: Lint (core) - run: deno lint - working-directory: ./packages/core - - name: Lint (mongodb) - run: deno lint - working-directory: ./packages/mongodb - - name: Lint (oak) + - name: Lint run: deno lint - working-directory: ./packages/oak - - name: Lint (utils) - run: deno lint - working-directory: ./packages/utils - # Run all test files in the repository and collect code coverage. - - name: Test (core) - run: deno test --allow-all --permit-no-files - working-directory: ./packages/core - - name: Test (mongodb) - run: deno test --allow-all --permit-no-files - working-directory: ./packages/mongodb - - name: Test (oak) - run: deno test --allow-all --permit-no-files - working-directory: ./packages/oak - - name: Test (utils) + - name: Type check + run: deno check + + - name: Test run: deno test --allow-all --permit-no-files - working-directory: ./packages/utils diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 218dbbd..609267b 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -5,12 +5,6 @@ on: release: types: [created] -# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages -permissions: - contents: read - pages: write - id-token: write - # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 
concurrency: @@ -20,19 +14,21 @@ concurrency: jobs: build: runs-on: ubuntu-latest + permissions: + contents: read steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Setup Node - uses: actions/setup-node@v4 + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 with: node-version: 20 cache: npm cache-dependency-path: "./docs/package-lock.json" - name: Setup Pages - uses: actions/configure-pages@v4 + uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b - name: Install dependencies working-directory: ./docs @@ -43,7 +39,7 @@ jobs: run: npm run build - name: Upload artifact - uses: actions/upload-pages-artifact@v3 + uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b with: path: docs/.vitepress/dist @@ -51,6 +47,9 @@ jobs: name: Deploy needs: build runs-on: ubuntu-latest + permissions: + pages: write + id-token: write environment: name: github-pages @@ -59,4 +58,4 @@ jobs: steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4 + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index e1f0c92..0a1af16 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -5,30 +5,31 @@ on: release: types: [created] -permissions: - contents: read - id-token: write # The OIDC ID token is used for authentication with JSR. - jobs: publish: + permissions: + contents: read + id-token: write # The OIDC ID token is used for authentication with JSR. + runs-on: ubuntu-latest + steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - - uses: denoland/setup-deno@v2 + - uses: denoland/setup-deno@e95548e56dfa95d4e1a28d6f422fafe75c4c26fb with: - deno-version: v2.3.x + deno-version: v2 - name: Publish core package working-directory: ./packages/core - run: deno publish --allow-slow-types + run: deno publish - name: Publish mongodb package working-directory: ./packages/mongodb run: deno publish - - name: Publish oak package - working-directory: ./packages/oak + - name: Publish hono package + working-directory: ./packages/hono run: deno publish - name: Publish utils package diff --git a/.zed/settings.json b/.zed/settings.json new file mode 100644 index 0000000..ed5f873 --- /dev/null +++ b/.zed/settings.json @@ -0,0 +1,40 @@ +{ + "lsp": { + "deno": { + "settings": { + "deno": { + "enable": true + } + } + } + }, + "languages": { + "JavaScript": { + "language_servers": [ + "deno", + "!typescript-language-server", + "!vtsls", + "!eslint" + ], + "formatter": "language_server" + }, + "TypeScript": { + "language_servers": [ + "deno", + "!typescript-language-server", + "!vtsls", + "!eslint" + ], + "formatter": "language_server" + }, + "TSX": { + "language_servers": [ + "deno", + "!typescript-language-server", + "!vtsls", + "!eslint" + ], + "formatter": "language_server" + } + } +} diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..4b873a9 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,10 @@ +# Format, Lint, Type Check & Test + +Whenever something is changed in the examples or packages, run the following commands to format, lint, type check and test the code. Running these commands from the repository root will check all examples and packages. 
+ +``` +deno fmt --check +deno lint +deno check +deno test +``` diff --git a/README.md b/README.md index d37f31c..2512c27 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,20 @@ -Nimbus # Installation -Find all packages and installation instructions at [jsr.io/@nimbus](https://jsr.io/@nimbus). +Find all packages and installation instructions at +[jsr.io/@nimbus](https://jsr.io/@nimbus). # Documentation -Find the full documentation at [https://nimbus.overlap.at/](https://nimbus.overlap.at/). +Find the full documentation at +[https://nimbus.overlap.at/](https://nimbus.overlap.at/). -In addition code documentation is automatically generated and can be found for each package at [jsr.io/@nimbus](https://jsr.io/@nimbus). +In addition code documentation is automatically generated and can be found for +each package at [jsr.io/@nimbus](https://jsr.io/@nimbus). # License diff --git a/RELEASE.md b/RELEASE.md index da82d5e..3ecb5ee 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,6 +1,8 @@ # How to release a new version of the packages -For each package make sure the version in the `packages//deno.json` is set correctly and stick to semantic versioning. +For each package make sure the version in the +`packages//deno.json` is set correctly and stick to semantic +versioning. Once everything is ready make a new commit with a message of this type: @@ -8,14 +10,12 @@ Once everything is ready make a new commit with a message of this type: chore: release 0.0.0 ``` -Push to `main` and create a new release on GitHub from there on the GitHub workflow will take care of the rest. +Push to `main` and create a new release on GitHub from there on the GitHub +workflow will take care of the rest. ## Manually publish to JSR ``` cd packages/ -deno publish --allow-slow-types +deno publish ``` - -**Slow Types** -Because of some Zod inferred types, the `--allow-slow-types` flag is required to publish the package to JSR. 
diff --git a/deno.json b/deno.json index 695a9fe..fa10ed8 100644 --- a/deno.json +++ b/deno.json @@ -2,12 +2,39 @@ "compilerOptions": { "strict": true }, + "exclude": [ + "docs/" + ], "workspace": [ "./packages/core", "./packages/mongodb", - "./packages/oak", + "./packages/hono", "./packages/utils", - "./examples/the-expense" + "./examples/hono-demo" ], - "nodeModulesDir": "none" -} + "nodeModulesDir": "none", + "fmt": { + "include": [ + "examples/", + "packages/" + ] + }, + "lint": { + "include": [ + "examples/", + "packages/" + ] + }, + "check": { + "include": [ + "examples/", + "packages/" + ] + }, + "test": { + "include": [ + "examples/", + "packages/" + ] + } +} \ No newline at end of file diff --git a/deno.lock b/deno.lock index f74a6e7..e30337d 100644 --- a/deno.lock +++ b/deno.lock @@ -1,87 +1,35 @@ { "version": "5", "specifiers": { - "jsr:@oak/commons@1": "1.0.0", - "jsr:@oak/oak@^17.1.4": "17.1.4", - "jsr:@std/assert@1": "1.0.10", - "jsr:@std/assert@^1.0.10": "1.0.10", - "jsr:@std/bytes@1": "1.0.4", - "jsr:@std/bytes@^1.0.2": "1.0.4", - "jsr:@std/crypto@1": "1.0.3", + "jsr:@std/assert@^1.0.10": "1.0.15", "jsr:@std/dotenv@*": "0.225.3", - "jsr:@std/encoding@1": "1.0.6", - "jsr:@std/encoding@^1.0.5": "1.0.6", + "jsr:@std/dotenv@~0.225.6": "0.225.6", "jsr:@std/fmt@^1.0.4": "1.0.5", "jsr:@std/fmt@^1.0.5": "1.0.5", - "jsr:@std/http@1": "1.0.12", - "jsr:@std/internal@^1.0.5": "1.0.5", - "jsr:@std/io@0.224": "0.224.9", - "jsr:@std/media-types@1": "1.1.0", - "jsr:@std/path@1": "1.0.8", + "jsr:@std/internal@^1.0.12": "1.0.12", "jsr:@std/text@^1.0.10": "1.0.10", "jsr:@std/ulid@1": "1.0.0", - "jsr:@tajpouria/cors@^1.2.1": "1.2.1", + "npm:@opentelemetry/api@^1.9.0": "1.9.0", "npm:@types/node@*": "22.5.4", - "npm:mongodb@^6.12.0": "6.12.0", - "npm:path-to-regexp@6.2.1": "6.2.1", - "npm:path-to-regexp@^6.3.0": "6.3.0", - "npm:zod@^3.24.1": "3.24.1" + "npm:hono@^4.11.4": "4.11.4", + "npm:mongodb@7": "7.0.0", + "npm:zod@^4.3.5": "4.3.5" }, "jsr": { - "@oak/commons@1.0.0": { - "integrity": "49805b55603c3627a9d6235c0655aa2b6222d3036b3a13ff0380c16368f607ac", - "dependencies": [ - "jsr:@std/assert@1", - "jsr:@std/bytes@1", - "jsr:@std/crypto", - "jsr:@std/encoding@1", - "jsr:@std/http", - "jsr:@std/media-types" - ] - }, - "@oak/oak@17.1.3": { - "integrity": "d89296c22db91681dd3a2a1e1fd14e258d0d5a9654de55637aee5b661c159f33", - "dependencies": [ - "jsr:@oak/commons", - "jsr:@std/assert@1", - "jsr:@std/bytes@1", - "jsr:@std/crypto", - "jsr:@std/http", - "jsr:@std/io", - "jsr:@std/media-types", - "jsr:@std/path", - "npm:path-to-regexp@6.2.1" - ] - }, - "@oak/oak@17.1.4": { - "integrity": "60530b582bf276ff741e39cc664026781aa08dd5f2bc5134d756cc427bf2c13e", - "dependencies": [ - "jsr:@oak/commons", - "jsr:@std/assert@1", - "jsr:@std/bytes@1", - "jsr:@std/http", - "jsr:@std/media-types", - "jsr:@std/path", - "npm:path-to-regexp@^6.3.0" - ] - }, - "@std/assert@1.0.10": { - "integrity": "59b5cbac5bd55459a19045d95cc7c2ff787b4f8527c0dd195078ff6f9481fbb3", + "@std/assert@1.0.15": { + "integrity": "d64018e951dbdfab9777335ecdb000c0b4e3df036984083be219ce5941e4703b", "dependencies": [ "jsr:@std/internal" ] }, - "@std/bytes@1.0.4": { - "integrity": "11a0debe522707c95c7b7ef89b478c13fb1583a7cfb9a85674cd2cc2e3a28abc" - }, - "@std/crypto@1.0.3": { - "integrity": "a2a32f51ddef632d299e3879cd027c630dcd4d1d9a5285d6e6788072f4e51e7f" - }, "@std/dotenv@0.225.3": { "integrity": "a95e5b812c27b0854c52acbae215856d9cce9d4bbf774d938c51d212711e8d4a" }, - "@std/encoding@1.0.6": { - "integrity": 
"ca87122c196e8831737d9547acf001766618e78cd8c33920776c7f5885546069" + "@std/dotenv@0.225.5": { + "integrity": "9ce6f9d0ec3311f74a32535aa1b8c62ed88b1ab91b7f0815797d77a6f60c922f" + }, + "@std/dotenv@0.225.6": { + "integrity": "1d6f9db72f565bd26790fa034c26e45ecb260b5245417be76c2279e5734c421b" }, "@std/fmt@1.0.4": { "integrity": "e14fe5bedee26f80877e6705a97a79c7eed599e81bb1669127ef9e8bc1e29a74" @@ -89,44 +37,26 @@ "@std/fmt@1.0.5": { "integrity": "0cfab43364bc36650d83c425cd6d99910fc20c4576631149f0f987eddede1a4d" }, - "@std/http@1.0.12": { - "integrity": "85246d8bfe9c8e2538518725b158bdc31f616e0869255f4a8d9e3de919cab2aa", - "dependencies": [ - "jsr:@std/encoding@^1.0.5" - ] - }, - "@std/internal@1.0.5": { - "integrity": "54a546004f769c1ac9e025abd15a76b6671ddc9687e2313b67376125650dc7ba" - }, - "@std/io@0.224.9": { - "integrity": "4414664b6926f665102e73c969cfda06d2c4c59bd5d0c603fd4f1b1c840d6ee3", - "dependencies": [ - "jsr:@std/bytes@^1.0.2" - ] - }, - "@std/media-types@1.1.0": { - "integrity": "c9d093f0c05c3512932b330e3cc1fe1d627b301db33a4c2c2185c02471d6eaa4" - }, - "@std/path@1.0.8": { - "integrity": "548fa456bb6a04d3c1a1e7477986b6cffbce95102d0bb447c67c4ee70e0364be" + "@std/internal@1.0.12": { + "integrity": "972a634fd5bc34b242024402972cd5143eac68d8dffaca5eaa4dba30ce17b027" }, "@std/text@1.0.10": { "integrity": "9dcab377450253c0efa9a9a0c731040bfd4e1c03f8303b5934381467b7954338" }, "@std/ulid@1.0.0": { "integrity": "d41c3d27a907714413649fee864b7cde8d42ee68437d22b79d5de4f81d808780" - }, - "@tajpouria/cors@1.2.1": { - "integrity": "eca42e4fb7cb3906ef0ee3d1e565dd6bb4632ccd8e70a95cf4279759743328f0" } }, "npm": { - "@mongodb-js/saslprep@1.1.9": { - "integrity": "sha512-tVkljjeEaAhCqTzajSdgbQ6gE6f3oneVwa3iXR6csiEwXXOFsiC6Uh9iAjAhXPtqa/XMDHWjjeNH/77m/Yq2dw==", + "@mongodb-js/saslprep@1.4.4": { + "integrity": "sha512-p7X/ytJDIdwUfFL/CLOhKgdfJe1Fa8uw9seJYvdOmnP9JBWGWHW69HkOixXS6Wy9yvGf1MbhcS6lVmrhy4jm2g==", "dependencies": [ "sparse-bitfield" ] }, + "@opentelemetry/api@1.9.0": { + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==" + }, "@types/node@22.5.4": { "integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", "dependencies": [ @@ -136,49 +66,36 @@ "@types/webidl-conversions@7.0.3": { "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==" }, - "@types/whatwg-url@11.0.5": { - "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==", + "@types/whatwg-url@13.0.0": { + "integrity": "sha512-N8WXpbE6Wgri7KUSvrmQcqrMllKZ9uxkYWMt+mCSGwNc0Hsw9VQTW7ApqI4XNrx6/SaM2QQJCzMPDEXE058s+Q==", "dependencies": [ "@types/webidl-conversions" ] }, - "bson@6.10.1": { - "integrity": "sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==", - "deprecated": true + "bson@7.0.0": { + "integrity": "sha512-Kwc6Wh4lQ5OmkqqKhYGKIuELXl+EPYSCObVE6bWsp1T/cGkOCBN0I8wF/T44BiuhHyNi1mmKVPXk60d41xZ7kw==" + }, + "hono@4.11.4": { + "integrity": "sha512-U7tt8JsyrxSRKspfhtLET79pU8K+tInj5QZXs1jSugO1Vq5dFj3kmZsRldo29mTBfcjDRVRXrEZ6LS63Cog9ZA==" }, "memory-pager@1.5.0": { "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, - "mongodb-connection-string-url@3.0.1": { - "integrity": "sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==", + "mongodb-connection-string-url@7.0.0": { + 
"integrity": "sha512-irhhjRVLE20hbkRl4zpAYLnDMM+zIZnp0IDB9akAFFUZp/3XdOfwwddc7y6cNvF2WCEtfTYRwYbIfYa2kVY0og==", "dependencies": [ "@types/whatwg-url", "whatwg-url" ] }, - "mongodb@6.12.0": { - "integrity": "sha512-RM7AHlvYfS7jv7+BXund/kR64DryVI+cHbVAy9P61fnb1RcWZqOW1/Wj2YhqMCx+MuYhqTRGv7AwHBzmsCKBfA==", + "mongodb@7.0.0": { + "integrity": "sha512-vG/A5cQrvGGvZm2mTnCSz1LUcbOPl83hfB6bxULKQ8oFZauyox/2xbZOoGNl+64m8VBrETkdGCDBdOsCr3F3jg==", "dependencies": [ "@mongodb-js/saslprep", "bson", "mongodb-connection-string-url" - ], - "optionalPeers": [ - "@aws-sdk/credential-providers@^3.188.0", - "@mongodb-js/zstd@^1.1.0 || ^2.0.0", - "gcp-metadata@^5.2.0", - "kerberos@^2.0.1", - "mongodb-client-encryption@>=6.0.0 <7", - "snappy@^7.2.2", - "socks@^2.7.1" ] }, - "path-to-regexp@6.2.1": { - "integrity": "sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==" - }, - "path-to-regexp@6.3.0": { - "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==" - }, "punycode@2.3.1": { "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, @@ -188,8 +105,8 @@ "memory-pager" ] }, - "tr46@4.1.1": { - "integrity": "sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==", + "tr46@5.1.1": { + "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", "dependencies": [ "punycode" ] @@ -200,15 +117,15 @@ "webidl-conversions@7.0.0": { "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==" }, - "whatwg-url@13.0.0": { - "integrity": "sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==", + "whatwg-url@14.2.0": { + "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", "dependencies": [ "tr46", "webidl-conversions" ] }, - "zod@3.24.1": { - "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==" + "zod@4.3.5": { + "integrity": "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==" } }, "redirects": { @@ -252,35 +169,38 @@ }, "workspace": { "members": { - "examples/the-expense": { + "examples/hono-demo": { "dependencies": [ - "jsr:@oak/oak@^17.1.4", + "jsr:@std/dotenv@~0.225.6", "jsr:@std/ulid@1", - "jsr:@tajpouria/cors@^1.2.1", - "npm:mongodb@^6.12.0", - "npm:zod@^3.24.1" + "npm:hono@^4.11.4", + "npm:mongodb@7", + "npm:zod@^4.3.5" ] }, "packages/core": { "dependencies": [ "jsr:@std/assert@^1.0.10", "jsr:@std/fmt@^1.0.5", - "npm:zod@^3.24.1" + "jsr:@std/ulid@1", + "npm:@opentelemetry/api@^1.9.0", + "npm:zod@^4.3.5" ] }, - "packages/mongodb": { + "packages/hono": { "dependencies": [ - "jsr:@std/assert@^1.0.10", - "jsr:@std/text@^1.0.10", - "npm:mongodb@^6.12.0", - "npm:zod@^3.24.1" + "jsr:@std/ulid@1", + "npm:@opentelemetry/api@^1.9.0", + "npm:hono@^4.11.4" ] }, - "packages/oak": { + "packages/mongodb": { "dependencies": [ - "jsr:@oak/oak@^17.1.4", - "jsr:@std/ulid@1", - "npm:zod@^3.24.1" + "jsr:@std/assert@^1.0.10", + "jsr:@std/text@^1.0.10", + "npm:@opentelemetry/api@^1.9.0", + "npm:mongodb@7", + "npm:zod@^4.3.5" ] } } diff --git a/docs/.vitepress/config.mjs b/docs/.vitepress/config.mjs index a640471..0a6fac2 100644 --- a/docs/.vitepress/config.mjs +++ b/docs/.vitepress/config.mjs @@ -3,7 +3,7 @@ import { defineConfig } from 
"vitepress"; // https://vitepress.dev/reference/site-config export default defineConfig({ title: "Nimbus", - description: "A Framework to build event-driven applications in the cloud.", + description: "Build event-driven applications with typescript.", themeConfig: { // https://vitepress.dev/reference/default-theme-config logo: @@ -33,10 +33,9 @@ export default defineConfig({ link: "/guide/quickstart", }, { - text: "Project Structure", - link: "/guide/project-structure", + text: "Observability", + link: "/guide/observability", }, - { text: "Core", link: "/guide/core", @@ -61,28 +60,32 @@ export default defineConfig({ text: "Event Bus", link: "/guide/core/event-bus", }, - { - text: "Exceptions", - link: "/guide/core/exceptions", - }, { text: "Logging", link: "/guide/core/logging", }, + { + text: "Exceptions", + link: "/guide/core/exceptions", + }, ], }, { - text: "Oak (HTTP)", - link: "/guide/oak", + text: "Hono", + link: "/guide/hono", items: [ { - text: "Router", - link: "/guide/oak/router", + text: "CorrelationID Middleware", + link: "/guide/hono/correlationid", }, { - text: "Middleware", - link: "/guide/oak/middleware", + text: "Logger Middleware", + link: "/guide/hono/logger", + }, + { + text: "onError Handler", + link: "/guide/hono/on-error", }, ], }, @@ -91,6 +94,10 @@ export default defineConfig({ text: "MongoDB", link: "/guide/mongodb", items: [ + { + text: "Connection Manager", + link: "/guide/mongodb/connection-manager", + }, { text: "Repository", link: "/guide/mongodb/repository", @@ -99,6 +106,18 @@ export default defineConfig({ text: "CRUD+", link: "/guide/mongodb/crud", }, + { + text: "MongoJSON", + link: "/guide/mongodb/mongo-json", + }, + { + text: "handleMongoError", + link: "/guide/mongodb/handle-mongo-error", + }, + { + text: "Deploy Collection", + link: "/guide/mongodb/deploy-collection", + }, ], }, diff --git a/docs/guide/core/commands.md b/docs/guide/core/commands.md index 24b739b..25a47d1 100644 --- a/docs/guide/core/commands.md +++ b/docs/guide/core/commands.md @@ -1,127 +1,121 @@ +--- +prev: + text: "Observability" + link: "/guide/core/observability" + +next: + text: "Queries" + link: "/guide/core/queries" +--- + # Commands -Commands are the messages that tell your application to do something. -Like "Hey, create a new account with the following data". +Commands represent write operations - intentions to change system state in the application. + +Commands also fit perfectly into the CQRS pattern (Command Query Responsibility Segregation), where writes and reads are separated for better scalability and maintainability. But keep it simple for your use case and needs. CQRS in an option, but not required. ::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) +The examples on this page reference the hono-demo application. -Check it out and run it with `deno task dev` +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) ::: -## Example +## Key Characteristics -At first we define the command and the core functionality in a file called `addAccount.ts` in the `core/commands` folder. If you like you can also split the command definition and the function into separate files. Or add more functions to handle the core business logic involved when adding an account. 
+- **Write Operations**: Commands modify application state +- **Intent-Based**: Commands express what should happen (e.g., "AddUser", "DeleteUser") +- **Type-Safe**: Commands are fully typed and validated using Zod -Next we add a command handler in a fille called `addAccount.handler.ts` in the `shell/commands` folder. This is the first function that is executed when the app receives this specific command. +## Command Structure -The command handler contains all the glue needed to communicate with other parts of the application and to handle all the side-effects. In this example we first call the core function to get a new account. Then we write the account to the database, we publish an event that the account was added and finally we return the account to the caller. +A command in Nimbus follows the CloudEvents specification and consists of: -::: code-group +```typescript +type Command = { + specversion: "1.0"; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + subject?: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; +``` -```typescript [Core] -import { - AuthContext, - Command, - CommandMetadata, - InvalidInputException, -} from "@nimbus/core"; -import { ObjectId } from "mongodb"; +| Property | Description | +| ----------------- | ---------------------------------------------------------------------------------- | +| `specversion` | The CloudEvents specification version (always `'1.0'`) | +| `id` | A globally unique identifier for the command | +| `correlationid` | A unique identifier to correlate this command with related messages | +| `time` | ISO 8601 timestamp when the command was created | +| `source` | A URI reference identifying the system creating the command | +| `type` | The command type following CloudEvents naming (e.g., `at.overlap.nimbus.add-user`) | +| `subject` | Optional identifier for the entity the command targets | +| `data` | The command payload containing the business data | +| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | +| `dataschema` | Optional URL to the schema the data adheres to | + +## Command Schema + +Nimbus provides a base Zod schema for validating commands: + +```typescript +import { commandSchema } from "@nimbus/core"; import { z } from "zod"; -import { Account } from "../account.type.ts"; -// Define the data for the command -export const AddAccountData = z.object({ - name: z.string(), +// Extend the base schema with your specific command type and data +const addUserCommandSchema = commandSchema.extend({ + type: z.literal("at.overlap.nimbus.add-user"), + data: z.object({ + email: z.email(), + firstName: z.string(), + lastName: z.string(), + }), }); -export type AddAccountData = z.infer; - -// Define the Command with it's unique name, data and metadata -export const AddAccountCommand = Command( - z.literal("ADD_ACCOUNT"), - AddAccountData, - CommandMetadata(AuthContext) // You can define you own meta data type if needed -); -export type AddAccountCommand = z.infer; - -// The core logic -// We take the command data and the authContext and return the new account. -// -// Apply any important business logic here if needed. -// For example to set the balance of the account to 0 -// or in case of a promotion add a starting balance. 
-export const addAccount = ( - data: AddAccountData, - authContext?: AuthContext -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - return { - _id: new ObjectId().toString(), - name: data.name, - status: "active", - }; -}; + +type AddUserCommand = z.infer; ``` -```typescript [Shell] -import { InvalidInputException, type RouteHandler } from "@nimbus/core"; -import { eventBus } from "../../../eventBus.ts"; -import { Account } from "../../core/account.type.ts"; -import { - addAccount, - AddAccountCommand, -} from "../../core/commands/addAccount.ts"; -import { AccountAddedEvent } from "../../core/events/accountAdded.ts"; -import { accountRepository } from "../account.repository.ts"; - -export const addAccountHandler: RouteHandler = async ( - command: AddAccountCommand -) => { - // Call the Core with validated and type-safe inputs. - // The Nimbus router takes care these are type checked and validated. - // Learn more about the router on the next sections of the guide. - let account = addAccount(command.data, command.metadata.authContext); - - // Write the new account to the database - try { - account = await accountRepository.insertOne({ item: account }); - } catch (error: any) { - if (error.message.startsWith("E11000")) { - throw new InvalidInputException("Account already exists", { - errorCode: "ACCOUNT_ALREADY_EXISTS", - reason: "An account with the same name already exists", - }); - } - - throw error; - } - - // We want to publish an event that the account was added - // See more about events in the next section of the guide - eventBus.putEvent({ - name: "ACCOUNT_ADDED", - data: { - account: account, - }, - metadata: { - correlationId: command.metadata.correlationId, - authContext: command.metadata.authContext, - }, - }); - - // Return the successful result - return { - statusCode: 200, - data: account, - }; -}; +## Create Commands + +You can create commands using the `createCommand()` helper: + +```typescript +import { createCommand } from "@nimbus/core"; +import { AddUserCommand } from "./addUser.command.ts"; + +const commandForJane = createCommand({ + type: "at.overlap.nimbus.add-user", + source: "nimbus.overlap.at", + data: { + email: "jane@example.com", + firstName: "Jane", + lastName: "Doe", + }, +}); + +const commandForJohn = createCommand({ + type: "at.overlap.nimbus.add-user", + source: "nimbus.overlap.at", + data: { + email: "john@example.com", + firstName: "John", + lastName: "Doe", + }, +}); ``` -::: +The `createCommand()` helper automatically generates default values for: + +- `id` - A unique ULID +- `correlationid` - A unique ULID (if not provided) +- `time` - Current ISO timestamp +- `specversion` - Always `'1.0'` +- `datacontenttype` - Defaults to `'application/json'` -## Receive and Route Commands +## Routing Commands -Learn more about how to receive and route commands in the [Router](/guide/core/router.md) guide. +Commands are routed to handlers using the [MessageRouter](/guide/core/router). See the Router documentation for details on registering handlers and routing messages. diff --git a/docs/guide/core/event-bus.md b/docs/guide/core/event-bus.md index 19d1207..330ffba 100644 --- a/docs/guide/core/event-bus.md +++ b/docs/guide/core/event-bus.md @@ -1,90 +1,153 @@ +--- +prev: + text: "Router" + link: "/guide/core/router" + +next: + text: "Exceptions" + link: "/guide/core/exceptions" +--- + # Event Bus -The Nimbus event bus allows to publish and subscribe to [events](/guide/core/events.md) within the application. 
+The NimbusEventBus enables publish/subscribe messaging for [events](/guide/core/events) within your application. Events are delivered asynchronously to all registered handlers with automatic retry on failure. ::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) +The examples on this page reference the hono-demo application. -Check it out and run it with `deno task dev` +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) ::: -## Event Subscriptions +## Setup and Configuration -To set up event subscriptions, we want to create a new instance of the `NimbusEventBus` first. Then we want to use the `subscribeEvent` method to subscribe to all the events the application needs to handle. +Configure the event bus at application startup using `setupEventBus()`, then retrieve it anywhere using `getEventBus()`. -In the `main.ts` file we call the `initEventBusSubscriptions` function to subscribe to all the events for the different domains when the application starts. +```typescript +import { getLogger, setupEventBus } from "@nimbus/core"; + +setupEventBus("MyEventBus", { + maxRetries: 2, + baseDelay: 1000, + maxDelay: 30000, + useJitter: true, + logPublish: (event) => { + getLogger().debug({ + category: "MyEventBus", + message: "Published event", + data: { event }, + ...(event?.correlationid + ? { correlationId: event.correlationid } + : {}), + }); + }, +}); +``` -::: code-group +### Configuration Options -```typescript [eventBus.ts] -import { NimbusEventBus, RouteHandlerMap } from "@nimbus/core"; -import { accountEventSubscriptions } from "./account/shell/account.eventBus.ts"; +| Option | Type | Default | Description | +| ------------ | ----------------- | ------- | -------------------------------------------------- | +| `maxRetries` | `number` | `2` | Maximum retry attempts for failed handlers | +| `baseDelay` | `number` | `1000` | Base delay in milliseconds for exponential backoff | +| `maxDelay` | `number` | `30000` | Maximum delay cap in milliseconds | +| `useJitter` | `boolean` | `true` | Add randomness to delay to prevent thundering herd | +| `logPublish` | `(event) => void` | - | Optional callback when an event is published | -// -// Create a new instance of the event bus -// -export const eventBus = new NimbusEventBus({ - maxRetries: 3, -}); +## Subscribing to Events -// -// Create a function that subscribes to all -// the events for the different domains -// -export const initEventBusSubscriptions = () => { - const subscriptions: Record = { - account: accountEventSubscriptions, - }; - - for (const [, handlerMap] of Object.entries(subscriptions)) { - for (const eventName of Object.keys(handlerMap)) { - eventBus.subscribeEvent( - eventName, - handlerMap[eventName].inputType, - handlerMap[eventName].handler - ); - } - } -}; -``` +Subscribe to event types using `subscribeEvent()`: -```typescript [account.eventBus.ts] -import { RouteHandlerMap } from "@nimbus/core"; -import { AccountAddedEvent } from "../core/events/accountAdded.ts"; -import { accountAddedHandler } from "./events/accountAdded.handler.ts"; +```typescript +import { getEventBus } from "@nimbus/core"; -export const accountEventSubscriptions: RouteHandlerMap = { - ACCOUNT_ADDED: { - handler: accountAddedHandler, - inputType: AccountAddedEvent, +const eventBus = getEventBus("MyEventBus"); + +eventBus.subscribeEvent({ + type: "at.overlap.nimbus.user-added", + 
handler: async (event: UserAddedEvent) => { + // Process event and return result + }, +}); + +eventBus.subscribeEvent({ + type: "at.overlap.nimbus.onboarding-started", + handler: async (event: OnboardingStartedEvent) => { + // Process event and return result }, -}; + onError: (error, event) => { + // Handle the error + }, + options: { + maxRetries: 0, // Override the default of 2 retries for this subscription + }, +}); ``` -```typescript [main.ts] -import { initEventBusSubscriptions } from "./eventBus.ts"; +### Subscription Options -initEventBusSubscriptions(); -``` +The `subscribeEvent()` method accepts the following options: -::: +| Option | Type | Description | +| --------- | -------------------------- | ---------------------------------------------------- | +| `type` | `string` | The CloudEvents type to subscribe to | +| `handler` | `(event) => Promise` | Async handler function for the event | +| `onError` | `(error, event) => void` | Optional callback when all retries are exhausted | +| `options` | `object` | Optional retry options to override EventBus defaults | -## Publish Events +## Publishing Events -To publish an event, we can use the `putEvent` method of the `NimbusEventBus` class. +Publish events using `putEvent()`: ```typescript -import { eventBus } from "../../../eventBus.ts"; -import { AccountAddedEvent } from "../../core/events/accountAdded.ts"; +import { createEvent, getEventBus } from "@nimbus/core"; + +const eventBus = getEventBus("default"); -eventBus.putEvent({ - name: "ACCOUNT_ADDED", +const event = createEvent({ + type: "at.overlap.nimbus.user-added", + source: "nimbus.overlap.at", + correlationid: command.correlationid, + subject: `/users/${user.id}`, data: { - account: account, - }, - metadata: { - correlationId: command.metadata.correlationId, - authContext: command.metadata.authContext, + email: user.email, + firstName: user.firstName, + lastName: user.lastName, }, }); + +eventBus.putEvent(event); ``` + +## Retry Mechanism + +When a handler throws an error, the event bus automatically retries using exponential backoff: + +1. **First retry**: Waits `baseDelay` ms (default: 1000ms) +2. **Second retry**: Waits `baseDelay * 2` ms (2000ms) +3. **Third retry**: Waits `baseDelay * 4` ms (4000ms) +4. ... continues until `maxDelay` is reached + +With `useJitter: true`, a small random amount (up to 10% of the delay) is added to prevent multiple handlers from retrying simultaneously. + +After all retries are exhausted, the `onError` callback is invoked (if provided), or the error is logged. + +## Event Size Limit + +The event bus enforces the CloudEvents specification size limit of 64KB. If you attempt to publish an event larger than this, a `GenericException` is thrown. + +## Observability + +The event bus is fully instrumented with OpenTelemetry tracing and metrics. See the [Observability](/guide/core/observability) documentation for details. 
+ +**Tracing:** + +- `eventbus.publish` span for event publishing +- `eventbus.handle` span for event handling + +**Metrics:** + +- `eventbus_events_published_total` - Counter for published events +- `eventbus_events_delivered_total` - Counter for delivered events (with success/error status) +- `eventbus_event_handling_duration_seconds` - Histogram of handler execution time +- `eventbus_retry_attempts_total` - Counter for retry attempts +- `eventbus_event_size_bytes` - Histogram of event sizes diff --git a/docs/guide/core/events.md b/docs/guide/core/events.md index 212c166..c44b667 100644 --- a/docs/guide/core/events.md +++ b/docs/guide/core/events.md @@ -1,74 +1,164 @@ +--- +prev: + text: "Queries" + link: "/guide/core/queries" + +next: + text: "Router" + link: "/guide/core/router" +--- + # Events -Events are the messages that tell your application something has happened. -Like "Hey, the account with the ID 1234 has been updated". +Events represent facts - things that have already happened in the system. + +Events are immutable records of state changes that occurred in the application. They enable event-driven architectures, event sourcing, and asynchronous processing. ::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) +The examples on this page reference the hono-demo application. -Check it out and run it with `deno task dev` +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) ::: -## Example - -At first we define the event in a file called `accountAdded.ts` in the `core/events` folder. +## Key Characteristics + +- **Immutable Facts**: Events represent things that already happened and cannot be changed +- **Past Tense**: Event names use past tense (e.g., "UserAdded", not "AddUser") +- **Observable**: Other parts of the system can subscribe and react to events +- **Type-Safe**: Events are fully typed and validated using Zod + +## Event Structure + +An event in Nimbus follows the CloudEvents specification and consists of: + +```typescript +type Event = { + specversion: "1.0"; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + subject: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; +``` -Next we create an event handler function in a fille called `accountAdded.handler.ts` in the `shell/events` folder. This is the first function that is executed when the app receives this specific event. 
+| Property | Description | +| ----------------- | ---------------------------------------------------------------------------------- | +| `specversion` | The CloudEvents specification version (always `'1.0'`) | +| `id` | A globally unique identifier for the event | +| `correlationid` | A unique identifier to correlate this event with related messages | +| `time` | ISO 8601 timestamp when the event was created | +| `source` | A URI reference identifying the system creating the event | +| `type` | The event type following CloudEvents naming (e.g., `at.overlap.nimbus.user-added`) | +| `subject` | An identifier for the entity the event is about (e.g., `/users/123`) | +| `data` | The event payload containing the business data | +| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | +| `dataschema` | Optional URL to the schema the data adheres to | + +## Event Subjects + +Unlike commands and queries, events **require** a `subject` field. +Events use subjects to organize and identify the entities they relate to: + +```typescript +// Subject examples +"/users/123"; // Specific user +"/orders/456"; // Specific order +"/users/123/orders/456"; // Order belonging to a user +``` -The event handler contains all the glue needed to communicate with other parts of the application and to handle all the side-effects. In this example we simply wait a second and log an info to the console. +## Event Schema -::: code-group +Nimbus provides a base Zod schema for validating events: -```typescript [Core] -import { AuthContext, Event, EventMetadata } from "@nimbus/core"; +```typescript +import { eventSchema } from "@nimbus/core"; import { z } from "zod"; -import { Account } from "../account.type.ts"; -// Define the data for the event -export const AccountAddedData = z.object({ - account: Account, +// Extend the base schema with your specific event type and data +const userAddedEventSchema = eventSchema.extend({ + type: z.literal("at.overlap.nimbus.user-added"), + data: z.object({ + _id: z.string(), + email: z.string(), + firstName: z.string(), + lastName: z.string(), + }), }); -export type AccountAddedData = z.infer; - -// Define the Event with it's unique name, data and metadata -export const AccountAddedEvent = Event( - z.literal("ACCOUNT_ADDED"), - AccountAddedData, - EventMetadata(AuthContext) // You can define you own meta data type if needed -); -export type AccountAddedEvent = z.infer; + +type UserAddedEvent = z.infer; ``` -```typescript [Shell] -import { getLogger, RouteHandler } from "@nimbus/core"; -import { - AccountAddedData, - AccountAddedEvent, -} from "../../core/events/accountAdded.ts"; - -export const accountAddedHandler: RouteHandler< - AccountAddedEvent, - AccountAddedData -> = async (event) => { - await new Promise((resolve) => setTimeout(resolve, 1000)); - - getLogger().info({ - message: `New account was added: ${event.data.account.name}`, - }); - - // This is just an example. - // Change the code to do what has to be done after an account got added. - // For example send a mail to the owner. 
- - return { - statusCode: 200, - data: event.data, - }; -}; +## Create Events + +You can create events using the `createEvent()` helper: + +```typescript +import { createEvent } from "@nimbus/core"; +import { UserAddedEvent } from "./userAdded.event.ts"; + +const event = createEvent({ + type: "at.overlap.nimbus.user-added", + source: "nimbus.overlap.at", + correlationid: command.correlationid, + subject: `/users/${userState._id}`, + data: userState, +}); ``` -::: +The `createEvent()` helper automatically generates default values for: + +- `id` - A unique ULID +- `correlationid` - A unique ULID (if not provided) +- `time` - Current ISO timestamp +- `specversion` - Always `'1.0'` +- `datacontenttype` - Defaults to `'application/json'` + +## Best Practices + +### Use Past Tense Names + +Event names should describe what happened, not what should happen: + +```typescript +// ✅ Good - Past tense +UserAddedEvent; +OrderShippedEvent; +PaymentProcessedEvent; + +// ❌ Bad - Imperative +AddUserEvent; +ShipOrderEvent; +ProcessPaymentEvent; +``` + +### Propagate Correlation IDs + +Always pass correlation IDs from commands to events for tracing: + +```typescript +const event = createEvent({ + type: USER_ADDED_EVENT_TYPE, + source: "nimbus.overlap.at", + correlationid: command.correlationid, // Always propagate + data: state, +}); +``` + +### Use Meaningful Subjects + +Subjects should be hierarchical and meaningful: + +```typescript +// ✅ Good - Hierarchical and clear +`/users/${userId}``/users/${userId}/orders/${orderId}``/organizations/${orgId}/members/${memberId}` // ❌ Bad - Flat and unclear +`user-${userId}``order_${orderId}`; +``` -## Publish and Subscribe to Events +## Publish & Subscribe Events -Learn more about how to publish and subscribe to events in the [Event Bus](/guide/core/event-bus.md) guide. +Events are published and subscribed to using the [EventBus](/guide/core/event-bus). See the EventBus documentation for details on publishing and subscribing to events. diff --git a/docs/guide/core/exceptions.md b/docs/guide/core/exceptions.md index beae563..66a1d6b 100644 --- a/docs/guide/core/exceptions.md +++ b/docs/guide/core/exceptions.md @@ -1,18 +1,39 @@ +--- +prev: + text: "Logging" + link: "/guide/core/logging" + +next: + text: "Nimbus Hono" + link: "/guide/hono" +--- + # Exceptions -Nimbus defines a set of exceptions that you can use to handle errors in your application. These exceptions are used to communicate errors of a certain type. +Nimbus provides a set of structured exceptions for handling errors in your application. These exceptions +have and optional status code and can include additional details for debugging. + +## Status Codes -## Examples +The basic Exception class has an optional status code that can be set when creating the exceptions and you can assign any number as a value. -You can optionally pass a message and a details object to provide further information. +However, Nimbus comes with some built-in exceptions that use the related HTTP status codes. As HTTP status codes are standardized and well-known we thought it would be a good idea to use them even though the Exceptions itself are transport agnostic. -All Exceptions have a `fromError()` method to convert a standard JavaScript error into a Nimbus exception. This takes care to keep the original error message and stack trace. +## Built-in Exception Types -For the `InvalidInputException` you can use the `fromZodError()` method to convert a Zod error into a Nimbus exception. 
This will keep the original error message and stack trace and also keeps the validation details. +| Exception | Status Code | Use Case | +| ----------------------- | ----------- | ------------------------------------------- | +| `GenericException` | 500 | Internal server errors, unexpected failures | +| `InvalidInputException` | 400 | Validation errors, malformed requests | +| `NotFoundException` | 404 | Resource not found | +| `UnauthorizedException` | 401 | Authentication required or failed | +| `ForbiddenException` | 403 | Authorization failed, access denied | -::: code-group +## Basic Usage -```typescript [Basics] +All exceptions accept an optional message and details object: + +```typescript import { ForbiddenException, GenericException, @@ -21,75 +42,148 @@ import { UnauthorizedException, } from "@nimbus/core"; -// Status code 500 +// Generic server error (500) throw new GenericException("Something went wrong"); -// Status code 400 -throw new InvalidInputException("The input is invalid", { foo: "bar" }); +// Invalid input with details (400) +throw new InvalidInputException("The input is invalid", { + field: "email", + reason: "Invalid email format", +}); -// Status code 401 +// Unauthorized (401) throw new UnauthorizedException(); -// Status code 403 +// Forbidden (403) throw new ForbiddenException(); -// Status code 404 -throw new NotFoundException("Account not found", { - errorCode: "ACCOUNT_NOT_FOUND", - reason: "The account with the provided id was not found", +// Not found with details (404) +throw new NotFoundException("User not found", { + errorCode: "USER_NOT_FOUND", + userId: "12345", }); ``` -```typescript [.fromError] -import { GenericException } from "@nimbus/core"; +## Converting from Standard Errors -const someError = new Error("Something went wrong"); +Use `fromError()` to convert a standard JavaScript error while preserving the stack trace: -const exception = new GenericException(); -exception.fromError(someError); +```typescript +import { GenericException } from "@nimbus/core"; -throw exception; +try { + await someExternalService.call(); +} catch (error) { + const exception = new GenericException(); + exception.fromError(error); + throw exception; +} ``` -```typescript [.fromZodError] +## Converting from Zod Errors + +If you need to manually handle Zod validation: + +```typescript import { InvalidInputException } from "@nimbus/core"; import { z } from "zod"; -const MyZodType = z.object({ - sub: z.string(), - groups: z.array(z.string()), +const UserSchema = z.object({ + email: z.email(), + name: z.string().min(1), }); try { - MyZodType.parse({ sub: 123, groups: ["bar"] }); + UserSchema.parse({ email: "invalid", name: "" }); } catch (error) { const exception = new InvalidInputException(); exception.fromZodError(error); - throw exception; } ``` -::: - -## Create a new exception +## Creating Custom Exceptions -In case you need to add other types of exceptions you can simply create a new exception by extending the `BaseException` class. +Create custom exceptions by extending the base `Exception` class: ```typescript import { Exception } from "@nimbus/core"; -export class MySpecialException extends Exception { +export class RateLimitException extends Exception { constructor(message?: string, details?: Record) { super( - "MY_SPECIAL_EXCEPTION", // The exception name - message ?? "Something Special", // provided message or fallback - details, // pass the provided details - 500 // the status code + "RATE_LIMIT_EXCEEDED", + message ?? 
"Rate limit exceeded", + details, + 429 // Too Many Requests ); } } // Usage -throw new MySpecialException("Something went wrong", { foo: "bar" }); +throw new RateLimitException("Too many requests", { + retryAfter: 60, + limit: 100, +}); +``` + +## HTTP Integration + +When using the `@nimbus/hono` package, exceptions are automatically converted to HTTP responses: + +```typescript +import { onError } from "@nimbus/hono"; +import { Hono } from "hono"; + +const app = new Hono(); + +// Configure error handler +app.onError(onError); + +// Exceptions thrown in routes are converted to JSON responses +app.get("/users/:id", async (c) => { + throw new NotFoundException("User not found", { + userId: c.req.param("id"), + }); + // Returns: { "error": "NOT_FOUND", "message": "User not found", "details": { "userId": "123" } } + // Status: 404 +}); +``` + +## Best Practices + +### Use Specific Exceptions + +Choose the most specific exception type for the situation: + +```typescript +// ✅ Good - Specific exception +throw new NotFoundException("Order not found"); + +// ❌ Bad - Generic exception for known error +throw new GenericException("Order not found"); +``` + +### Include Helpful Details + +Add details that help with debugging: + +```typescript +throw new InvalidInputException("Invalid order data", { + errorCode: "INVALID_ORDER", + field: "quantity", + value: -5, + constraint: "must be positive", +}); +``` + +### Use Error Codes + +Include machine- and human-readable error codes for client handling: + +```typescript +throw new NotFoundException("User not found", { + errorCode: "USER_NOT_FOUND", // Clients can check this AND translate it to a human-readable error message in multiple languages + userId: id, +}); ``` diff --git a/docs/guide/core/index.md b/docs/guide/core/index.md index 9eaa40e..c1efa02 100644 --- a/docs/guide/core/index.md +++ b/docs/guide/core/index.md @@ -1,19 +1,40 @@ --- prev: - text: "Project Structure" - link: "/guide/project-structure" + text: "Quickstart" + link: "/guide/quickstart" next: - text: "Commands" - link: "/guide/core/commands" + text: "Observability" + link: "/guide/observability" --- # Nimbus Core Package -The core package is the foundation of the entire framework. It provides the basic building blocks for all other packages to build upon. +The core package is the foundation of the entire framework. It provides the essential building blocks for building event-driven applications following the CloudEvents specification. [https://jsr.io/@nimbus/core](https://jsr.io/@nimbus/core) +::: info Example Application +The examples throughout the core documentation reference the hono-demo application. 
+ +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) +::: + +## What's Included + +The core package provides: + +- **[Observability](/guide/observability)** - Built-in logging, tracing, and metrics using OpenTelemetry +- **[Commands](/guide/core/commands)** - Write operations following the CloudEvents specification +- **[Queries](/guide/core/queries)** - Read operations for fetching data +- **[Events](/guide/core/events)** - Domain events for reactive architectures +- **[Router](/guide/core/router)** - Message routing with validation and tracing +- **[Event Bus](/guide/core/event-bus)** - Publish/subscribe for in-process events +- **[Exceptions](/guide/core/exceptions)** - Structured error handling with HTTP status codes +- **[Logging](/guide/core/logging)** - Structured logging with configurable formatters + +## Installation + ### Deno ```bash diff --git a/docs/guide/core/logging.md b/docs/guide/core/logging.md index a0bdbd3..525d95c 100644 --- a/docs/guide/core/logging.md +++ b/docs/guide/core/logging.md @@ -1,122 +1,196 @@ -# Logging - -Nimbus provides a very simple logger that enables you to log messages for different severity levels to the console. - -It is basically a wrapper around the `console` object and the `console.debug()`, `console.info()`, `console.warn()`, `console.error()` and `console.critical()` methods. - -It helps to have consistent logs with important meta information (timestamp, log level,category, error stack traces, etc) across your application. - -No other transports or sinks are supported. As we want to keep the core as lightweight as possible and encourage the use of tools like [OpenTelemetry](https://opentelemetry.io/) to transport logs for monitoring and tracing. +--- +prev: + text: "Event Bus" + link: "/guide/core/event-bus" -As [Deno supports OpenTelemetry](https://docs.deno.com/runtime/fundamentals/open_telemetry/) out of the box, you can easily transport logs to any other monitoring system without the need to change the code of the application. +next: + text: "Exceptions" + link: "/guide/core/exceptions" +--- -## Log Levels - -Nimbus supports the following log levels for logging messages. - -- `debug` - Outputs a `console.debug()` -- `info` - Outputs a `console.info()` -- `warn` - Outputs a `console.warn()` -- `error` - Outputs a `console.error()` -- `critical` - Outputs a `console.error()` - -Also `silent` can be used in the setup to completely disable log output. +# Logging -## Setup +Nimbus provides a structured logger that outputs consistent, formatted log messages to the console. The logger integrates with Deno's native OpenTelemetry support for automatic log export to observability backends. -Nimbus provides a simple function to setup the logger. You can pass in the log level and the formatter you want to use. +::: info Example Application +The examples on this page reference the hono-demo application. -The `prettyLogFormatter` is recommended for development environments only. In production you should use the `jsonLogFormatter`. +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) +::: -For the pretty formatter the `useConsoleColors` option can be used to enable colors in the console output. 
+## Setup and Configuration -::: code-group +Configure the logger at application startup using `setupLogger()`: -```typescript [main.ts] +```typescript import { jsonLogFormatter, parseLogLevel, prettyLogFormatter, setupLogger, } from "@nimbus/core"; +import process from "node:process"; setupLogger({ logLevel: parseLogLevel(process.env.LOG_LEVEL), formatter: - process.env.NODE_ENV === "development" + process.env.LOG_FORMAT === "pretty" ? prettyLogFormatter : jsonLogFormatter, - useConsoleColors: process.env.NODE_ENV === "development", + useConsoleColors: process.env.LOG_FORMAT === "pretty", }); ``` -::: +### Configuration Options + +| Option | Type | Default | Description | +| ------------------ | -------------- | ------------------ | -------------------------------------------- | +| `logLevel` | `LogLevel` | `'silent'` | Minimum level to output | +| `formatter` | `LogFormatter` | `jsonLogFormatter` | Function to format log records | +| `useConsoleColors` | `boolean` | `false` | Enable colored output (for pretty formatter) | + +## Log Levels + +Nimbus supports the following log levels in order of severity: -## Usage +| Level | Method | Description | +| ---------- | ----------------- | -------------------------------------------- | +| `debug` | `console.debug()` | Detailed debugging information | +| `info` | `console.info()` | General information about application flow | +| `warn` | `console.warn()` | Warning conditions that should be reviewed | +| `error` | `console.error()` | Error conditions that need attention | +| `critical` | `console.error()` | Critical failures requiring immediate action | +| `silent` | _(none)_ | Disables all log output | -The logger can be accessed via the `getLogger` function. -The logger is a singleton and will return the same instance every time it is called. +Messages below the configured log level are silently ignored. -To create a new log you can use the `info`, `warn`, `error` or `critical` methods depending on the severity of the message. +### Parsing Log Levels -The log input is an object that can contain the following properties: +Use `parseLogLevel()` to safely parse environment variables: -- `message` - The message to log. -- `correlationId` - An optional correlation ID to keep track of commands, queries, and events that are related to each other. -- `category` - An optional category of the log, useful for grouping logs together. -- `data` - Optional additional data to log, can be an object with any properties. -- `error` - Optional error object to log. +```typescript +import { parseLogLevel } from "@nimbus/core"; + +// Returns 'info' if LOG_LEVEL is 'info', otherwise returns default 'silent' +const level = parseLogLevel(process.env.LOG_LEVEL); +``` -The error object is specified as a dedicated property and not as part of the `data` object to make sure all error properties and the stack trace are preserved and logged correctly. +## Basic Usage -::: code-group +Access the logger using `getLogger()`: -```typescript [logExample.ts] +```typescript import { getLogger } from "@nimbus/core"; const logger = getLogger(); logger.debug({ - message: "Hello World!", - correlationId: "1234567890", - data: { foo: "bar" }, + message: "Processing request", + category: "API", + data: { method: "POST", path: "/users" }, + correlationId: "550e8400-e29b-41d4-a716-446655440000", }); -logger.info({ message: "Hello World!" 
}); +logger.info({ + message: "User created successfully", + category: "Users", + data: { userId: "12345" }, +}); logger.warn({ - category: "MyCategory", - message: "Ding Dong!", + message: "Rate limit approaching", + category: "API", + data: { currentRate: 95, maxRate: 100 }, }); logger.error({ - message: "Ohh no!", - error: new Error("Something went wrong!"), + message: "Failed to process payment", + category: "Payments", + error: new Error("Payment gateway timeout"), + correlationId: "550e8400-e29b-41d4-a716-446655440000", }); logger.critical({ - category: "MyCategory", - message: "It is over, run!", - error: new Error("Something is burning!"), - data: { - accountId: "1234567890", - foo: "bar", - }, + message: "Database connection lost", + category: "Database", + error: new Error("Connection refused"), }); ``` -::: +## Log Input + +The log input object can contain the following properties: + +| Property | Type | Description | +| --------------- | ------------------------- | ------------------------------------------------------------- | +| `message` | `string` | **Required.** The log message | +| `category` | `string` | Optional category for grouping logs (defaults to `'Default'`) | +| `data` | `Record` | Optional structured data to include | +| `error` | `Error` | Optional error with stack trace | +| `correlationId` | `string` | Optional ID for tracing related operations | -## Nimbus Logs +## Formatters -As the various Nimbus features have implemented log statements as well it uses the same logger provided by the `getLogger()` function. +Nimbus provides two built-in formatters: -Therefore all log statements from Nimbus will respect the log level and formatter you have configured for the application. +### JSON Formatter (Production) -In case you do not configure the logger in your application the Nimbus logs will use the default settings. +Outputs structured JSON for easy parsing by log aggregation tools: + +```typescript +import { jsonLogFormatter, setupLogger } from "@nimbus/core"; + +setupLogger({ + logLevel: "info", + formatter: jsonLogFormatter, +}); + +// Output: +// {"timestamp":"2025-01-22T10:00:00.000Z","level":"info","category":"Users","message":"User created","data":{"userId":"123"}} +``` + +### Pretty Formatter (Development) + +Outputs human-readable colored logs for development: + +```typescript +import { prettyLogFormatter, setupLogger, getLogger } from "@nimbus/core"; + +setupLogger({ + logLevel: "debug", + formatter: prettyLogFormatter, + useConsoleColors: true, +}); + +getLogger().debug({ + message: "My message", + category: "Category", + data: { userId: "12345" }, +}); + +// Outputs: +// [Category] DEBUG :: My message +// { +// userId: '12345' +// } +``` + +## OpenTelemetry Integration + +When combined with Deno's native OpenTelemetry support, logs are automatically exported alongside traces and metrics. See the [Observability](/guide/core/observability) documentation for details on enabling OTEL export. + +```bash +export OTEL_DENO=true +export OTEL_EXPORTER_OTLP_ENDPOINT="https://your-otlp-endpoint.com/otlp" +export OTEL_SERVICE_NAME=your-service-name + +deno run src/main.ts +``` ## Default Settings +If `setupLogger()` is not called, the logger uses these defaults: + ```typescript const defaultSettings = { logLevel: "silent", @@ -124,3 +198,61 @@ const defaultSettings = { useConsoleColors: false, }; ``` + +This means logs are silent by default - you must explicitly configure the logger to see output. 
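+
+As a quick sketch of this default behavior (relying on the documented defaults for the formatter and console colors):
+
+```typescript
+import { getLogger, setupLogger } from "@nimbus/core";
+
+// With the default settings the log level is 'silent',
+// so this call produces no output at all.
+getLogger().info({ message: "You will not see this" });
+
+// Once a log level is configured, messages at or above that level are printed.
+setupLogger({ logLevel: "info" });
+getLogger().info({ message: "Now this is printed", category: "Demo" });
+```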
+
+## Nimbus Internal Logs
+
+All Nimbus components (Router, EventBus, etc.) use the same logger configured via `setupLogger()`. This ensures consistent log formatting and level filtering across your application.
+
+## Best Practices
+
+### Use Categories
+
+Group related logs with consistent category names:
+
+```typescript
+logger.info({ message: "Query executed", category: "Database" });
+logger.info({ message: "Request received", category: "API" });
+logger.info({ message: "Email sent", category: "Notifications" });
+```
+
+### Include Correlation IDs
+
+Always include correlation IDs when available for distributed tracing:
+
+```typescript
+logger.info({
+  message: "Processing order",
+  category: "Orders",
+  data: { orderId: order.id },
+  correlationId: command.correlationid,
+});
+```
+
+### Log Errors Properly
+
+Use the dedicated `error` property for errors to preserve stack traces:
+
+```typescript
+// ✅ Good - Error is properly captured
+logger.error({
+  message: "Failed to save user",
+  error: error,
+  correlationId: command.correlationid,
+});
+
+// ❌ Bad - Stack trace is lost
+logger.error({
+  message: "Failed to save user",
+  data: { error: error.message },
+});
+```
+
+### Use Appropriate Log Levels
+
+- `debug`: Detailed info for debugging (disabled in production)
+- `info`: Normal application flow
+- `warn`: Unexpected but recoverable situations
+- `error`: Errors that need investigation
+- `critical`: Failures requiring immediate action
diff --git a/docs/guide/core/queries.md b/docs/guide/core/queries.md
index ddfabd2..ba0318e 100644
--- a/docs/guide/core/queries.md
+++ b/docs/guide/core/queries.md
@@ -1,87 +1,106 @@
+---
+prev:
+  text: "Commands"
+  link: "/guide/core/commands"
+
+next:
+  text: "Events"
+  link: "/guide/core/events"
+---
+
 # Queries
 
-Queries are the messages that tell your application to give you some information.
-Like "Hey give me the account with the ID 1234".
+Queries represent read operations - requests for information without changing application state.
+
+Queries also fit perfectly into the CQRS pattern (Command Query Responsibility Segregation), where reads and writes are separated for better scalability and maintainability. But keep things as simple as your use case requires; CQRS is an option, not a requirement.
 
 ::: info Example Application
-You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense)
+The examples on this page reference the hono-demo application.
 
-Check it out and run it with `deno task dev`
+You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo)
 :::
 
-## Example
+## Key Characteristics
+
+- **Read Operations**: Queries fetch data without modifying state
+- **Idempotent**: Multiple executions return the same result (if data hasn't changed)
+- **Type-Safe**: Queries are fully typed and validated using Zod
+- **Optimized for Reading**: Can use specialized read models or databases
 
-At first we define the query and the core functionality in a file called `getAccount.ts` in the `core/queries` folder. If you like you can also split the query definition and the function into separate files. Or add more functions to handle the core business logic involved when getting an account.
+## Query Structure + +A query in Nimbus follows the CloudEvents specification and consists of: + +```typescript +type Query = { + specversion: "1.0"; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; +``` -Next we add a query handler in a fille called `getAccount.handler.ts` in the `shell/queries` folder. This is the first function that is executed when the app receives this specific query. +| Property | Description | +| ----------------- | -------------------------------------------------------------------------------- | +| `specversion` | The CloudEvents specification version (always `'1.0'`) | +| `id` | A globally unique identifier for the query | +| `correlationid` | A unique identifier to correlate this query with related messages | +| `time` | ISO 8601 timestamp when the query was created | +| `source` | A URI reference identifying the system creating the query | +| `type` | The query type following CloudEvents naming (e.g., `at.overlap.nimbus.get-user`) | +| `data` | The query parameters (e.g., filters, pagination) | +| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | +| `dataschema` | Optional URL to the schema the data adheres to | -The query handler contains all the glue needed to communicate with other parts of the application and to handle all the side-effects. In this example we first read the account from the database and then we call the core function to apply the business logic. Finally we return the account to the caller. +## Query Schema -::: code-group +Nimbus provides a base Zod schema for validating queries: -```typescript [Core] -import { - AuthContext, - InvalidInputException, - Query, - QueryMetadata, -} from "@nimbus/core"; +```typescript +import { querySchema } from "@nimbus/core"; import { z } from "zod"; -import { Account } from "../account.type.ts"; -// Define the Query with it's unique name, parameters and metadata -export const GetAccountQuery = Query( - z.literal("GET_ACCOUNT"), - z.object({ +// Extend the base schema with your specific query type and data +const getUserQuerySchema = querySchema.extend({ + type: z.literal("at.overlap.nimbus.get-user"), + data: z.object({ id: z.string().length(24), }), - QueryMetadata(AuthContext) // You can define you own meta data type if needed -); -export type GetAccountQuery = z.infer; - -export const getAccount = ( - data: Account, - authContext?: AuthContext -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - // Apply more business logic if necessary. - // For example remove sensitive properties based on permission levels. 
- - return data; -}; +}); + +type GetUserQuery = z.infer; ``` -```typescript [Shell] -import { RouteHandler } from "@nimbus/core"; -import { ObjectId } from "mongodb"; -import { Account } from "../../core/account.type.ts"; -import { getAccount, GetAccountQuery } from "../../core/queries/getAccount.ts"; -import { accountRepository } from "../account.repository.ts"; - -export const getAccountHandler: RouteHandler = async ( - query -) => { - // Read the account from the database - let account = await accountRepository.findOne({ - filter: { _id: new ObjectId(query.params.id) }, - }); - - // Call the core function - account = getAccount(account, query.metadata.authContext); - - // Return the successful result - return { - statusCode: 200, - data: account, - }; -}; +## Create Queries + +You can create queries using the `createQuery()` helper: + +```typescript +import { createQuery } from "@nimbus/core"; +import { GetUserQuery } from "./getUser.query.ts"; + +const query = createQuery({ + type: "at.overlap.nimbus.get-user", + source: "nimbus.overlap.at", + data: { + id: "123", + }, +}); ``` -::: +The `createQuery()` helper automatically generates default values for: + +- `id` - A unique ULID +- `correlationid` - A unique ULID (if not provided) +- `time` - Current ISO timestamp +- `specversion` - Always `'1.0'` +- `datacontenttype` - Defaults to `'application/json'` -## Receive and Route Queries +## Routing Queries -Learn more about how to receive and route queries in the [Query Bus](/guide/core/event-bus.md) guide. +Queries are routed to handlers using the [MessageRouter](/guide/core/router). See the Router documentation for details on registering handlers and routing messages. diff --git a/docs/guide/core/router.md b/docs/guide/core/router.md index 18e1a93..de37650 100644 --- a/docs/guide/core/router.md +++ b/docs/guide/core/router.md @@ -1,79 +1,171 @@ -# Router +--- +prev: + text: "Events" + link: "/guide/core/events" -The Nimbus router is responsible to take any input and route it to the correct handler. It is the entry point for all incoming messages. +next: + text: "Event Bus" + link: "/guide/core/event-bus" +--- -## Example +# Router -In this example we create a router for the `addAccount` Command, the `getAccount` Query and the `accountAdded` Event from the previous examples. +The MessageRouter is responsible for routing incoming messages (commands, queries, and events) to their registered handlers. It provides automatic validation, type safety, and observability for all routed messages. -```typescript -import { createRouter } from "@nimbus/core"; +::: info Example Application +The examples on this page reference the hono-demo application. -import { getAccountHandler } from "./queries/getAccount.handler.ts"; -import { GetAccountQuery } from "../core/queries/getAccount.ts"; +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) +::: -import { addAccountHandler } from "./commands/addAccount.handler.ts"; -import { AddAccountCommand } from "../core/command/addAccount.ts"; +## Setup and Configuration -import { accountAddedHandler } from "./events/accountAdded.handler.ts"; -import { AccountAddedEvent } from "../core/events/accountAdded.ts"; +Configure the router at application startup using `setupRouter()`, then retrieve it anywhere using `getRouter()`. 
-const accountRouter = createRouter({ - handlerMap: { - GET_ACCOUNT: { - handler: getAccountHandler, - inputType: GetAccountQuery, - }, - ADD_ACCOUNT: { - handler: addAccountHandler, - inputType: AddAccountCommand, - }, - ACCOUNT_ADDED: { - handler: accountAddedHandler, - inputType: AccountAddedEvent, - }, - }, -}); +```typescript +import { getLogger, setupRouter } from "@nimbus/core"; -// Will result in a successful response -const result = await accountRouter({ - name: "GET_ACCOUNT", - params: { - id: "67580951d5260d05eaa7f913", +setupRouter("MyRouter", { + logInput: (input) => { + getLogger().debug({ + category: "MyRouter", + message: "Received input", + data: { input }, + ...(input?.correlationid + ? { correlationId: input.correlationid } + : {}), + }); }, - metadata: { - correlationId: "123", - authContext: { - sub: "admin@host.tld", - }, + logOutput: (output) => { + getLogger().debug({ + category: "MyRouter", + message: "Output", + data: { output }, + ...(output?.correlationid + ? { correlationId: output.correlationid } + : {}), + }); }, }); +``` + +### Configuration Options -// Will throw an InvalidInputException as the id parameter is missing -const result = await accountRouter({ - name: "UNKNOWN_QUERY", - params: {}, - metadata: { - correlationId: "123", - authContext: { - sub: "admin@host.tld", +| Option | Type | Description | +| ----------- | ----------------------- | ---------------------------------------------------- | +| `logInput` | `(input: any) => void` | Optional callback invoked when a message is received | +| `logOutput` | `(output: any) => void` | Optional callback invoked after successful handling | + +## Registering Handlers + +Register handlers for message types using the `register()` method: + +```typescript +import { getRouter } from "@nimbus/core"; + +export const registerUserMessages = () => { + const router = getRouter("MyRouter"); + + // Register a command + router.register( + "at.overlap.nimbus.add-user", + async (command: AddUserCommand) => { + // Process command and return result }, - }, -}); + addUserCommandSchema + ); -// Will throw an NotFoundException as no route for UNKNOWN_QUERY is defined. 
-const result = await accountRouter({ - name: "UNKNOWN_QUERY", - params: {}, - metadata: { - correlationId: "123", - authContext: { - sub: "admin@host.tld", + // Register an event + router.register( + "at.overlap.nimbus.user-added", + async (event: UserAddedEvent) => { + // Process event and return result }, - }, + addUserCommandSchema + ); + + // Register a query + router.register( + "at.overlap.nimbus.get-user", + async (query: GetUserQuery) => { + // Process query and return result + }, + getUserQuerySchema + ); +}; +``` + +The `register()` method takes three arguments: + +| Argument | Description | +| ------------- | ------------------------------------------------------------------ | +| `messageType` | The CloudEvents type string (e.g., `'at.overlap.nimbus.add-user'`) | +| `handler` | An async function that processes the message and returns a result | +| `schema` | A Zod schema used to validate the incoming message | + +## Routing Messages + +Route messages to their handlers using the `route()` method: + +```typescript +import { createCommand, getRouter } from "@nimbus/core"; + +const command = createCommand({ + type: "at.overlap.nimbus.add-user", + source: "nimbus.overlap.at", + correlationid: httpRequestCorrelationId, + data: httpRequestBody, }); + +const router = getRouter("MyRouter"); + +const result = await router.route(command); ``` -## Type Safety +## Validation + +The router automatically validates incoming messages against their registered schemas: + +1. **Message Type Check**: Verifies the message has a `type` attribute +2. **Handler Lookup**: Finds the registered handler for the message type +3. **Schema Validation**: Validates the message against the Zod schema +4. **Handler Execution**: Passes the validated message to the handler + +If validation fails, an `InvalidInputException` is thrown with details about the validation errors: + +```typescript +{ + name: 'INVALID_INPUT', + message: 'The provided input is invalid', + statusCode: 400, + details: { + issues: [ + { path: ['data', 'email'], message: 'Invalid email' } + ] + } +} +``` + +## Error Handling + +The router throws appropriate exceptions for different error conditions: + +| Error | Exception | Description | +| ------------------ | ----------------------- | --------------------------------------------- | +| Missing type | `InvalidInputException` | The message has no `type` attribute | +| Unknown type | `NotFoundException` | No handler registered for the message type | +| Validation failure | `InvalidInputException` | The message failed schema validation | +| Handler error | _(propagated)_ | Errors from handlers are propagated unchanged | + +## Observability + +The router is fully instrumented with OpenTelemetry tracing and metrics. See the [Observability](/guide/core/observability) documentation for details. + +**Tracing**: + +- Automatic spans for every routed message + +**Metrics**: -The router will validate the input against the input type defined in the handler map and will throw an `InvalidInputException` if the input is invalid. This ensures that the handler function will always receive the correct type checked input. 
+- `router_messages_routed_total` counter +- `router_routing_duration_seconds` histogram diff --git a/docs/guide/hono/correlationid.md b/docs/guide/hono/correlationid.md new file mode 100644 index 0000000..2678f4b --- /dev/null +++ b/docs/guide/hono/correlationid.md @@ -0,0 +1,88 @@ +--- +prev: + text: "Nimbus Hono" + link: "/guide/hono" + +next: + text: "Logger Middleware" + link: "/guide/hono/logger" +--- + +# CorrelationID Middleware + +The CorrelationID middleware extracts a correlation ID from incoming request headers or generates a new one using ULID. This ID is stored in the Hono context and optionally added to response headers, enabling request tracing across your application. + +## Basic Usage + +```typescript +import { Hono } from "hono"; +import { correlationId, getCorrelationId } from "@nimbus/hono"; + +const app = new Hono(); + +// Add the middleware +app.use(correlationId()); + +app.get("/", (c) => { + const id = getCorrelationId(c); + return c.json({ correlationId: id }); +}); +``` + +## Header Detection + +The middleware checks the following headers in order of priority: + +| Priority | Header Name | +| -------- | ------------------ | +| 1 | `x-correlation-id` | +| 2 | `x-request-id` | +| 3 | `request-id` | + +If none of these headers are present, a new ULID is generated. + +## Configuration Options + +| Option | Type | Default | Description | +| ---------------------- | --------- | -------------------- | ------------------------------------------ | +| `addToResponseHeaders` | `boolean` | `true` | Add the correlation ID to response headers | +| `responseHeaderName` | `string` | `"x-correlation-id"` | The header name to use in the response | + +```typescript +import { correlationId } from "@nimbus/hono"; + +// Custom configuration +app.use( + correlationId({ + addToResponseHeaders: true, + responseHeaderName: "x-request-id", + }) +); +``` + +## Retrieving the Correlation ID + +Use the `getCorrelationId()` helper function to retrieve the correlation ID from the Hono context: + +```typescript +import { getCorrelationId } from "@nimbus/hono"; + +app.get("/users/:id", async (c) => { + const correlationId = getCorrelationId(c); + + // Use in logging + logger.info({ + message: "Fetching user", + correlationId, + }); + + // Pass to commands/queries + const command = createCommand({ + type: "get-user", + correlationid: correlationId, + data: { id: c.req.param("id") }, + }); + + return c.json(await router.route(command)); +}); +``` diff --git a/docs/guide/hono/index.md b/docs/guide/hono/index.md new file mode 100644 index 0000000..286b42b --- /dev/null +++ b/docs/guide/hono/index.md @@ -0,0 +1,33 @@ +--- +prev: + text: "Exceptions" + link: "/guide/core/exceptions" + +next: + text: "CorrelationID Middleware" + link: "/guide/hono/correlationid" +--- + +# Nimbus Hono + +Adapters and useful functionality to bridge Nimbus and [Hono](https://hono.dev/). 
+
+[https://jsr.io/@nimbus/hono](https://jsr.io/@nimbus/hono)
+
+### Deno
+
+```bash
+deno add jsr:@nimbus/hono
+```
+
+### NPM
+
+```bash
+npx jsr add @nimbus/hono
+```
+
+### Bun
+
+```bash
+bunx jsr add @nimbus/hono
+```
diff --git a/docs/guide/hono/logger.md b/docs/guide/hono/logger.md
new file mode 100644
index 0000000..9053730
--- /dev/null
+++ b/docs/guide/hono/logger.md
@@ -0,0 +1,117 @@
+---
+prev:
+  text: "CorrelationID Middleware"
+  link: "/guide/hono/correlationid"
+
+next:
+  text: "onError Handler"
+  link: "/guide/hono/on-error"
+---
+
+# Logger Middleware
+
+The Logger middleware logs HTTP requests and responses with timing information using the Nimbus logger. It optionally integrates with OpenTelemetry for distributed tracing.
+
+## Basic Usage
+
+```typescript
+import { Hono } from "hono";
+import { correlationId, logger } from "@nimbus/hono";
+
+const app = new Hono();
+
+// Use correlationId middleware first to enable correlation ID in logs
+app.use(correlationId());
+app.use(logger());
+```
+
+## Configuration Options
+
+| Option          | Type      | Default    | Description                                |
+| --------------- | --------- | ---------- | ------------------------------------------ |
+| `enableTracing` | `boolean` | `true`     | Enable OpenTelemetry tracing for requests  |
+| `tracerName`    | `string`  | `"nimbus"` | The name of the tracer for OpenTelemetry   |
+
+```typescript
+import { logger } from "@nimbus/hono";
+
+app.use(
+  logger({
+    enableTracing: true,
+    tracerName: "api",
+  })
+);
+```
+
+## Log Output
+
+The middleware logs each request and response using the Nimbus logger:
+
+**Request log:**
+
+```
+[API] INFO :: REQ: [GET] /users/123
+```
+
+**Response log (with timing):**
+
+```
+[API] INFO :: RES: [GET] /users/123 - 45ms
+```
+
+Both logs include the correlation ID when the `correlationId` middleware is used.
+
+## OpenTelemetry Tracing
+
+When `enableTracing` is set to `true`, the middleware:
+
+1. **Extracts trace context** from incoming `traceparent` and `tracestate` headers
+2. **Creates a server span** for the HTTP request
+3. **Records span attributes** for observability
+4. **Propagates context** so child spans can be created in handlers
+
+### Span Attributes
+
+| Attribute          | Description                       |
+| ------------------ | --------------------------------- |
+| `http.method`      | The HTTP method (GET, POST, etc.)
| +| `url.path` | The request path | +| `http.target` | The full request URL | +| `correlation_id` | The correlation ID (if available) | +| `http.status_code` | The response status code | + +### Example with Tracing + +```typescript +import { Hono } from "hono"; +import { correlationId, logger } from "@nimbus/hono"; + +const app = new Hono(); + +app.use(correlationId()); +app.use( + logger({ + enableTracing: true, + tracerName: "api", + }) +); + +app.get("/users/:id", async (c) => { + // This handler runs within the HTTP span context + // Any spans created here will be children of the HTTP span + const user = await userRepository.findOne({ + filter: { _id: c.req.param("id") }, + }); + + return c.json(user); +}); +``` + +## Error Handling + +When an error occurs during request handling: + +- The span status is set to `ERROR` +- The error message is recorded in the span +- The exception is recorded for debugging +- The error is re-thrown for the error handler to process diff --git a/docs/guide/hono/on-error.md b/docs/guide/hono/on-error.md new file mode 100644 index 0000000..36fdc64 --- /dev/null +++ b/docs/guide/hono/on-error.md @@ -0,0 +1,159 @@ +--- +prev: + text: "Logger Middleware" + link: "/guide/hono/logger" + +next: + text: "Nimbus MongoDB" + link: "/guide/mongodb" +--- + +# onError Handler + +The `handleError` function is an error handler for Hono applications that converts Nimbus exceptions to structured HTTP JSON responses. + +## Basic Usage + +```typescript +import { Hono } from "hono"; +import { handleError } from "@nimbus/hono"; + +const app = new Hono(); + +// Register the error handler +app.onError(handleError); +``` + +## Response Format + +When a Nimbus exception is thrown, the handler returns a JSON response with the following structure: + +```json +{ + "error": "EXCEPTION_NAME", + "message": "Human-readable error message", + "details": { ... 
} +} +``` + +| Field | Description | +| --------- | -------------------------------------------------------- | +| `error` | The exception name (e.g., `NOT_FOUND`, `INVALID_INPUT`) | +| `message` | The error message provided when throwing the exception | +| `details` | Optional additional details (only included if provided) | + +## Status Code Mapping + +The HTTP status code is taken directly from the exception's `statusCode` property: + +| Exception | Status Code | Response `error` | +| ----------------------- | ----------- | ----------------------- | +| `GenericException` | 500 | `GENERIC_EXCEPTION` | +| `InvalidInputException` | 400 | `INVALID_INPUT` | +| `NotFoundException` | 404 | `NOT_FOUND` | +| `UnauthorizedException` | 401 | `UNAUTHORIZED` | +| `ForbiddenException` | 403 | `FORBIDDEN` | +| Custom exceptions | (custom) | (custom name) | + +## Logging Behavior + +The handler logs errors differently based on the status code: + +| Status Code | Log Level | Description | +| ----------- | ---------- | ---------------------------------------- | +| 5xx | `error` | Server errors that need investigation | +| 4xx | `debug` | Client errors, typically expected | +| Unhandled | `critical` | Non-Nimbus errors, unexpected failures | + +## Example: Exception Handling + +```typescript +import { Hono } from "hono"; +import { handleError } from "@nimbus/hono"; +import { NotFoundException, InvalidInputException } from "@nimbus/core"; + +const app = new Hono(); + +app.get("/users/:id", async (c) => { + const user = await findUser(c.req.param("id")); + + if (!user) { + throw new NotFoundException("User not found", { + userId: c.req.param("id"), + }); + } + + return c.json(user); +}); + +app.post("/users", async (c) => { + const body = await c.req.json(); + + if (!body.email) { + throw new InvalidInputException("Email is required", { + field: "email", + }); + } + + const user = await createUser(body); + return c.json(user, 201); +}); + +app.onError(handleError); +``` + +### Response Examples + +**NotFoundException (404):** +```json +{ + "error": "NOT_FOUND", + "message": "User not found", + "details": { + "userId": "123" + } +} +``` + +**InvalidInputException (400):** +```json +{ + "error": "INVALID_INPUT", + "message": "Email is required", + "details": { + "field": "email" + } +} +``` + +**Unhandled Error (500):** +```json +{ + "error": "INTERNAL_SERVER_ERROR" +} +``` + +## Complete Application Setup + +```typescript +import { Hono } from "hono"; +import { correlationId, handleError, logger } from "@nimbus/hono"; +import { setupLogger, parseLogLevel } from "@nimbus/core"; + +setupLogger({ + logLevel: parseLogLevel(process.env.LOG_LEVEL), +}); + +const app = new Hono(); + +app.use(correlationId()); +app.use(logger({ enableTracing: true })); + +// Your routes here +app.get("/health", (c) => c.json({ status: "ok" })); + +// Error handler must be registered last +app.onError(handleError); + +export default app; +``` diff --git a/docs/guide/mongodb/connection-manager.md b/docs/guide/mongodb/connection-manager.md new file mode 100644 index 0000000..c1c69b6 --- /dev/null +++ b/docs/guide/mongodb/connection-manager.md @@ -0,0 +1,208 @@ +--- +prev: + text: "Nimbus MongoDB" + link: "/guide/mongodb" + +next: + text: "Repository" + link: "/guide/mongodb/repository" +--- + +# Connection Manager + +The `MongoConnectionManager` is a singleton class that manages MongoDB connections with automatic reconnection, health checks, and cleanup of inactive connections. 
+
+## Basic Usage
+
+```typescript
+import { MongoConnectionManager } from "@nimbus/mongodb";
+import { ServerApiVersion } from "mongodb";
+
+const mongoManager = MongoConnectionManager.getInstance(
+  process.env.MONGO_URL ?? "",
+  {
+    mongoClientOptions: {
+      appName: "my-app",
+      serverApi: {
+        version: ServerApiVersion.v1,
+        strict: false,
+        deprecationErrors: true,
+      },
+    },
+  }
+);
+
+// Get a collection
+const collection = await mongoManager.getCollection("myDatabase", "users");
+```
+
+## Configuration Options
+
+| Option               | Type                 | Default      | Description                                       |
+| -------------------- | -------------------- | ------------ | ------------------------------------------------- |
+| `connectionTimeout`  | `number`             | `1800000`    | Inactivity timeout in ms before cleanup (30 min)  |
+| `mongoClientOptions` | `MongoClientOptions` | _(required)_ | MongoDB driver client options                     |
+
+### Recommended Configuration
+
+```typescript
+import { MongoConnectionManager } from "@nimbus/mongodb";
+import { ServerApiVersion } from "mongodb";
+
+const mongoManager = MongoConnectionManager.getInstance(
+  process.env.MONGO_URL ?? "",
+  {
+    connectionTimeout: 1000 * 60 * 5, // 5 minutes
+    mongoClientOptions: {
+      appName: "my-app",
+      serverApi: {
+        version: ServerApiVersion.v1,
+        strict: false,
+        deprecationErrors: true,
+      },
+      maxPoolSize: 10,
+      minPoolSize: 0,
+      maxIdleTimeMS: 1000 * 60 * 1, // 1 minute idle timeout
+      connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout
+      socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout
+    },
+  }
+);
+```
+
+## Available Methods
+
+| Method                              | Return Type                     | Description                     |
+| ----------------------------------- | ------------------------------- | ------------------------------- |
+| `getInstance(uri, options)`         | `MongoConnectionManager`        | Get the singleton instance      |
+| `getClient()`                       | `Promise<MongoClient>`          | Get a connected MongoDB client  |
+| `getDatabase(dbName)`               | `Promise<Db>`                   | Get a database instance         |
+| `getCollection(dbName, collection)` | `Promise<Collection>`           | Get a collection instance       |
+| `healthCheck()`                     | `Promise<{ status, details? }>` | Check connection health         |
+| `cleanup()`                         | `Promise<void>`                 | Close inactive connections      |
+
+## Connection Management
+
+The manager automatically handles:
+
+- **Connection pooling**: Reuses existing connections when available
+- **Reconnection**: Automatically reconnects when the connection is lost
+- **Connection testing**: Verifies connections with a ping before returning
+
+### Getting Resources
+
+```typescript
+// Get a connected client
+const client = await mongoManager.getClient();
+
+// Get a database
+const db = await mongoManager.getDatabase("myDatabase");
+
+// Get a collection (most common)
+const usersCollection = await mongoManager.getCollection("myDatabase", "users");
+```
+
+## Health Checks
+
+Use `healthCheck()` to verify the database connection:
+
+```typescript
+app.get("/health", async (c) => {
+  const dbHealth = await mongoManager.healthCheck();
+
+  return c.json({
+    status: dbHealth.status === "healthy" ? "ok" : "error",
+    database: dbHealth,
+  });
+});
+```
+
+**Response format:**
+
+```typescript
+// Healthy
+{ status: "healthy" }
+
+// Error
+{ status: "error", details: "Failed to ping MongoDB server" }
+```
+
+## Cleanup
+
+The `cleanup()` method closes connections that have been inactive longer than the configured `connectionTimeout`.
Set up an interval to call this periodically: + +```typescript +import { getLogger } from "@nimbus/core"; + +// Check every minute for inactive connections +setInterval(() => { + mongoManager.cleanup().catch((error) => { + getLogger().error({ + message: "Failed to cleanup MongoDB connections", + error, + }); + }); +}, 1000 * 60); +``` + +## Complete Setup Example + +```typescript +import { getLogger } from "@nimbus/core"; +import { MongoConnectionManager } from "@nimbus/mongodb"; +import { ServerApiVersion } from "mongodb"; + +export const mongoManager = MongoConnectionManager.getInstance( + process.env.MONGO_URL ?? "", + { + connectionTimeout: 1000 * 60 * 5, + mongoClientOptions: { + appName: "my-app", + serverApi: { + version: ServerApiVersion.v1, + strict: false, + deprecationErrors: true, + }, + maxPoolSize: 10, + minPoolSize: 0, + maxIdleTimeMS: 1000 * 60 * 1, + connectTimeoutMS: 1000 * 15, + socketTimeoutMS: 1000 * 30, + }, + } +); + +export const initMongoConnectionManager = () => { + // Periodic cleanup of inactive connections + setInterval(() => { + mongoManager.cleanup().catch((error) => { + getLogger().error({ + message: error.message, + error, + }); + }); + }, 1000 * 60); +}; +``` + +## Using with Repository + +The connection manager integrates seamlessly with the `MongoDBRepository` class: + +```typescript +import { MongoDBRepository } from "@nimbus/mongodb"; +import { mongoManager } from "./mongodb.ts"; +import { User, UserSchema } from "./user.ts"; + +class UserRepository extends MongoDBRepository { + constructor() { + super( + () => mongoManager.getCollection("myDatabase", "users"), + UserSchema, + "User" + ); + } +} + +export const userRepository = new UserRepository(); +``` diff --git a/docs/guide/mongodb/crud.md b/docs/guide/mongodb/crud.md index 653422f..63d6b08 100644 --- a/docs/guide/mongodb/crud.md +++ b/docs/guide/mongodb/crud.md @@ -1 +1,294 @@ -# Nimbus MongoDB - CRUD +--- +prev: + text: "Repository" + link: "/guide/mongodb/repository" + +next: + text: "MongoJSON" + link: "/guide/mongodb/mongo-json" +--- + +# CRUD+ + +The MongoDB package provides low-level CRUD functions for direct database operations. These functions are fully instrumented with OpenTelemetry tracing and metrics, and handle errors using Nimbus exceptions. + +## When to Use + +Use these low-level functions when: + +- You need operations not provided by `MongoDBRepository` +- You want direct control over MongoDB operations +- You're building custom repository methods + +For standard CRUD operations, prefer using the [Repository](/guide/mongodb/repository) class. 
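+
+The examples on this page operate on a MongoDB `collection` handle. One way to obtain it is through the [Connection Manager](/guide/mongodb/connection-manager); a short sketch (assuming the `mongoManager` instance from that guide):
+
+```typescript
+import { mongoManager } from "./mongodb.ts";
+
+// All low-level functions below take this collection handle as input.
+const collection = await mongoManager.getCollection("myDatabase", "users");
+```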
+ +## Available Functions + +| Function | Description | +| ------------------- | ----------------------------------------- | +| `find` | Find multiple documents matching a filter | +| `findOne` | Find a single document matching a filter | +| `insertOne` | Insert a single document | +| `insertMany` | Insert multiple documents | +| `replaceOne` | Replace a single document | +| `updateOne` | Update a single document | +| `updateMany` | Update multiple documents | +| `deleteOne` | Delete a single document | +| `deleteMany` | Delete multiple documents | +| `countDocuments` | Count documents matching a filter | +| `bulkWrite` | Execute multiple write operations | +| `aggregate` | Execute an aggregation pipeline | +| `findOneAndUpdate` | Find and update a document atomically | +| `findOneAndReplace` | Find and replace a document atomically | +| `findOneAndDelete` | Find and delete a document atomically | + +## Usage Examples + +### Find Operations + +Functions that return typed data require `mapDocument` and `outputType` parameters for type-safe results. + +```typescript +import { find, findOne } from "@nimbus/mongodb"; + +type User = { _id: string; email: string; name: string }; + +// Find multiple documents +const users = await find({ + collection, + filter: { status: "active" }, + limit: 10, + skip: 0, + sort: { createdAt: -1 }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + name: doc.name, + }), + outputType: UserSchema, +}); + +// Find a single document +const user = await findOne({ + collection, + filter: { email: "john@example.com" }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + name: doc.name, + }), + outputType: UserSchema, +}); +``` + +### Insert Operations + +```typescript +import { insertOne, insertMany } from "@nimbus/mongodb"; + +// Insert a single document +const result = await insertOne({ + collection, + document: { + email: "john@example.com", + name: "John Doe", + createdAt: new Date(), + }, +}); + +// Insert multiple documents +const results = await insertMany({ + collection, + documents: [ + { email: "john@example.com", name: "John" }, + { email: "jane@example.com", name: "Jane" }, + ], +}); +``` + +### Update Operations + +```typescript +import { updateOne, updateMany, replaceOne } from "@nimbus/mongodb"; + +// Update a single document +const result = await updateOne({ + collection, + filter: { _id: userId }, + update: { $set: { name: "New Name", updatedAt: new Date() } }, +}); + +// Update multiple documents +const results = await updateMany({ + collection, + filter: { status: "pending" }, + update: { $set: { status: "processed" } }, +}); + +// Replace a document entirely +const replaced = await replaceOne({ + collection, + filter: { _id: userId }, + replacement: { + email: "new@example.com", + name: "New Name", + updatedAt: new Date(), + }, +}); +``` + +### Delete Operations + +```typescript +import { deleteOne, deleteMany } from "@nimbus/mongodb"; + +// Delete a single document +const result = await deleteOne({ + collection, + filter: { _id: userId }, +}); + +// Delete multiple documents +const results = await deleteMany({ + collection, + filter: { status: "deleted" }, +}); +``` + +### Atomic Find-and-Modify Operations + +These functions return the document before or after modification, requiring `mapDocument` and `outputType` for type safety. 
+ +```typescript +import { + findOneAndUpdate, + findOneAndReplace, + findOneAndDelete, +} from "@nimbus/mongodb"; + +type User = { _id: string; email: string; loginCount: number }; + +// Find and update atomically +const updated = await findOneAndUpdate({ + collection, + filter: { _id: userId }, + update: { $inc: { loginCount: 1 } }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + loginCount: doc.loginCount, + }), + outputType: UserSchema, + options: { returnDocument: "after" }, +}); + +// Find and replace atomically +const replaced = await findOneAndReplace({ + collection, + filter: { _id: userId }, + replacement: newDocument, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + loginCount: doc.loginCount, + }), + outputType: UserSchema, + options: { returnDocument: "after" }, +}); + +// Find and delete atomically +const deleted = await findOneAndDelete({ + collection, + filter: { _id: userId }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + loginCount: doc.loginCount, + }), + outputType: UserSchema, +}); +``` + +### Aggregation + +The `aggregate` function executes a pipeline and maps results to typed output. + +```typescript +import { aggregate } from "@nimbus/mongodb"; + +type CategoryCount = { category: string; count: number }; + +const results = await aggregate({ + collection, + aggregation: [ + { $match: { status: "active" } }, + { $group: { _id: "$category", count: { $sum: 1 } } }, + { $sort: { count: -1 } }, + ], + mapDocument: (doc) => ({ + category: doc._id, + count: doc.count, + }), + outputType: CategoryCountSchema, +}); +``` + +### Bulk Write + +```typescript +import { bulkWrite } from "@nimbus/mongodb"; + +const result = await bulkWrite({ + collection, + operations: [ + { insertOne: { document: { name: "New Item" } } }, + { + updateOne: { + filter: { _id: id1 }, + update: { $set: { status: "updated" } }, + }, + }, + { deleteOne: { filter: { _id: id2 } } }, + ], +}); +``` + +### Count Documents + +```typescript +import { countDocuments } from "@nimbus/mongodb"; + +const count = await countDocuments({ + collection, + filter: { status: "active" }, +}); +``` + +## Observability + +All CRUD functions are automatically instrumented with OpenTelemetry tracing and metrics. + +### Tracing + +Each operation creates a span with the following attributes: + +| Attribute | Description | +| ----------------------- | ---------------------------------------- | +| `db.system` | Always `mongodb` | +| `db.operation` | The operation name (e.g., `find`) | +| `db.mongodb.collection` | The collection name | + +### Metrics + +Two metrics are recorded for every operation: + +| Metric | Type | Labels | Description | +| ----------------------------------- | --------- | -------------------------------- | ---------------------------------- | +| `mongodb_operation_total` | Counter | `operation`, `collection`, `status` | Total number of operations | +| `mongodb_operation_duration_seconds`| Histogram | `operation`, `collection` | Duration of operations in seconds | + +The `status` label is either `success` or `error`. + +## Error Handling + +All functions use `handleMongoError` internally to convert MongoDB errors to Nimbus exceptions. See [handleMongoError](/guide/mongodb/handle-mongo-error) for details on error mapping. 
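+
+## Combined Example
+
+Putting a few of the functions above together, a paginated listing might look like the following sketch (it assumes the `collection` handle, a `mapDocument` callback, and the `UserSchema` shown in the earlier examples):
+
+```typescript
+import { countDocuments, find } from "@nimbus/mongodb";
+
+// Hypothetical helper: fetch one page of active users plus the total count.
+const listActiveUsers = async (page: number, pageSize: number) => {
+  const filter = { status: "active" };
+
+  const [items, total] = await Promise.all([
+    find({
+      collection,
+      filter,
+      limit: pageSize,
+      skip: page * pageSize,
+      sort: { createdAt: -1 },
+      mapDocument,
+      outputType: UserSchema,
+    }),
+    countDocuments({ collection, filter }),
+  ]);
+
+  return { items, total, page, pageSize };
+};
+```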
diff --git a/docs/guide/mongodb/deploy-collection.md b/docs/guide/mongodb/deploy-collection.md new file mode 100644 index 0000000..d565b9c --- /dev/null +++ b/docs/guide/mongodb/deploy-collection.md @@ -0,0 +1,149 @@ +--- +prev: + text: "handleMongoError" + link: "/guide/mongodb/handle-mongo-error" + +next: + text: "Nimbus Utils" + link: "/guide/utils" +--- + +# Deploy Collection + +The `deployMongoCollection` function creates or updates MongoDB collections with schema validation and indexes. It provides a declarative way to manage your database schema. + +## Basic Usage + +```typescript +import { deployMongoCollection } from "@nimbus/mongodb"; +import { mongoManager } from "./mongodb.ts"; + +const USERS_COLLECTION = { + name: "users", + options: { + validator: { + $jsonSchema: { + bsonType: "object", + required: ["email", "firstName", "lastName"], + properties: { + email: { bsonType: "string" }, + firstName: { bsonType: "string" }, + lastName: { bsonType: "string" }, + }, + }, + }, + }, + indexes: [ + { key: { email: 1 }, unique: true }, + { key: { lastName: 1, firstName: 1 } }, + ], +}; + +const client = await mongoManager.getClient(); + +await deployMongoCollection({ + mongoClient: client, + dbName: "myDatabase", + collectionDefinition: USERS_COLLECTION, + allowUpdateIndexes: true, +}); +``` + +## Function Parameters + +| Parameter | Type | Description | +| ---------------------- | --------------------------- | ------------------------------------------------- | +| `mongoClient` | `MongoClient` | A connected MongoDB client instance | +| `dbName` | `string` | The name of the database | +| `collectionDefinition` | `MongoCollectionDefinition` | The collection definition object | +| `allowUpdateIndexes` | `boolean` | Whether to update indexes on existing collections | + +## Collection Definition + +The `MongoCollectionDefinition` type defines the structure of a collection: + +```typescript +type MongoCollectionDefinition = { + name: string; + options?: CreateCollectionOptions; + indexes?: IndexDescription[]; +}; +``` + +| Property | Type | Description | +| --------- | ------------------------- | --------------------------------------------- | +| `name` | `string` | The name of the collection | +| `options` | `CreateCollectionOptions` | MongoDB collection options (validation, etc.) | +| `indexes` | `IndexDescription[]` | Array of index definitions | + +## Behavior + +The function handles two scenarios: + +### New Collection + +When the collection does not exist: + +1. Creates the collection with the specified options +2. Creates all defined indexes + +### Existing Collection + +When the collection already exists: + +1. Updates collection options using `collMod` +2. 
If `allowUpdateIndexes` is `true`: + - Creates any new indexes not present in the database + - Drops any indexes not defined in the collection definition (except `_id_`) + +## Index Management + +Indexes are automatically named based on their key fields if no name is provided: + +```typescript +// This index will be named "email_1" +{ key: { email: 1 } } + +// This index will be named "lastName_1_firstName_1" +{ key: { lastName: 1, firstName: 1 } } + +// Explicit name +{ key: { email: 1 }, name: "email_unique_idx", unique: true } +``` + +## Deployment Script + +Create a script to deploy all collections: + +```typescript +import { deployMongoCollection } from "@nimbus/mongodb"; +import { setupLogger, parseLogLevel } from "@nimbus/core"; +import { mongoManager } from "./mongodb.ts"; +import { USERS_COLLECTION } from "./collections/users.ts"; +import { ORDERS_COLLECTION } from "./collections/orders.ts"; + +// Configure logging to see deployment progress +setupLogger({ + logLevel: parseLogLevel("info"), +}); + +const collections = [USERS_COLLECTION, ORDERS_COLLECTION]; + +const deployCollections = async () => { + const client = await mongoManager.getClient(); + const dbName = process.env.MONGO_DB ?? "myDatabase"; + + for (const collection of collections) { + await deployMongoCollection({ + mongoClient: client, + dbName, + collectionDefinition: collection, + allowUpdateIndexes: true, + }); + } + + console.log("All collections deployed successfully"); +}; + +deployCollections().catch(console.error); +``` diff --git a/docs/guide/mongodb/handle-mongo-error.md b/docs/guide/mongodb/handle-mongo-error.md new file mode 100644 index 0000000..ac45176 --- /dev/null +++ b/docs/guide/mongodb/handle-mongo-error.md @@ -0,0 +1,155 @@ +--- +prev: + text: "MongoJSON" + link: "/guide/mongodb/mongo-json" + +next: + text: "Deploy Collection" + link: "/guide/mongodb/deploy-collection" +--- + +# handleMongoError + +The `handleMongoError` function converts MongoDB errors to Nimbus exceptions based on the error code. This provides consistent error handling across your application. + +## Basic Usage + +```typescript +import { handleMongoError } from "@nimbus/mongodb"; + +try { + await collection.insertOne(document); +} catch (error) { + throw handleMongoError(error); +} +``` + +## Error Code Mappings + +| MongoDB Code | Error Type | Nimbus Exception | Details Included | +| ------------ | ------------------- | ----------------------- | ----------------- | +| 121 | Document validation | `InvalidInputException` | `code`, `details` | +| 2 | Bad value | `InvalidInputException` | Error message | +| 11000 | Duplicate key | `InvalidInputException` | `keyValue` | +| Other | Various | `GenericException` | Original error | + +## Error Examples + +### Duplicate Key Error (Code 11000) + +When inserting a document that violates a unique index: + +```typescript +try { + await collection.insertOne({ email: "existing@example.com" }); +} catch (error) { + const exception = handleMongoError(error); + // InvalidInputException with: + // - message: "E11000 duplicate key error..." + // - details: { keyValue: { email: "existing@example.com" } } +} +``` + +### Document Validation Error (Code 121) + +When a document fails schema validation: + +```typescript +try { + await collection.insertOne({ name: 123 }); // name should be string +} catch (error) { + const exception = handleMongoError(error); + // InvalidInputException with: + // - message: "Document failed validation" + // - details: { code: 121, details: { ... validation errors ... 
} } +} +``` + +### Bad Value Error (Code 2) + +When a query contains invalid values: + +```typescript +try { + await collection.find({ $invalid: true }).toArray(); +} catch (error) { + const exception = handleMongoError(error); + // InvalidInputException with original error message +} +``` + +### Other Errors + +All other MongoDB errors are wrapped in a `GenericException`: + +```typescript +try { + await collection.find({}).toArray(); +} catch (error) { + const exception = handleMongoError(error); + // GenericException with original error stack trace +} +``` + +## Integration with CRUD Functions + +All [CRUD functions](/guide/mongodb/crud) use `handleMongoError` internally: + +```typescript +import { insertOne } from "@nimbus/mongodb"; + +try { + await insertOne({ + collection, + document: { email: "duplicate@example.com" }, + }); +} catch (error) { + // Error is already a Nimbus exception + if (error.name === "INVALID_INPUT") { + // Handle duplicate key or validation error + } +} +``` + +## Custom Error Handling + +You can use `handleMongoError` in your own database operations: + +```typescript +import { handleMongoError } from "@nimbus/mongodb"; +import { GenericException } from "@nimbus/core"; + +const customDatabaseOperation = async (collection: Collection) => { + try { + // Custom MongoDB operation + const result = await collection + .aggregate([ + { $match: { status: "active" } }, + { $group: { _id: "$category", total: { $sum: "$amount" } } }, + ]) + .toArray(); + + return result; + } catch (error) { + throw handleMongoError(error); + } +}; +``` + +## Error Response in API + +When combined with the Hono [error handler](/guide/hono/on-error), MongoDB errors are automatically converted to HTTP responses: + +```typescript +// Duplicate key error becomes: +// HTTP 400 +{ + "error": "INVALID_INPUT", + "message": "E11000 duplicate key error collection: db.users index: email_1 dup key: { email: \"existing@example.com\" }", + "details": { + "keyValue": { + "email": "existing@example.com" + } + } +} +``` diff --git a/docs/guide/mongodb/index.md b/docs/guide/mongodb/index.md index 9bb7446..7173de4 100644 --- a/docs/guide/mongodb/index.md +++ b/docs/guide/mongodb/index.md @@ -1,11 +1,11 @@ --- prev: - text: "Middleware" - link: "/guide/oak/middleware" + text: "onError Handler" + link: "/guide/hono/on-error" next: - text: "Repository" - link: "/guide/mongodb/repository" + text: "Connection Manager" + link: "/guide/mongodb/connection-manager" --- # Nimbus MongoDB Package diff --git a/docs/guide/mongodb/mongo-json.md b/docs/guide/mongodb/mongo-json.md new file mode 100644 index 0000000..3b0f0a8 --- /dev/null +++ b/docs/guide/mongodb/mongo-json.md @@ -0,0 +1,127 @@ +--- +prev: + text: "CRUD+" + link: "/guide/mongodb/crud" + +next: + text: "handleMongoError" + link: "/guide/mongodb/handle-mongo-error" +--- + +# MongoJSON + +`MongoJSON` provides parse and stringify functions with support for MongoDB data types. It allows you to serialize MongoDB filters and documents as JSON strings while preserving type information. 
+ +## Basic Usage + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +// Parse a JSON string with MongoDB type prefixes +const filter = MongoJSON.parse('{"_id": "objectId::507f1f77bcf86cd799439011"}'); +// Result: { _id: ObjectId("507f1f77bcf86cd799439011") } + +// Stringify an object to JSON +const json = MongoJSON.stringify({ name: "John", age: 30 }); +// Result: '{"name":"John","age":30}' +``` + +## Type Prefixes + +The `parse` function recognizes special prefixes to convert strings to MongoDB types: + +| Prefix | Converts To | Example | +| ------------ | ----------- | -------------------------------------- | +| `objectId::` | `ObjectId` | `"objectId::507f1f77bcf86cd799439011"` | +| `date::` | `Date` | `"date::2024-01-15T10:30:00Z"` | +| `int::` | `number` | `"int::42"` | +| `double::` | `number` | `"double::19.99"` | + +## Parse Examples + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +// ObjectId conversion +const idFilter = MongoJSON.parse( + '{"_id": "objectId::507f1f77bcf86cd799439011"}' +); +// { _id: ObjectId("507f1f77bcf86cd799439011") } + +// Date conversion +const dateFilter = MongoJSON.parse( + '{"createdAt": {"$gte": "date::2024-01-01T00:00:00Z"}}' +); +// { createdAt: { $gte: Date("2024-01-01T00:00:00Z") } } + +// Integer conversion +const countFilter = MongoJSON.parse('{"count": {"$gt": "int::100"}}'); +// { count: { $gt: 100 } } + +// Multiple types +const complexFilter = MongoJSON.parse(`{ + "_id": "objectId::507f1f77bcf86cd799439011", + "price": {"$lte": "double::49.99"}, + "createdAt": {"$gte": "date::2024-01-01T00:00:00Z"}, + "quantity": {"$gt": "int::0"} +}`); +``` + +## Operator Blacklist + +For security, `MongoJSON.parse` blocks certain MongoDB operators by default: + +```typescript +// This will throw an error +MongoJSON.parse('{"$where": "this.name === \\"admin\\""}'); +// Error: Operator '$where' is not allowed + +// Custom blacklist +MongoJSON.parse(jsonString, ["$where", "$expr"]); +``` + +The default blacklist includes `$where` to prevent code injection attacks. + +## Use Case: API Filters + +`MongoJSON` is useful when accepting MongoDB filters from API requests: + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +app.get("/users", async (c) => { + const filterParam = c.req.query("filter"); + + // Parse the filter from query string + const filter = filterParam ? 
MongoJSON.parse(filterParam) : {}; + + const users = await userRepository.find({ filter }); + return c.json(users); +}); + +// Example request: +// GET /users?filter={"status":"active","createdAt":{"$gte":"date::2024-01-01T00:00:00Z"}} +``` + +## Error Handling + +`MongoJSON.parse` throws an `InvalidInputException` for: + +- Invalid JSON syntax +- Blacklisted operators + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +try { + const filter = MongoJSON.parse('{"invalid json}'); +} catch (error) { + // InvalidInputException with JSON parse error details +} + +try { + const filter = MongoJSON.parse('{"$where": "1===1"}'); +} catch (error) { + // Error: Operator '$where' is not allowed +} +``` diff --git a/docs/guide/mongodb/repository.md b/docs/guide/mongodb/repository.md index c669169..30fc0be 100644 --- a/docs/guide/mongodb/repository.md +++ b/docs/guide/mongodb/repository.md @@ -1 +1,260 @@ -# Nimbus MongoDB - Repository +--- +prev: + text: "Connection Manager" + link: "/guide/mongodb/connection-manager" + +next: + text: "CRUD+" + link: "/guide/mongodb/crud" +--- + +# Repository + +The `MongoDBRepository` is a type-safe base class for MongoDB CRUD operations. It provides a consistent interface for interacting with MongoDB collections while handling validation, error conversion, and document mapping. + +## Basic Usage + +Create a repository by extending `MongoDBRepository`: + +```typescript +import { MongoDBRepository } from "@nimbus/mongodb"; +import { z } from "zod"; +import { mongoManager } from "./mongodb.ts"; + +// Define your entity schema +const User = z.object({ + _id: z.string(), + email: z.string().email(), + firstName: z.string(), + lastName: z.string(), + createdAt: z.string(), + updatedAt: z.string(), +}); + +type User = z.infer; + +// Create the repository +class UserRepository extends MongoDBRepository { + constructor() { + super( + () => mongoManager.getCollection("myDatabase", "users"), + User, + "User" + ); + } +} + +export const userRepository = new UserRepository(); +``` + +## Constructor Parameters + +| Parameter | Type | Description | +| --------------- | --------------------------- | ------------------------------------------------- | +| `getCollection` | `() => Promise` | Function that returns a MongoDB collection | +| `entityType` | `ZodType` | Zod schema for validating and typing entities | +| `entityName` | `string` (optional) | Name used in error messages (default: "Document") | + +## Available Methods + +| Method | Parameters | Return Type | Description | +| ---------------- | ------------------------------------------------------ | ----------------- | --------------------------- | +| `findOne` | `{ filter }` | `Promise` | Find a single document | +| `find` | `{ filter, limit?, skip?, sort?, project?, options? }` | `Promise` | Find multiple documents | +| `countDocuments` | `{ filter, options? }` | `Promise` | Count matching documents | +| `insertOne` | `{ item }` | `Promise` | Insert a single document | +| `insertMany` | `{ items, options? }` | `Promise` | Insert multiple documents | +| `replaceOne` | `{ item, options? }` | `Promise` | Replace a document by `_id` | +| `replaceMany` | `{ items, options? }` | `Promise` | Replace multiple documents | +| `deleteOne` | `{ item, options? }` | `Promise` | Delete a document by `_id` | +| `deleteMany` | `{ items, options? 
}` | `Promise` | Delete multiple documents | + +## Document Mapping + +Override the mapping methods to control how documents are converted between MongoDB format and your entity format: + +```typescript +import { Document, ObjectId } from "mongodb"; + +class UserRepository extends MongoDBRepository { + constructor() { + super( + () => mongoManager.getCollection("myDatabase", "users"), + User, + "User" + ); + } + + // Convert MongoDB document to entity + override _mapDocumentToEntity(doc: Document): User { + return User.parse({ + _id: doc._id.toString(), + email: doc.email, + firstName: doc.firstName, + lastName: doc.lastName, + createdAt: doc.createdAt.toISOString(), + updatedAt: doc.updatedAt.toISOString(), + }); + } + + // Convert entity to MongoDB document + override _mapEntityToDocument(user: User): Document { + return { + _id: new ObjectId(user._id), + email: user.email, + firstName: user.firstName, + lastName: user.lastName, + createdAt: new Date(user.createdAt), + updatedAt: new Date(user.updatedAt), + }; + } +} +``` + +## Query Examples + +### Finding Documents + +```typescript +// Find one by filter +const user = await userRepository.findOne({ + filter: { email: "john@example.com" }, +}); + +// Find multiple with options +const users = await userRepository.find({ + filter: { lastName: "Doe" }, + limit: 10, + skip: 0, + sort: { createdAt: -1 }, +}); + +// Count documents +const count = await userRepository.countDocuments({ + filter: { lastName: "Doe" }, +}); +``` + +### Creating Documents + +```typescript +// Insert one +const newUser = await userRepository.insertOne({ + item: { + _id: new ObjectId().toString(), + email: "jane@example.com", + firstName: "Jane", + lastName: "Doe", + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + }, +}); + +// Insert many +const users = await userRepository.insertMany({ + items: [user1, user2, user3], +}); +``` + +### Updating Documents + +```typescript +// Replace a document (must include _id) +const updatedUser = await userRepository.replaceOne({ + item: { + ...existingUser, + firstName: "Jonathan", + updatedAt: new Date().toISOString(), + }, +}); +``` + +### Deleting Documents + +```typescript +// Delete one +const deletedUser = await userRepository.deleteOne({ + item: user, +}); + +// Delete many +const deletedUsers = await userRepository.deleteMany({ + items: [user1, user2], +}); +``` + +## Error Handling + +The repository automatically throws `NotFoundException` when: + +- `findOne` returns no results +- `replaceOne` matches no documents +- `deleteOne` deletes no documents + +The exception includes entity-specific error codes: + +```typescript +try { + const user = await userRepository.findOne({ + filter: { _id: "nonexistent" }, + }); +} catch (error) { + // NotFoundException with: + // - message: "User not found" + // - details.errorCode: "USER_NOT_FOUND" +} +``` + +## Add Custom Methods + +Just add new methods to the repository class as needed for your use cases. +For example if you need specific access patterns and want the consumer to be able to use them explicitly without having to provide filter logic. + +Also aggregation pipelines can be added to the repository as custom methods. + +::: tip +User the [CRUD+](/guide/mongodb/crud) methods provided by Nimbus to still have observability and error handling features baked in. +::: + +```typescript +import { aggregate, MongoDBRepository } from "@nimbus/mongodb"; + +class UserRepository extends MongoDBRepository { + // ... existing code ... 
+
+  // Add a custom method to find a user by email
+  public async findByEmail(email: string): Promise<User> {
+    return this.findOne({ filter: { email } });
+  }
+
+  // Add a custom method that uses an aggregation pipeline
+  public async getUserGroups(): Promise<UserGroup[]> {
+    const collection = await this._getCollection();
+
+    const result = await aggregate({
+      collection,
+      aggregation: [
+        {
+          $group: {
+            _id: "$group",
+            users: { $push: "$$ROOT" },
+          },
+        },
+      ],
+      mapDocument: (doc: Document) => {
+        return {
+          name: doc._id,
+          users: doc.users.map((user: Document) =>
+            this._mapDocumentToEntity(user)
+          ),
+        };
+      },
+      outputType: UserGroup,
+    });
+
+    return result;
+  }
+}
+
+export const userRepository = new UserRepository();
+```
diff --git a/docs/guide/oak/index.md b/docs/guide/oak/index.md
deleted file mode 100644
index 513682f..0000000
--- a/docs/guide/oak/index.md
+++ /dev/null
@@ -1,33 +0,0 @@
----
-prev:
-  text: "Logging"
-  link: "/guide/core/logging"
-
-next:
-  text: "Router"
-  link: "/guide/oak/router"
----
-
-# Nimbus Oak Package
-
-The Oak package provides a simple and easy-to-use interface for working with the Oak web framework.
-
-[https://jsr.io/@nimbus/oak](https://jsr.io/@nimbus/oak)
-
-### Deno
-
-```bash
-deno add jsr:@nimbus/oak
-```
-
-### NPM
-
-```bash
-npx jsr add @nimbus/oak
-```
-
-### Bun
-
-```bash
-bunx jsr add @nimbus/oak
-```
diff --git a/docs/guide/oak/middleware.md b/docs/guide/oak/middleware.md
deleted file mode 100644
index 29caa56..0000000
--- a/docs/guide/oak/middleware.md
+++ /dev/null
@@ -1 +0,0 @@
-# Nimbus Oak - Middleware
diff --git a/docs/guide/oak/router.md b/docs/guide/oak/router.md
deleted file mode 100644
index ccd32ef..0000000
--- a/docs/guide/oak/router.md
+++ /dev/null
@@ -1 +0,0 @@
-# Nimbus Oak - Router
diff --git a/docs/guide/observability.md b/docs/guide/observability.md
new file mode 100644
index 0000000..3c2db89
--- /dev/null
+++ b/docs/guide/observability.md
@@ -0,0 +1,226 @@
+---
+prev:
+  text: "Quickstart"
+  link: "/guide/quickstart"
+
+next:
+  text: "Commands"
+  link: "/guide/core/commands"
+---
+
+# Observability
+
+Observability is a first-class citizen in Nimbus. The framework is designed so that developers can focus on business logic without implementing logging, tracing, and metrics from the ground up.
+
+## Philosophy
+
+Nimbus follows the principle that observability should be built-in, not bolted-on. Every core component - from message routing to event handling - comes with automatic instrumentation. This means:
+
+- **Zero boilerplate** - Tracing spans and metrics are created automatically
+- **Consistent structure** - All logs follow the same format across your application
+- **Correlation built-in** - Every message carries a correlation ID for distributed tracing
+
+The three pillars of observability in Nimbus:
+
+1. **Logging** - Structured console output with configurable formatters
+2. **Tracing** - Distributed traces via OpenTelemetry spans
+3. **Metrics** - Counters and histograms for monitoring
+
+## OpenTelemetry Standards
+
+Nimbus uses the [OpenTelemetry API](https://opentelemetry.io/) (`@opentelemetry/api`) for all observability instrumentation. This provides:
+
+- **Vendor-agnostic** - Export to any OTLP-compatible backend (Jaeger, Zipkin, Grafana, Honeycomb, Datadog, etc.)
+- **Industry standard** - Wide ecosystem support and community adoption +- **Future-proof** - Backed by CNCF with active development + +## Deno Native Observability + +Nimbus builds upon [Deno's native OpenTelemetry support](https://docs.deno.com/runtime/fundamentals/open_telemetry/). With Deno 2.x, you can enable OTEL export with zero additional dependencies. + +### Enabling OpenTelemetry + +Set environment variables to enable OTEL export: + +```bash +export OTEL_DENO=true +export OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" +export OTEL_EXPORTER_OTLP_ENDPOINT="https://your-otlp-endpoint.com/otlp" +export OTEL_SERVICE_NAME=your-service-name +export OTEL_RESOURCE_ATTRIBUTES=deployment.environment=production + +deno run -A src/main.ts +``` + +All traces, metrics, and logs from Nimbus will automatically be exported to your configured backend. + +## Built-in Instrumentation + +### MessageRouter + +The [MessageRouter](/guide/core/router) automatically creates spans for every routed message: + +**Tracing:** + +- Span name: `router.route` +- Attributes: `messaging.system`, `messaging.router_name`, `messaging.destination`, `correlation_id` + +**Metrics:** + +- `router_messages_routed_total` - Counter for total messages routed (with `status: success|error`) +- `router_routing_duration_seconds` - Histogram of routing duration + +### EventBus + +The [NimbusEventBus](/guide/core/event-bus) instruments both publishing and handling: + +**Tracing:** + +- `eventbus.publish` span for event publishing +- `eventbus.handle` span for event handling with retry tracking + +**Metrics:** + +- `eventbus_events_published_total` - Counter for published events +- `eventbus_events_delivered_total` - Counter for delivered events (with `status: success|error`) +- `eventbus_event_handling_duration_seconds` - Histogram of handler execution time +- `eventbus_retry_attempts_total` - Counter for retry attempts +- `eventbus_event_size_bytes` - Histogram of event sizes + +### Logger + +The [Logger](/guide/core/logging) outputs structured logs to the console. When combined with Deno's OTEL support, logs are automatically exported alongside traces and metrics. 
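+
+None of the instrumentation above requires extra code. As a rough illustration, the sketch below registers a handler and dispatches a command; the schema and handler mirror the command example from the core guides, while the `router.route()` call is an assumption based on the `router.route` span name - check the [MessageRouter](/guide/core/router) guide for the exact dispatch API.
+
+```typescript
+import { commandSchema, createCommand, getRouter } from "@nimbus/core";
+import { z } from "zod";
+
+// A minimal command schema for the example
+const pingCommandSchema = commandSchema.extend({
+  type: z.literal("com.example.ping"),
+  data: z.object({ message: z.string() }),
+});
+
+type PingCommand = z.infer<typeof pingCommandSchema>;
+
+const router = getRouter("MyRouter");
+
+router.register(
+  "com.example.ping",
+  async (command: PingCommand) => {
+    return { echo: command.data.message };
+  },
+  pingCommandSchema
+);
+
+// Dispatching the command automatically produces a `router.route` span and
+// updates `router_messages_routed_total` / `router_routing_duration_seconds`.
+// Note: `route()` is assumed here for illustration; see the router guide.
+await router.route(
+  createCommand({
+    type: "com.example.ping",
+    source: "nimbus.example",
+    data: { message: "hello" },
+  })
+);
+```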
+ +## Custom Tracing with withSpan() + +For business logic that needs custom tracing, use the `withSpan()` higher-order function: + +```typescript +import { withSpan } from "@nimbus/core"; + +const fetchUser = withSpan( + { + name: "fetchUser", + attributes: { + "user.source": "database", + }, + }, + async (userId: string) => { + return await db.users.findById(userId); + } +); + +// Usage - automatically traced +const user = await fetchUser("123"); +``` + +### Adding Dynamic Attributes + +Access the span within your function to add attributes based on runtime data: + +```typescript +import { withSpan } from "@nimbus/core"; +import { Span } from "@opentelemetry/api"; + +const processOrder = withSpan( + { name: "processOrder" }, + async (orderId: string, span: Span) => { + const order = await db.orders.findById(orderId); + + // Add attributes based on the order + span.setAttribute("order.total", order.total); + span.setAttribute("order.items", order.items.length); + + return await processPayment(order); + } +); +``` + +### Options + +| Option | Type | Description | +| ------------ | ------------ | ------------------------------------------ | +| `name` | `string` | The span name displayed in your tracing UI | +| `tracerName` | `string` | Tracer name (defaults to `"nimbus"`) | +| `kind` | `SpanKind` | Span kind (defaults to `INTERNAL`) | +| `attributes` | `Attributes` | Initial attributes to set on the span | + +## Correlation IDs + +All messages in Nimbus (Commands, Queries, Events) carry a `correlationid` field. This enables: + +- **Request tracing** - Follow a request through commands, events, and queries +- **Log correlation** - Group related logs together +- **Distributed tracing** - Track requests across services + +The correlation ID is automatically: + +- Generated when creating messages with `createCommand()`, `createQuery()`, or `createEvent()` +- Propagated from commands to events they produce +- Included in log output when provided +- Added as a span attribute for tracing + +```typescript +// Correlation ID is passed from command to event +const command = createCommand({ + type: ADD_USER_COMMAND_TYPE, + source: "nimbus.overlap.at", + correlationid: getCorrelationId(c), // From HTTP request + data: body, +}); + +// In the handler, create event with same correlation ID +const event = createEvent({ + type: USER_ADDED_EVENT_TYPE, + source: "nimbus.overlap.at", + correlationid: command.correlationid, // Propagate + data: state, +}); +``` + +## Best Practices + +### Use Structured Logging + +Always use the structured logger instead of `console.log`: + +```typescript +import { getLogger } from "@nimbus/core"; + +// Good - structured and traceable +getLogger().info({ + message: "User created", + category: "Users", + data: { userId: user.id }, + correlationId: command.correlationid, +}); + +// Avoid - unstructured +console.log("User created:", user.id); +``` + +### Propagate Correlation IDs + +Always pass correlation IDs when creating events from commands: + +```typescript +const event = createEvent({ + type: USER_ADDED_EVENT_TYPE, + source: "nimbus.overlap.at", + correlationid: command.correlationid, // Always propagate + data: state, +}); +``` + +### Use withSpan for Important Operations + +Wrap critical business logic with `withSpan()` for visibility: + +```typescript +const validatePayment = withSpan( + { name: "validatePayment" }, + async (paymentDetails: PaymentDetails) => { + // Critical logic is now traced + } +); +``` diff --git a/docs/guide/project-structure.md 
b/docs/guide/project-structure.md
deleted file mode 100644
index b02abe3..0000000
--- a/docs/guide/project-structure.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# Project Structure
-
-Nimbus is not opinionated about the project structure and you can adjust it to your needs. But here is a suggestion on how to structure your project.
-
-Let's say we are building an application to track expenses so we can come up with the following structure based on a Domain-Driven-Design (DDD) approach.
-
-::: info Example Application
-You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense)
-
-Check it out and run it with `deno task dev`
-:::
-
-```
-/-
- |- src
- |- account
- |- core
- |- shell
- |- auth
- |- core
- |- shell
- |- another-domain
- |- context-one
- |- core
- |- shell
- |- context-two
- |- core
- |- shell
- |- shared
- |- ...
- |- main.ts
- |- .gitignore
- |- deno.json
- |- deno.lock
- |- README.md
-```
-
-At first we want to separate the different domains and contexts of our problem. So we create a directory for each domain like `account` and `auth`. We also have a `shared` directory for things that are used across multiple domains. As seen in the example above, we can separate multiple contexts within a domain into their own directories like `context-one` and `context-two` under `another-domain`.
-
-And secondly we want to separate the core logic from the shell implementation. So we create a `core` and a `shell` directory in each context.
diff --git a/docs/guide/quickstart.md b/docs/guide/quickstart.md
index 2f8e22c..22f1e7c 100644
--- a/docs/guide/quickstart.md
+++ b/docs/guide/quickstart.md
@@ -4,9 +4,11 @@ To get started with Nimbus you need to install the [@nimbus/core](https://jsr.io
 
 ## Dependencies
 
-Nimbus tries to keep dependencies as low as possible, but there are some packages that are necessary to run Nimbus.
+Nimbus tries to keep dependencies as low as possible.
+These are the dependencies Nimbus relies on.
 
-For type safety at runtime Nimbus relies on [Zod](https://zod.dev/).
+- [Deno Standard Library](https://docs.deno.com/runtime/fundamentals/standard_library/) ([@std](https://jsr.io/@std))
+- [Zod](https://zod.dev/)
 
 ## Installation
@@ -15,19 +17,17 @@ Depending on your runtime you can install Nimbus with the following commands.
 
 ### Deno
 
 ```bash
-deno add jsr:@nimbus/core npm:zod
+deno add jsr:@nimbus/core
 ```
 
 ### NPM
 
 ```bash
-npm install zod
 npx jsr add @nimbus/core
 ```
 
 ### Bun
 
 ```bash
-bun add zod
 bunx jsr add @nimbus/core
 ```
diff --git a/docs/guide/utils/get-env.md b/docs/guide/utils/get-env.md
index dcdfe7e..830ad2f 100644
--- a/docs/guide/utils/get-env.md
+++ b/docs/guide/utils/get-env.md
@@ -1 +1,133 @@
-# getEnv()
+---
+prev:
+  text: "Nimbus Utils"
+  link: "/guide/utils"
+
+next: false
+---
+
+# getEnv
+
+The `getEnv` function retrieves environment variables with validation. It throws an exception if any requested variables are missing, ensuring your application fails fast with clear error messages.
+ +## Basic Usage + +```typescript +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["DATABASE_URL", "API_KEY", "PORT"], +}); + +console.log(env.PORT); +``` + +## Function Signature + +```typescript +getEnv({ variables: string[] }): Record +``` + +| Parameter | Type | Description | +| ----------- | ---------- | ----------------------------------- | +| `variables` | `string[]` | Array of environment variable names | + +| Returns | Description | +| ------------------------ | --------------------------------------------- | +| `Record` | Object with variable names as keys and values | + +## Error Handling + +If any requested variables are undefined, `getEnv` throws a `GenericException` with details about all missing variables: + +```typescript +import { getEnv } from "@nimbus/utils"; + +try { + const env = getEnv({ + variables: ["MISSING_VAR_1", "MISSING_VAR_2"], + }); +} catch (error) { + // GenericException with: + // - message: "Undefined environment variables" + // - details: { undefinedVariables: ["MISSING_VAR_1", "MISSING_VAR_2"] } +} +``` + +The error is also logged before throwing: + +``` +[Nimbus] ERROR :: Undefined environment variables +{ undefinedVariables: ["MISSING_VAR_1", "MISSING_VAR_2"] } +``` + +## Use Cases + +### Application Configuration + +```typescript +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["NODE_ENV", "PORT", "DATABASE_URL", "REDIS_URL", "JWT_SECRET"], +}); + +export const config = { + nodeEnv: env.NODE_ENV, + port: parseInt(env.PORT, 10), + databaseUrl: env.DATABASE_URL, + redisUrl: env.REDIS_URL, + jwtSecret: env.JWT_SECRET, +}; +``` + +### MongoDB Connection + +```typescript +import { MongoConnectionManager } from "@nimbus/mongodb"; +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["MONGO_URL", "MONGO_DB"], +}); + +const mongoManager = MongoConnectionManager.getInstance(env.MONGO_URL, { + mongoClientOptions: { appName: "my-app" }, +}); + +export const getCollection = (name: string) => + mongoManager.getCollection(env.MONGO_DB, name); +``` + +### Repository Configuration + +```typescript +import { MongoDBRepository } from "@nimbus/mongodb"; +import { getEnv } from "@nimbus/utils"; +import { mongoManager } from "./mongodb.ts"; + +class UserRepository extends MongoDBRepository { + constructor() { + const env = getEnv({ variables: ["MONGO_DB"] }); + + super( + () => mongoManager.getCollection(env.MONGO_DB, "users"), + UserSchema, + "User" + ); + } +} +``` + +### External Service Configuration + +```typescript +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["STRIPE_API_KEY", "STRIPE_WEBHOOK_SECRET"], +}); + +export const stripeClient = new Stripe(env.STRIPE_API_KEY); +export const webhookSecret = env.STRIPE_WEBHOOK_SECRET; +``` diff --git a/docs/guide/utils/index.md b/docs/guide/utils/index.md index 6ebe4cb..eae9871 100644 --- a/docs/guide/utils/index.md +++ b/docs/guide/utils/index.md @@ -1,7 +1,7 @@ --- prev: - text: "CRUD+" - link: "/guide/mongodb/crud" + text: "handleMongoError" + link: "/guide/mongodb/handle-mongo-error" next: text: "getEnv" diff --git a/docs/guide/what-is-nimbus.md b/docs/guide/what-is-nimbus.md index 515a787..26080d9 100644 --- a/docs/guide/what-is-nimbus.md +++ b/docs/guide/what-is-nimbus.md @@ -1,64 +1,131 @@ # What is Nimbus? -:::tip The Goal: Keep it simple all the way! +Nimbus is a lightweight TypeScript framework for building event-driven applications. 
It provides type-safe messaging patterns (Commands, Queries, Events) following the [CloudEvents](https://cloudevents.io/) specification, with built-in observability powered by [OpenTelemetry](https://opentelemetry.io/).
-No complex object-oriented patterns, no clunky abstractions, no magic.
-Just easily understandable and type-safe code.
-:::
+## Philosophy
-Nimbus aims to be a simple framework for building event-driven applications in Typescript.
+Nimbus is built on a few core principles that set it apart from other TypeScript frameworks.
-## Pure Core - Imperative Shell
+
+**Simplicity**
+
+Nimbus aims to keep things simple and to avoid overly complex OOP or FP principles. No complex inheritance hierarchies, no dependency injection, no decorators. Just explicit code that is easy to understand and reason about.
+
+**No Framework Magic**
+
+Three lines of code to build a whole API is great, until something goes wrong and you have no clue why the magic stopped working.
+
+**Flat Learning Curve**
+
+There are already great frameworks like [NestJS](https://nestjs.com/) and [Effect](https://effect.website/) out there for building TypeScript applications.
+
+While those frameworks lean heavily on either object-oriented or functional programming patterns, this comes at the cost of a steep learning curve. Nimbus aims to keep the learning curve as flat as possible.
+
+Be productive right from the start.
+
+## Who Is This For?
+
+Nimbus is a good fit if you are:
+
+- Building event-driven applications
+- Looking for explicit, traceable code without hidden magic
+- Wanting built-in observability without complex setup
+- Preferring a lightweight framework over heavyweight solutions
+
+## Key Features
+
+- **CloudEvents-based messaging** - Commands, Queries, and Events following the industry-standard [CloudEvents](https://cloudevents.io/) specification
+- **Built-in observability** - Logging, tracing, and metrics via [OpenTelemetry](https://opentelemetry.io/) with zero boilerplate
+- **Type-safe validation** - Message validation with [Zod](https://zod.dev/) schemas
+- **MongoDB integration** - Repository pattern and CRUD operations with automatic tracing
+- **Hono middleware** - Ready-to-use middleware for HTTP APIs
+- **Runtime flexibility** - Deno-first with NPM and Bun support
+
+## A Taste of Nimbus
+
+Here's a quick look at how you define and handle a command in Nimbus:
+
+```typescript
+import { commandSchema, createCommand, getRouter } from "@nimbus/core";
+import { z } from "zod";
+
+// Define a type-safe command schema
+const addUserCommandSchema = commandSchema.extend({
+  type: z.literal("com.example.add-user"),
+  data: z.object({
+    email: z.string().email(),
+    name: z.string(),
+  }),
+});
+
+type AddUserCommand = z.infer<typeof addUserCommandSchema>;
+
+// Register a handler with automatic validation and tracing
+const router = getRouter("MyRouter");
+
+router.register(
+  "com.example.add-user",
+  async (command: AddUserCommand) => {
+    // Your business logic here
+    return { userId: "123", email: command.data.email };
+  },
+  addUserCommandSchema
+);
+```
+
+## Architecture Recommendation
+
+We recommend building your application around the idea of a **Pure Core** and an **Imperative Shell**. This approach aligns well with Hexagonal Architecture (Ports & Adapters) and is a good foundation for patterns like CQRS and Event Sourcing.
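+
+To make the split concrete, here is a minimal, illustrative sketch (the `Expense` type and the in-memory `db` map are placeholders, not part of Nimbus): the core is a pure function, and the shell does the I/O around it.
+
+```typescript
+// Pure core: no I/O, just domain logic on typed inputs.
+type Expense = { id: string; amount: number; approved: boolean };
+
+export const approveExpense = (expense: Expense, limit: number): Expense => {
+  if (expense.amount > limit) {
+    throw new Error("Expense exceeds approval limit");
+  }
+  return { ...expense, approved: true };
+};
+
+// Imperative shell: fetch, call the core, persist, respond.
+// A Map stands in for the database to keep the sketch self-contained.
+const db = new Map<string, Expense>([
+  ["1", { id: "1", amount: 250, approved: false }],
+]);
+
+export const approveExpenseHandler = async (id: string): Promise<Expense> => {
+  const expense = db.get(id); // I/O boundary (database read)
+  if (!expense) {
+    throw new Error("Expense not found");
+  }
+
+  const approved = approveExpense(expense, 1_000); // pure core, easy to unit test
+
+  db.set(id, approved); // I/O boundary (database write)
+  return approved;
+};
+```
+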
![Illustration of the pure core imperative shell architecture](/nimbus-pure-core-imperative-shell.svg) ### The Pure Core -As our business logic - the things that make our application unique - is the most valuable part of our code, we should be able to focus on it without worrying about outside dependencies or side effects interfering with it. +The business logic represents the most valuable part of any application. It should be focused, testable, and free from external dependencies. -So the main goal is to keep all the business logic inside the pure core of our application. +The pure core contains domain logic that: -The core only accepts type safe inputs and returns type safe outputs. It is side-effect free and can be tested easily by running functions with different inputs and comparing the outputs. +- Accepts type-safe inputs and returns type-safe outputs +- Has no side effects (no I/O operations) +- Can be tested by running functions with different inputs and comparing outputs - no mocking needed! +- Represents the unique value proposition of the application ### The Imperative Shell -As we for sure need to interact with the outside world, we need to have a place that is responsible for all the I/O operations like HTTP calls, database interactions, or filesystem operations. - -This place is in the shell of our application. It is responsible for the side effects and connects all external interactions with the pure core. +The shell handles all interactions with the outside world - HTTP requests, database operations, file system access, and other I/O operations. It orchestrates the pure core by providing it with data and persisting the results. -Nimbus goal is to reduce the shell overhead in the first place and do the heavy lifting for you if necessary. +The shell's responsibilities include: -### Shell to Core to Shell +- Receiving external input (HTTP requests, messages, etc.) +- Fetching data from external sources +- Calling pure core functions +- Persisting results +- Sending responses -The flow of information always goes from the shell to the core and back to the shell. -This means the shell can call the core at any time but the core will never call the shell. +### Flow of Information -![Illustration of the flow of information](/nimbus-flow-of-information.svg) - -When we look at an example of an HTTP API, the shell handles the incoming HTTP request, reads from the Database, calls the core, writes changes to the Database, and sends the response back to the client. +Information flows in one direction: **Shell → Core → Shell** -Sometimes it is necessary to run business logic with some information from the database before executing another database query based on the logic's result. In this case the core functions can be split into multiple parts so the shell can call them in the right order. +The shell can call the core at any time, but the core never calls the shell. This unidirectional flow ensures that business logic remains pure and testable. -### Thoughts? +![Illustration of the flow of information](/nimbus-flow-of-information.svg) -:::info Isn't it called Functional Core, Imperative Shell? -As stated above Nimbus goal is to keep it simple and therefore avoid overly complex OOP (Object-Oriented-Programming) principles. The same goes for overly complex FP (Functional Programming) principles. +In an HTTP API scenario: -That is why Nimbus prefers the term **Pure Core** as it can, but not have to follow FP patterns. -::: +1. Shell receives HTTP request +2. 
Shell fetches necessary data from database
+3. Shell calls core business logic
+4. Shell persists results to database
+5. Shell sends HTTP response
-
-:::info Isn't it called a Hexagonal Architecture?
-Nimbus can fit nicely into an App with Hexagonal Architecture.
-But if you do not want to follow this pattern, you are still able to use Nimbus without defining ports and adapters for everything.
-:::
+For complex scenarios requiring multiple database queries with business logic in between, core functions can be composed and called sequentially by the shell.
-## Event Driven
+## Testing Recommendation
-If the real world is asynchronous, why should your application be synchronous?
+- Unit tests for the pure core.
+- E2E tests to ensure the whole system works.
-## Deno & JSR
+As the name "pure" core already implies, no side effects are allowed. This makes it easy to test the core by running functions with different inputs and comparing outputs - no mocking needed!
-**"Keep it simple all the way."**
-That is why Nimbus is built with [Deno](https://deno.com) and published on [jsr.io](https://jsr.io/packages?search=@nimbus)
+The core is also the most important part of your application, as it holds your whole business logic. Fast and easy-to-write unit tests there give you the most bang for your buck.
-Nimbus is a Typescript framework and runs with every Node.js compatible runtime. But it is recommended to try it with Deno.
+End-to-end tests ensure that all parts of your application work together as expected.
diff --git a/docs/index.md b/docs/index.md
index 83a2443..ae728d1 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -25,9 +25,9 @@ features:
   - title: Core Concepts
     details: Compose an application with commands, events, and queries.
   - title: Simple Design
-    details: No need for clunky and complex object-oriented design principles.
-  - title: Pure Core
-    details: The only rule to follow is to keep your core logic pure and side-effect free.
-  - title: Type Safety
-    details: Ensure type safety for all I/O sources.
+    details: No clunky and complex OOP or FP design principles.
+  - title: No Framework Magic
+    details: Just explicit code without any magic.
+  - title: Observability Built-In
+    details: Logging, tracing, and metrics. Batteries included.
--- diff --git a/docs/package-lock.json b/docs/package-lock.json index f6dfb17..0a67793 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -6,7 +6,22 @@ "": { "name": "@nimbus/docs", "dependencies": { - "vitepress": "^1.5.0" + "vitepress": "1.6.4" + } + }, + "node_modules/@algolia/abtesting": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.5.0.tgz", + "integrity": "sha512-W/ohRkbKQsqDWALJg28X15KF7Tcyg53L1MfdOkLgvkcCcofdzGHSimHHeNG05ojjFw9HK8+VPhe/Vwq4MozIJg==", + "license": "MIT", + "dependencies": { + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" + }, + "engines": { + "node": ">= 14.0.0" } }, "node_modules/@algolia/autocomplete-core": { @@ -55,210 +70,210 @@ } }, "node_modules/@algolia/client-abtesting": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.15.0.tgz", - "integrity": "sha512-FaEM40iuiv1mAipYyiptP4EyxkJ8qHfowCpEeusdHUC4C7spATJYArD2rX3AxkVeREkDIgYEOuXcwKUbDCr7Nw==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.39.0.tgz", + "integrity": "sha512-Vf0ZVe+qo3sHDrCinouJqlg8VoxM4Qo/KxNIqMYybkuctutfnp3kIY9OmESplOQ/9NGBthU9EG+4d5fBibWK/A==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-analytics": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.15.0.tgz", - "integrity": "sha512-lho0gTFsQDIdCwyUKTtMuf9nCLwq9jOGlLGIeQGKDxXF7HbiAysFIu5QW/iQr1LzMgDyM9NH7K98KY+BiIFriQ==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.39.0.tgz", + "integrity": "sha512-V16ITZxYIwcv1arNce65JZmn94Ft6vKlBZ//gXw8AvIH32glJz1KcbaVAUr9p7PYlGZ/XVHP6LxDgrpNdtwgcA==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-common": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", - "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.39.0.tgz", + "integrity": "sha512-UCJTuwySEQeiKPWV3wruhuI/wHbDYenHzgL9pYsvh6r/u5Z+g61ip1iwdAlFp02CnywzI9O7+AQPh2ManYyHmQ==", "license": "MIT", "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-insights": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.15.0.tgz", - "integrity": "sha512-bDDEQGfFidDi0UQUCbxXOCdphbVAgbVmxvaV75cypBTQkJ+ABx/Npw7LkFGw1FsoVrttlrrQbwjvUB6mLVKs/w==", 
+ "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.39.0.tgz", + "integrity": "sha512-s0ia8M/ZZR+iO2uLNTBrlQdEb6ZMAMcKMHckp5mcoglxrf8gHifL4LmdhGKdAxAn3UIagtqIP0RCnIymHUbm7A==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-personalization": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.15.0.tgz", - "integrity": "sha512-LfaZqLUWxdYFq44QrasCDED5bSYOswpQjSiIL7Q5fYlefAAUO95PzBPKCfUhSwhb4rKxigHfDkd81AvEicIEoA==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.39.0.tgz", + "integrity": "sha512-vZPIt7Lw+toNsHZUiPhNIc1Z3vUjDp7nzn6AMOaPC73gEuTq2iLPNvM06CSB6aHePo5eMeJIP5YEKBUQUA/PJA==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-query-suggestions": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.15.0.tgz", - "integrity": "sha512-wu8GVluiZ5+il8WIRsGKu8VxMK9dAlr225h878GGtpTL6VBvwyJvAyLdZsfFIpY0iN++jiNb31q2C1PlPL+n/A==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.39.0.tgz", + "integrity": "sha512-jcPQr3iKTWNVli2NYHPv02aNLwixDjPCpOgMp9CZTvEiPI6Ec4jHX+oFr3LDZagOFY9e1xJhc/JrgMGGW1sHnw==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-search": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.15.0.tgz", - "integrity": "sha512-Z32gEMrRRpEta5UqVQA612sLdoqY3AovvUPClDfMxYrbdDAebmGDVPtSogUba1FZ4pP5dx20D3OV3reogLKsRA==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.39.0.tgz", + "integrity": "sha512-/IYpF10BpthGZEJQZMhMqV4AqWr5avcWfZm/SIKK1RvUDmzGqLoW/+xeJVX9C8ZnNkIC8hivbIQFaNaRw0BFZQ==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/ingestion": { - "version": "1.15.0", - 
"resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.15.0.tgz", - "integrity": "sha512-MkqkAxBQxtQ5if/EX2IPqFA7LothghVyvPoRNA/meS2AW2qkHwcxjuiBxv4H6mnAVEPfJlhu9rkdVz9LgCBgJg==", + "version": "1.39.0", + "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.39.0.tgz", + "integrity": "sha512-IgSHKUiuecqLfBlXiuCSdRTdsO3/yvpmXrMFz8fAJ8M4QmDtHkOuD769dmybRYqsbYMHivw+lir4BgbRGMtOIQ==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/monitoring": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.15.0.tgz", - "integrity": "sha512-QPrFnnGLMMdRa8t/4bs7XilPYnoUXDY8PMQJ1sf9ZFwhUysYYhQNX34/enoO0LBjpoOY6rLpha39YQEFbzgKyQ==", + "version": "1.39.0", + "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.39.0.tgz", + "integrity": "sha512-8Xnd4+609SKC/hqVsuFc4evFBmvA2765/4NcH+Dpr756SKPbL1BY0X8kVxlmM3YBLNqnduSQxHxpDJUK58imCA==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/recommend": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.15.0.tgz", - "integrity": "sha512-5eupMwSqMLDObgSMF0XG958zR6GJP3f7jHDQ3/WlzCM9/YIJiWIUoJFGsko9GYsA5xbLDHE/PhWtq4chcCdaGQ==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.39.0.tgz", + "integrity": "sha512-D7Ye2Ss/5xqUkQUxKm/VqEJLt5kARd9IMmjdzlxaKhGgNlOemTay0lwBmOVFuJRp7UODjp5c9+K+B8g0ORObIw==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/requester-browser-xhr": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", - "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.39.0.tgz", + "integrity": "sha512-mgPte1ZJqpk9dkVs44J3wKAbHATvHZNlSpzhMdjMLIg/3qTycSZyDiomLiSlxE8CLsxyBAOJWnyKRHfom+Z1rg==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0" + "@algolia/client-common": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/requester-fetch": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.15.0.tgz", - "integrity": 
"sha512-rOZ+c0P7ajmccAvpeeNrUmEKoliYFL8aOR5qGW5pFq3oj3Iept7Y5mEtEsOBYsRt6qLnaXn4zUKf+N8nvJpcIw==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.39.0.tgz", + "integrity": "sha512-LIrCkrxu1WnO3ev1+w6NnZ12JZL/o+2H9w6oWnZAjQZIlA/Ym6M9QHkt+OQ/SwkuoiNkW3DAo+Pi4A2V9FPtqg==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0" + "@algolia/client-common": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/requester-node-http": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", - "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.39.0.tgz", + "integrity": "sha512-6beG+egPwXmvhAg+m0STCj+ZssDcjrLzf4L05aKm2nGglMXSSPz0cH/rM+kVD9krNfldiMctURd4wjojW1fV0w==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0" + "@algolia/client-common": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.26.3", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.3.tgz", - "integrity": "sha512-WJ/CvmY8Mea8iDXo6a7RK2wbmJITT5fN3BEkRuFlxVyNx8jOKIIhmC4fSkTcPcf8JyavbBwIe6OpiCOBXt/IcA==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", "license": "MIT", "dependencies": { - "@babel/types": "^7.26.3" + "@babel/types": "^7.28.4" }, "bin": { "parser": "bin/babel-parser.js" @@ -268,44 +283,44 @@ } }, "node_modules/@babel/types": { - "version": "7.26.3", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.3.tgz", - "integrity": "sha512-vN5p+1kl59GVKMvTHt55NzzmYVxprfJD+ql7U9NFIfKCBkYE55LYtS+WtPlaYOyzydrKI8Nezd+aZextrd+FMA==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - 
"@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@docsearch/css": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.8.0.tgz", - "integrity": "sha512-pieeipSOW4sQ0+bE5UFC51AOZp9NGxg89wAlZ1BAQFaiRAGK1IKUaPQ0UGZeNctJXyqZ1UvBtOQh2HH+U5GtmA==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.8.2.tgz", + "integrity": "sha512-y05ayQFyUmCXze79+56v/4HpycYF3uFqB78pLPrSV5ZKAlDuIAAJNhaRi8tTdRNXh05yxX/TyNnzD6LwSM89vQ==", "license": "MIT" }, "node_modules/@docsearch/js": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.8.0.tgz", - "integrity": "sha512-PVuV629f5UcYRtBWqK7ID6vNL5647+2ADJypwTjfeBIrJfwPuHtzLy39hMGMfFK+0xgRyhTR0FZ83EkdEraBlg==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.8.2.tgz", + "integrity": "sha512-Q5wY66qHn0SwA7Taa0aDbHiJvaFJLOJyHmooQ7y8hlwwQLQ/5WwCcoX0g7ii04Qi2DJlHsd0XXzJ8Ypw9+9YmQ==", "license": "MIT", "dependencies": { - "@docsearch/react": "3.8.0", + "@docsearch/react": "3.8.2", "preact": "^10.0.0" } }, "node_modules/@docsearch/react": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.8.0.tgz", - "integrity": "sha512-WnFK720+iwTVt94CxY3u+FgX6exb3BfN5kE9xUY6uuAH/9W/UFboBZFLlrw/zxFRHoHZCOXRtOylsXF+6LHI+Q==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.8.2.tgz", + "integrity": "sha512-xCRrJQlTt8N9GU0DG4ptwHRkfnSnD/YpdeaXe02iKfqs97TkZJv60yE+1eq/tjPcVnTW8dP5qLP7itifFVV5eg==", "license": "MIT", "dependencies": { "@algolia/autocomplete-core": "1.17.7", "@algolia/autocomplete-preset-algolia": "1.17.7", - "@docsearch/css": "3.8.0", - "algoliasearch": "^5.12.0" + "@docsearch/css": "3.8.2", + "algoliasearch": "^5.14.2" }, "peerDependencies": { "@types/react": ">= 16.8.0 < 19.0.0", @@ -697,9 +712,9 @@ } }, "node_modules/@iconify-json/simple-icons": { - "version": "1.2.14", - "resolved": "https://registry.npmjs.org/@iconify-json/simple-icons/-/simple-icons-1.2.14.tgz", - "integrity": "sha512-zLqb48pM1B5vegMBDouyv7FzrROV5HRIjDpl+/PKjY3P7AeSySaOeT6mzutF6hDZCJvn1J7qQ7lug3FOgegiiA==", + "version": "1.2.54", + "resolved": "https://registry.npmjs.org/@iconify-json/simple-icons/-/simple-icons-1.2.54.tgz", + "integrity": "sha512-OQQYl8yC5j3QklZOYnK31QYe5h47IhyCoxSLd53f0e0nA4dgi8VOZS30SgSAbsecQ+S0xlGJMjXIHTIqZ+ML3w==", "license": "CC0-1.0", "dependencies": { "@iconify/types": "*" @@ -712,15 +727,15 @@ "license": "MIT" }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "license": "MIT" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.28.0.tgz", - "integrity": "sha512-wLJuPLT6grGZsy34g4N1yRfYeouklTgPhH1gWXCYspenKYD0s3cR99ZevOGw5BexMNywkbV3UkjADisozBmpPQ==", + "version": "4.52.4", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.4.tgz", + "integrity": "sha512-BTm2qKNnWIQ5auf4deoetINJm2JzvihvGb9R6K/ETwKLql/Bb3Eg2H1FBp1gUb4YGbydMA3jcmQTR73q7J+GAA==", "cpu": [ "arm" ], @@ -731,9 +746,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.28.0.tgz", - "integrity": "sha512-eiNkznlo0dLmVG/6wf+Ifi/v78G4d4QxRhuUl+s8EWZpDewgk7PX3ZyECUXU0Zq/Ca+8nU8cQpNC4Xgn2gFNDA==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.4.tgz", + "integrity": "sha512-P9LDQiC5vpgGFgz7GSM6dKPCiqR3XYN1WwJKA4/BUVDjHpYsf3iBEmVz62uyq20NGYbiGPR5cNHI7T1HqxNs2w==", "cpu": [ "arm64" ], @@ -744,9 +759,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.28.0.tgz", - "integrity": "sha512-lmKx9yHsppblnLQZOGxdO66gT77bvdBtr/0P+TPOseowE7D9AJoBw8ZDULRasXRWf1Z86/gcOdpBrV6VDUY36Q==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.4.tgz", + "integrity": "sha512-QRWSW+bVccAvZF6cbNZBJwAehmvG9NwfWHwMy4GbWi/BQIA/laTIktebT2ipVjNncqE6GLPxOok5hsECgAxGZg==", "cpu": [ "arm64" ], @@ -757,9 +772,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.28.0.tgz", - "integrity": "sha512-8hxgfReVs7k9Js1uAIhS6zq3I+wKQETInnWQtgzt8JfGx51R1N6DRVy3F4o0lQwumbErRz52YqwjfvuwRxGv1w==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.4.tgz", + "integrity": "sha512-hZgP05pResAkRJxL1b+7yxCnXPGsXU0fG9Yfd6dUaoGk+FhdPKCJ5L1Sumyxn8kvw8Qi5PvQ8ulenUbRjzeCTw==", "cpu": [ "x64" ], @@ -770,9 +785,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.28.0.tgz", - "integrity": "sha512-lA1zZB3bFx5oxu9fYud4+g1mt+lYXCoch0M0V/xhqLoGatbzVse0wlSQ1UYOWKpuSu3gyN4qEc0Dxf/DII1bhQ==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.4.tgz", + "integrity": "sha512-xmc30VshuBNUd58Xk4TKAEcRZHaXlV+tCxIXELiE9sQuK3kG8ZFgSPi57UBJt8/ogfhAF5Oz4ZSUBN77weM+mQ==", "cpu": [ "arm64" ], @@ -783,9 +798,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.28.0.tgz", - "integrity": "sha512-aI2plavbUDjCQB/sRbeUZWX9qp12GfYkYSJOrdYTL/C5D53bsE2/nBPuoiJKoWp5SN78v2Vr8ZPnB+/VbQ2pFA==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.4.tgz", + "integrity": "sha512-WdSLpZFjOEqNZGmHflxyifolwAiZmDQzuOzIq9L27ButpCVpD7KzTRtEG1I0wMPFyiyUdOO+4t8GvrnBLQSwpw==", "cpu": [ "x64" ], @@ -796,9 +811,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.28.0.tgz", - "integrity": "sha512-WXveUPKtfqtaNvpf0iOb0M6xC64GzUX/OowbqfiCSXTdi/jLlOmH0Ba94/OkiY2yTGTwteo4/dsHRfh5bDCZ+w==", + "version": "4.52.4", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.4.tgz", + "integrity": "sha512-xRiOu9Of1FZ4SxVbB0iEDXc4ddIcjCv2aj03dmW8UrZIW7aIQ9jVJdLBIhxBI+MaTnGAKyvMwPwQnoOEvP7FgQ==", "cpu": [ "arm" ], @@ -809,9 +824,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.28.0.tgz", - "integrity": "sha512-yLc3O2NtOQR67lI79zsSc7lk31xjwcaocvdD1twL64PK1yNaIqCeWI9L5B4MFPAVGEVjH5k1oWSGuYX1Wutxpg==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.4.tgz", + "integrity": "sha512-FbhM2p9TJAmEIEhIgzR4soUcsW49e9veAQCziwbR+XWB2zqJ12b4i/+hel9yLiD8pLncDH4fKIPIbt5238341Q==", "cpu": [ "arm" ], @@ -822,9 +837,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.28.0.tgz", - "integrity": "sha512-+P9G9hjEpHucHRXqesY+3X9hD2wh0iNnJXX/QhS/J5vTdG6VhNYMxJ2rJkQOxRUd17u5mbMLHM7yWGZdAASfcg==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.4.tgz", + "integrity": "sha512-4n4gVwhPHR9q/g8lKCyz0yuaD0MvDf7dV4f9tHt0C73Mp8h38UCtSCSE6R9iBlTbXlmA8CjpsZoujhszefqueg==", "cpu": [ "arm64" ], @@ -835,9 +850,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.28.0.tgz", - "integrity": "sha512-1xsm2rCKSTpKzi5/ypT5wfc+4bOGa/9yI/eaOLW0oMs7qpC542APWhl4A37AENGZ6St6GBMWhCCMM6tXgTIplw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.4.tgz", + "integrity": "sha512-u0n17nGA0nvi/11gcZKsjkLj1QIpAuPFQbR48Subo7SmZJnGxDpspyw2kbpuoQnyK+9pwf3pAoEXerJs/8Mi9g==", "cpu": [ "arm64" ], @@ -847,10 +862,23 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.28.0.tgz", - "integrity": "sha512-zgWxMq8neVQeXL+ouSf6S7DoNeo6EPgi1eeqHXVKQxqPy1B2NvTbaOUWPn/7CfMKL7xvhV0/+fq/Z/J69g1WAQ==", + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.4.tgz", + "integrity": "sha512-0G2c2lpYtbTuXo8KEJkDkClE/+/2AFPdPAbmaHoE870foRFs4pBrDehilMcrSScrN/fB/1HTaWO4bqw+ewBzMQ==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.4.tgz", + "integrity": "sha512-teSACug1GyZHmPDv14VNbvZFX779UqWTsd7KtTM9JIZRDI5NUwYSIS30kzI8m06gOPB//jtpqlhmraQ68b5X2g==", "cpu": [ "ppc64" ], @@ -861,9 +889,22 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.28.0.tgz", - "integrity": "sha512-VEdVYacLniRxbRJLNtzwGt5vwS0ycYshofI7cWAfj7Vg5asqj+pt+Q6x4n+AONSZW/kVm+5nklde0qs2EUwU2g==", + "version": "4.52.4", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.4.tgz", + "integrity": "sha512-/MOEW3aHjjs1p4Pw1Xk4+3egRevx8Ji9N6HUIA1Ifh8Q+cg9dremvFCUbOX2Zebz80BwJIgCBUemjqhU5XI5Eg==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.4.tgz", + "integrity": "sha512-1HHmsRyh845QDpEWzOFtMCph5Ts+9+yllCrREuBR/vg2RogAQGGBRC8lDPrPOMnrdOJ+mt1WLMOC2Kao/UwcvA==", "cpu": [ "riscv64" ], @@ -874,9 +915,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.28.0.tgz", - "integrity": "sha512-LQlP5t2hcDJh8HV8RELD9/xlYtEzJkm/aWGsauvdO2ulfl3QYRjqrKW+mGAIWP5kdNCBheqqqYIGElSRCaXfpw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.4.tgz", + "integrity": "sha512-seoeZp4L/6D1MUyjWkOMRU6/iLmCU2EjbMTyAG4oIOs1/I82Y5lTeaxW0KBfkUdHAWN7j25bpkt0rjnOgAcQcA==", "cpu": [ "s390x" ], @@ -887,9 +928,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.28.0.tgz", - "integrity": "sha512-Nl4KIzteVEKE9BdAvYoTkW19pa7LR/RBrT6F1dJCV/3pbjwDcaOq+edkP0LXuJ9kflW/xOK414X78r+K84+msw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.4.tgz", + "integrity": "sha512-Wi6AXf0k0L7E2gteNsNHUs7UMwCIhsCTs6+tqQ5GPwVRWMaflqGec4Sd8n6+FNFDw9vGcReqk2KzBDhCa1DLYg==", "cpu": [ "x64" ], @@ -900,9 +941,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.28.0.tgz", - "integrity": "sha512-eKpJr4vBDOi4goT75MvW+0dXcNUqisK4jvibY9vDdlgLx+yekxSm55StsHbxUsRxSTt3JEQvlr3cGDkzcSP8bw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.4.tgz", + "integrity": "sha512-dtBZYjDmCQ9hW+WgEkaffvRRCKm767wWhxsFW3Lw86VXz/uJRuD438/XvbZT//B96Vs8oTA8Q4A0AfHbrxP9zw==", "cpu": [ "x64" ], @@ -912,10 +953,23 @@ "linux" ] }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.4.tgz", + "integrity": "sha512-1ox+GqgRWqaB1RnyZXL8PD6E5f7YyRUJYnCqKpNzxzP0TkaUh112NDrR9Tt+C8rJ4x5G9Mk8PQR3o7Ku2RKqKA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.28.0.tgz", - "integrity": "sha512-Vi+WR62xWGsE/Oj+mD0FNAPY2MEox3cfyG0zLpotZdehPFXwz6lypkGs5y38Jd/NVSbOD02aVad6q6QYF7i8Bg==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.4.tgz", + "integrity": "sha512-8GKr640PdFNXwzIE0IrkMWUNUomILLkfeHjXBi/nUvFlpZP+FA8BKGKpacjW6OUUHaNI6sUURxR2U2g78FOHWQ==", "cpu": [ "arm64" ], @@ -926,9 +980,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.28.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.28.0.tgz", - "integrity": "sha512-kN/Vpip8emMLn/eOza+4JwqDZBL6MPNpkdaEsgUtW1NYN3DZvZqSQrbKzJcTL6hd8YNmFTn7XGWMwccOcJBL0A==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.4.tgz", + "integrity": "sha512-AIy/jdJ7WtJ/F6EcfOb2GjR9UweO0n43jNObQMb6oGxkYTfLcnN7vYYpG+CN3lLxrQkzWnMOoNSHTW54pgbVxw==", "cpu": [ "ia32" ], @@ -938,10 +992,23 @@ "win32" ] }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.4.tgz", + "integrity": "sha512-UF9KfsH9yEam0UjTwAgdK0anlQ7c8/pWPU2yVjyWcF1I1thABt6WXE47cI71pGiZ8wGvxohBoLnxM04L/wj8mQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.28.0.tgz", - "integrity": "sha512-Bvno2/aZT6usSa7lRDL2+hMjVAGjuqaymF1ApZm31JXzniR/hvr14jpU+/z4X6Gt5BPlzosscyJZGUvguXIqeQ==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.4.tgz", + "integrity": "sha512-bf9PtUa0u8IXDVxzRToFQKsNCRz9qLYfR/MpECxl4mRoWYjAeFjgxj1XdZr2M/GNVpT05p+LgQOHopYDlUu6/w==", "cpu": [ "x64" ], @@ -952,69 +1019,88 @@ ] }, "node_modules/@shikijs/core": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.24.0.tgz", - "integrity": "sha512-6pvdH0KoahMzr6689yh0QJ3rCgF4j1XsXRHNEeEN6M4xJTfQ6QPWrmHzIddotg+xPJUPEPzYzYCKzpYyhTI6Gw==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-2.5.0.tgz", + "integrity": "sha512-uu/8RExTKtavlpH7XqnVYBrfBkUc20ngXiX9NSrBhOVZYv/7XQRKUyhtkeflY5QsxC0GbJThCerruZfsUaSldg==", "license": "MIT", "dependencies": { - "@shikijs/engine-javascript": "1.24.0", - "@shikijs/engine-oniguruma": "1.24.0", - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0", + "@shikijs/engine-javascript": "2.5.0", + "@shikijs/engine-oniguruma": "2.5.0", + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", - "hast-util-to-html": "^9.0.3" + "hast-util-to-html": "^9.0.4" } }, "node_modules/@shikijs/engine-javascript": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-1.24.0.tgz", - "integrity": "sha512-ZA6sCeSsF3Mnlxxr+4wGEJ9Tto4RHmfIS7ox8KIAbH0MTVUkw3roHPHZN+LlJMOHJJOVupe6tvuAzRpN8qK1vA==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-2.5.0.tgz", + "integrity": "sha512-VjnOpnQf8WuCEZtNUdjjwGUbtAVKuZkVQ/5cHy/tojVVRIRtlWMYVjyWhxOmIq05AlSOv72z7hRNRGVBgQOl0w==", "license": "MIT", "dependencies": { - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0", - "oniguruma-to-es": "0.7.0" + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2", + "oniguruma-to-es": "^3.1.0" } }, "node_modules/@shikijs/engine-oniguruma": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-1.24.0.tgz", - "integrity": "sha512-Eua0qNOL73Y82lGA4GF5P+G2+VXX9XnuUxkiUuwcxQPH4wom+tE39kZpBFXfUuwNYxHSkrSxpB1p4kyRW0moSg==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-2.5.0.tgz", + "integrity": 
"sha512-pGd1wRATzbo/uatrCIILlAdFVKdxImWJGQ5rFiB5VZi2ve5xj3Ax9jny8QvkaV93btQEwR/rSz5ERFpC5mKNIw==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2" + } + }, + "node_modules/@shikijs/langs": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-2.5.0.tgz", + "integrity": "sha512-Qfrrt5OsNH5R+5tJ/3uYBBZv3SuGmnRPejV9IlIbFH3HTGLDlkqgHymAlzklVmKBjAaVmkPkyikAV/sQ1wSL+w==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "2.5.0" + } + }, + "node_modules/@shikijs/themes": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-2.5.0.tgz", + "integrity": "sha512-wGrk+R8tJnO0VMzmUExHR+QdSaPUl/NKs+a4cQQRWyoc3YFbUzuLEi/KWK1hj+8BfHRKm2jNhhJck1dfstJpiw==", "license": "MIT", "dependencies": { - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0" + "@shikijs/types": "2.5.0" } }, "node_modules/@shikijs/transformers": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-1.24.0.tgz", - "integrity": "sha512-Qf/hby+PRPkoHncjYnJf5svK1aCsOUtQhuLzKPnmeXJtuUZCmbH0pTpdNtXe9tgln/RHlyRJnv7q46HHS1sO0Q==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-2.5.0.tgz", + "integrity": "sha512-SI494W5X60CaUwgi8u4q4m4s3YAFSxln3tzNjOSYqq54wlVgz0/NbbXEb3mdLbqMBztcmS7bVTaEd2w0qMmfeg==", "license": "MIT", "dependencies": { - "shiki": "1.24.0" + "@shikijs/core": "2.5.0", + "@shikijs/types": "2.5.0" } }, "node_modules/@shikijs/types": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-1.24.0.tgz", - "integrity": "sha512-aptbEuq1Pk88DMlCe+FzXNnBZ17LCiLIGWAeCWhoFDzia5Q5Krx3DgnULLiouSdd6+LUM39XwXGppqYE0Ghtug==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-2.5.0.tgz", + "integrity": "sha512-ygl5yhxki9ZLNuNpPitBWvcy9fsSKKaRuO4BAlMyagszQidxcpLAr0qiW/q43DtSIDxO6hEbtYLiFZNXO/hdGw==", "license": "MIT", "dependencies": { - "@shikijs/vscode-textmate": "^9.3.0", + "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "node_modules/@shikijs/vscode-textmate": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-9.3.0.tgz", - "integrity": "sha512-jn7/7ky30idSkd/O5yDBfAnVt+JJpepofP/POZ1iMOxK59cOfqIgg/Dj0eFsjOTMw+4ycJN0uhZH/Eb0bs/EUA==", + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", + "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", "license": "MIT" }, "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "license": "MIT" }, "node_modules/@types/hast": { @@ -1064,21 +1150,21 @@ "license": "MIT" }, "node_modules/@types/web-bluetooth": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.20.tgz", - "integrity": "sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==", + "version": "0.0.21", + "resolved": 
"https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.21.tgz", + "integrity": "sha512-oIQLCGWtcFZy2JW77j9k8nHzAOpqMHLQejDA48XXMWH6tjCQHz5RCFz1bzsmROyL6PUm+LLnUiI4BCn221inxA==", "license": "MIT" }, "node_modules/@ungap/structured-clone": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", - "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", "license": "ISC" }, "node_modules/@vitejs/plugin-vue": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.2.1.tgz", - "integrity": "sha512-cxh314tzaWwOLqVes2gnnCtvBDcM1UMdn+iFR+UjAn411dPT3tOmqrJjbMd7koZpMAmBM/GqeV4n9ge7JSiJJQ==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.2.4.tgz", + "integrity": "sha512-7Yx/SXSOcQq5HiiV3orevHUFn+pmMB4cgbEkDYgnkUWb0WfeQ/wa2yFv6D5ICiCQOVpjA7vYDXrC7AGO8yjDHA==", "license": "MIT", "engines": { "node": "^18.0.0 || >=20.0.0" @@ -1089,188 +1175,162 @@ } }, "node_modules/@vue/compiler-core": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.13.tgz", - "integrity": "sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.22.tgz", + "integrity": "sha512-jQ0pFPmZwTEiRNSb+i9Ow/I/cHv2tXYqsnHKKyCQ08irI2kdF5qmYedmF8si8mA7zepUFmJ2hqzS8CQmNOWOkQ==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.25.3", - "@vue/shared": "3.5.13", + "@babel/parser": "^7.28.4", + "@vue/shared": "3.5.22", "entities": "^4.5.0", "estree-walker": "^2.0.2", - "source-map-js": "^1.2.0" + "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-dom": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.13.tgz", - "integrity": "sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.22.tgz", + "integrity": "sha512-W8RknzUM1BLkypvdz10OVsGxnMAuSIZs9Wdx1vzA3mL5fNMN15rhrSCLiTm6blWeACwUwizzPVqGJgOGBEN/hA==", "license": "MIT", "dependencies": { - "@vue/compiler-core": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-core": "3.5.22", + "@vue/shared": "3.5.22" } }, "node_modules/@vue/compiler-sfc": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.13.tgz", - "integrity": "sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.22.tgz", + "integrity": "sha512-tbTR1zKGce4Lj+JLzFXDq36K4vcSZbJ1RBu8FxcDv1IGRz//Dh2EBqksyGVypz3kXpshIfWKGOCcqpSbyGWRJQ==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.25.3", - "@vue/compiler-core": "3.5.13", - "@vue/compiler-dom": "3.5.13", - "@vue/compiler-ssr": "3.5.13", - "@vue/shared": "3.5.13", + "@babel/parser": "^7.28.4", + "@vue/compiler-core": "3.5.22", + "@vue/compiler-dom": "3.5.22", + "@vue/compiler-ssr": "3.5.22", + "@vue/shared": "3.5.22", 
"estree-walker": "^2.0.2", - "magic-string": "^0.30.11", - "postcss": "^8.4.48", - "source-map-js": "^1.2.0" + "magic-string": "^0.30.19", + "postcss": "^8.5.6", + "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-ssr": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.13.tgz", - "integrity": "sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.22.tgz", + "integrity": "sha512-GdgyLvg4R+7T8Nk2Mlighx7XGxq/fJf9jaVofc3IL0EPesTE86cP/8DD1lT3h1JeZr2ySBvyqKQJgbS54IX1Ww==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-dom": "3.5.22", + "@vue/shared": "3.5.22" } }, "node_modules/@vue/devtools-api": { - "version": "7.6.7", - "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.6.7.tgz", - "integrity": "sha512-PV4I31WaV2rfA8RGauM+69uFEzWkqtP561RiLU2wK+Ce85u3zyKW3aoESlLCNzkc4y0JaJyskH6zAE3xWOP8+Q==", + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.7.7.tgz", + "integrity": "sha512-lwOnNBH2e7x1fIIbVT7yF5D+YWhqELm55/4ZKf45R9T8r9dE2AIOy8HKjfqzGsoTHFbWbr337O4E0A0QADnjBg==", "license": "MIT", "dependencies": { - "@vue/devtools-kit": "^7.6.7" + "@vue/devtools-kit": "^7.7.7" } }, "node_modules/@vue/devtools-kit": { - "version": "7.6.7", - "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.6.7.tgz", - "integrity": "sha512-V8/jrXY/swHgnblABG9U4QCbE60c6RuPasmv2d9FvVqc5d94t1vDiESuvRmdNJBdWz4/D3q6ffgyAfRVjwHYEw==", + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.7.7.tgz", + "integrity": "sha512-wgoZtxcTta65cnZ1Q6MbAfePVFxfM+gq0saaeytoph7nEa7yMXoi6sCPy4ufO111B9msnw0VOWjPEFCXuAKRHA==", "license": "MIT", "dependencies": { - "@vue/devtools-shared": "^7.6.7", - "birpc": "^0.2.19", + "@vue/devtools-shared": "^7.7.7", + "birpc": "^2.3.0", "hookable": "^5.5.3", "mitt": "^3.0.1", "perfect-debounce": "^1.0.0", "speakingurl": "^14.0.1", - "superjson": "^2.2.1" + "superjson": "^2.2.2" } }, "node_modules/@vue/devtools-shared": { - "version": "7.6.7", - "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.6.7.tgz", - "integrity": "sha512-QggO6SviAsolrePAXZ/sA1dSicSPt4TueZibCvydfhNDieL1lAuyMTgQDGst7TEvMGb4vgYv2I+1sDkO4jWNnw==", + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.7.7.tgz", + "integrity": "sha512-+udSj47aRl5aKb0memBvcUG9koarqnxNM5yjuREvqwK6T3ap4mn3Zqqc17QrBFTqSMjr3HK1cvStEZpMDpfdyw==", "license": "MIT", "dependencies": { "rfdc": "^1.4.1" } }, "node_modules/@vue/reactivity": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.13.tgz", - "integrity": "sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.22.tgz", + "integrity": "sha512-f2Wux4v/Z2pqc9+4SmgZC1p73Z53fyD90NFWXiX9AKVnVBEvLFOWCEgJD3GdGnlxPZt01PSlfmLqbLYzY/Fw4A==", "license": "MIT", "dependencies": { - "@vue/shared": "3.5.13" + "@vue/shared": "3.5.22" } }, "node_modules/@vue/runtime-core": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.13.tgz", - "integrity": 
"sha512-Fj4YRQ3Az0WTZw1sFe+QDb0aXCerigEpw418pw1HBUKFtnQHWzwojaukAs2X/c9DQz4MQ4bsXTGlcpGxU/RCIw==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.22.tgz", + "integrity": "sha512-EHo4W/eiYeAzRTN5PCextDUZ0dMs9I8mQ2Fy+OkzvRPUYQEyK9yAjbasrMCXbLNhF7P0OUyivLjIy0yc6VrLJQ==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/reactivity": "3.5.22", + "@vue/shared": "3.5.22" } }, "node_modules/@vue/runtime-dom": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.13.tgz", - "integrity": "sha512-dLaj94s93NYLqjLiyFzVs9X6dWhTdAlEAciC3Moq7gzAc13VJUdCnjjRurNM6uTLFATRHexHCTu/Xp3eW6yoog==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.22.tgz", + "integrity": "sha512-Av60jsryAkI023PlN7LsqrfPvwfxOd2yAwtReCjeuugTJTkgrksYJJstg1e12qle0NarkfhfFu1ox2D+cQotww==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.13", - "@vue/runtime-core": "3.5.13", - "@vue/shared": "3.5.13", + "@vue/reactivity": "3.5.22", + "@vue/runtime-core": "3.5.22", + "@vue/shared": "3.5.22", "csstype": "^3.1.3" } }, "node_modules/@vue/server-renderer": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.13.tgz", - "integrity": "sha512-wAi4IRJV/2SAW3htkTlB+dHeRmpTiVIK1OGLWV1yeStVSebSQQOwGwIq0D3ZIoBj2C2qpgz5+vX9iEBkTdk5YA==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.22.tgz", + "integrity": "sha512-gXjo+ao0oHYTSswF+a3KRHZ1WszxIqO7u6XwNHqcqb9JfyIL/pbWrrh/xLv7jeDqla9u+LK7yfZKHih1e1RKAQ==", "license": "MIT", "dependencies": { - "@vue/compiler-ssr": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-ssr": "3.5.22", + "@vue/shared": "3.5.22" }, "peerDependencies": { - "vue": "3.5.13" + "vue": "3.5.22" } }, "node_modules/@vue/shared": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.13.tgz", - "integrity": "sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.22.tgz", + "integrity": "sha512-F4yc6palwq3TT0u+FYf0Ns4Tfl9GRFURDN2gWG7L1ecIaS/4fCIuFOjMTnCyjsu/OK6vaDKLCrGAa+KvvH+h4w==", "license": "MIT" }, "node_modules/@vueuse/core": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-11.3.0.tgz", - "integrity": "sha512-7OC4Rl1f9G8IT6rUfi9JrKiXy4bfmHhZ5x2Ceojy0jnd3mHNEvV4JaRygH362ror6/NZ+Nl+n13LPzGiPN8cKA==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-12.8.2.tgz", + "integrity": "sha512-HbvCmZdzAu3VGi/pWYm5Ut+Kd9mn1ZHnn4L5G8kOQTPs/IwIAmJoBrmYk2ckLArgMXZj0AW3n5CAejLUO+PhdQ==", "license": "MIT", "dependencies": { - "@types/web-bluetooth": "^0.0.20", - "@vueuse/metadata": "11.3.0", - "@vueuse/shared": "11.3.0", - "vue-demi": ">=0.14.10" + "@types/web-bluetooth": "^0.0.21", + "@vueuse/metadata": "12.8.2", + "@vueuse/shared": "12.8.2", + "vue": "^3.5.13" }, "funding": { "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@vueuse/core/node_modules/vue-demi": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.10.tgz", - "integrity": "sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "vue-demi-fix": 
"bin/vue-demi-fix.js", - "vue-demi-switch": "bin/vue-demi-switch.js" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - }, - "peerDependencies": { - "@vue/composition-api": "^1.0.0-rc.1", - "vue": "^3.0.0-0 || ^2.6.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - } - } - }, "node_modules/@vueuse/integrations": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/integrations/-/integrations-11.3.0.tgz", - "integrity": "sha512-5fzRl0apQWrDezmobchoiGTkGw238VWESxZHazfhP3RM7pDSiyXy18QbfYkILoYNTd23HPAfQTJpkUc5QbkwTw==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/integrations/-/integrations-12.8.2.tgz", + "integrity": "sha512-fbGYivgK5uBTRt7p5F3zy6VrETlV9RtZjBqd1/HxGdjdckBgBM4ugP8LHpjolqTj14TXTxSK1ZfgPbHYyGuH7g==", "license": "MIT", "dependencies": { - "@vueuse/core": "11.3.0", - "@vueuse/shared": "11.3.0", - "vue-demi": ">=0.14.10" + "@vueuse/core": "12.8.2", + "@vueuse/shared": "12.8.2", + "vue": "^3.5.13" }, "funding": { "url": "https://github.com/sponsors/antfu" @@ -1328,107 +1388,56 @@ } } }, - "node_modules/@vueuse/integrations/node_modules/vue-demi": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.10.tgz", - "integrity": "sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "vue-demi-fix": "bin/vue-demi-fix.js", - "vue-demi-switch": "bin/vue-demi-switch.js" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - }, - "peerDependencies": { - "@vue/composition-api": "^1.0.0-rc.1", - "vue": "^3.0.0-0 || ^2.6.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - } - } - }, "node_modules/@vueuse/metadata": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-11.3.0.tgz", - "integrity": "sha512-pwDnDspTqtTo2HwfLw4Rp6yywuuBdYnPYDq+mO38ZYKGebCUQC/nVj/PXSiK9HX5otxLz8Fn7ECPbjiRz2CC3g==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-12.8.2.tgz", + "integrity": "sha512-rAyLGEuoBJ/Il5AmFHiziCPdQzRt88VxR+Y/A/QhJ1EWtWqPBBAxTAFaSkviwEuOEZNtW8pvkPgoCZQ+HxqW1A==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/antfu" } }, "node_modules/@vueuse/shared": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-11.3.0.tgz", - "integrity": "sha512-P8gSSWQeucH5821ek2mn/ciCk+MS/zoRKqdQIM3bHq6p7GXDAJLmnRRKmF5F65sAVJIfzQlwR3aDzwCn10s8hA==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-12.8.2.tgz", + "integrity": "sha512-dznP38YzxZoNloI0qpEfpkms8knDtaoQ6Y/sfS0L7Yki4zh40LFHEhur0odJC6xTHG5dxWVPiUWBXn+wCG2s5w==", "license": "MIT", "dependencies": { - "vue-demi": ">=0.14.10" + "vue": "^3.5.13" }, "funding": { "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@vueuse/shared/node_modules/vue-demi": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.10.tgz", - "integrity": "sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "vue-demi-fix": "bin/vue-demi-fix.js", - "vue-demi-switch": "bin/vue-demi-switch.js" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - }, - 
"peerDependencies": { - "@vue/composition-api": "^1.0.0-rc.1", - "vue": "^3.0.0-0 || ^2.6.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - } - } - }, "node_modules/algoliasearch": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.15.0.tgz", - "integrity": "sha512-Yf3Swz1s63hjvBVZ/9f2P1Uu48GjmjCN+Esxb6MAONMGtZB1fRX8/S1AhUTtsuTlcGovbYLxpHgc7wEzstDZBw==", - "license": "MIT", - "dependencies": { - "@algolia/client-abtesting": "5.15.0", - "@algolia/client-analytics": "5.15.0", - "@algolia/client-common": "5.15.0", - "@algolia/client-insights": "5.15.0", - "@algolia/client-personalization": "5.15.0", - "@algolia/client-query-suggestions": "5.15.0", - "@algolia/client-search": "5.15.0", - "@algolia/ingestion": "1.15.0", - "@algolia/monitoring": "1.15.0", - "@algolia/recommend": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.39.0.tgz", + "integrity": "sha512-DzTfhUxzg9QBNGzU/0kZkxEV72TeA4MmPJ7RVfLnQwHNhhliPo7ynglEWJS791rNlLFoTyrKvkapwr/P3EXV9A==", + "license": "MIT", + "dependencies": { + "@algolia/abtesting": "1.5.0", + "@algolia/client-abtesting": "5.39.0", + "@algolia/client-analytics": "5.39.0", + "@algolia/client-common": "5.39.0", + "@algolia/client-insights": "5.39.0", + "@algolia/client-personalization": "5.39.0", + "@algolia/client-query-suggestions": "5.39.0", + "@algolia/client-search": "5.39.0", + "@algolia/ingestion": "1.39.0", + "@algolia/monitoring": "1.39.0", + "@algolia/recommend": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/birpc": { - "version": "0.2.19", - "resolved": "https://registry.npmjs.org/birpc/-/birpc-0.2.19.tgz", - "integrity": "sha512-5WeXXAvTmitV1RqJFppT5QtUiz2p1mRSYU000Jkft5ZUCLJIk4uQriYNO50HknxKwM6jd8utNc66K1qGIwwWBQ==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.6.1.tgz", + "integrity": "sha512-LPnFhlDpdSH6FJhJyn4M0kFO7vtQ5iPw24FnG0y21q09xC7e8+1LeR31S1MAIrDAHp4m7aas4bEkTDTvMAtebQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/antfu" @@ -1580,9 +1589,9 @@ "license": "MIT" }, "node_modules/focus-trap": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.6.2.tgz", - "integrity": "sha512-9FhUxK1hVju2+AiQIDJ5Dd//9R2n2RAfJ0qfhF4IHGHgcoEUTMpbTeG/zbEuwaiYXfuAH6XE0/aCyxDdRM+W5w==", + "version": "7.6.5", + "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.6.5.tgz", + "integrity": "sha512-7Ke1jyybbbPZyZXFxEftUtxFGLMpE2n6A+z//m4CRDlj0hW+o3iYSmh8nFlYMurOiJVDmJRilUQtJr08KfIxlg==", "license": "MIT", "dependencies": { "tabbable": "^6.2.0" @@ -1603,9 +1612,9 @@ } }, "node_modules/hast-util-to-html": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.3.tgz", - "integrity": "sha512-M17uBDzMJ9RPCqLMO92gNNUDuBSq10a25SDBI08iCCxmorf4Yy6sYHK57n9WAbRAAaU+DuR4W6GN9K4DFZesYg==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", + "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", "license": "MIT", "dependencies": { "@types/hast": "^3.0.0", @@ -1615,7 +1624,7 @@ 
"hast-util-whitespace": "^3.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", - "property-information": "^6.0.0", + "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "stringify-entities": "^4.0.0", "zwitch": "^2.0.4" @@ -1667,12 +1676,12 @@ } }, "node_modules/magic-string": { - "version": "0.30.14", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.14.tgz", - "integrity": "sha512-5c99P1WKTed11ZC0HMJOj6CDIue6F8ySu+bJL+85q1zBEIY8IklrJ1eiKC2NDRh3Ct3FcvmJPyQHb9erXMTJNw==", + "version": "0.30.19", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", + "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" + "@jridgewell/sourcemap-codec": "^1.5.5" } }, "node_modules/mark.js": { @@ -1776,9 +1785,9 @@ "license": "MIT" }, "node_modules/micromark-util-types": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.1.tgz", - "integrity": "sha512-534m2WhVTddrcKVepwmVEVnUAmtrx9bfIjNoQHRqfnvdaHQiFytEhJoTgpWJvDEXCO5gLTQh3wYC1PgOJA4NSQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", "funding": [ { "type": "GitHub Sponsors", @@ -1792,9 +1801,9 @@ "license": "MIT" }, "node_modules/minisearch": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-7.1.1.tgz", - "integrity": "sha512-b3YZEYCEH4EdCAtYP7OlDyx7FdPwNzuNwLQ34SfJpM9dlbBZzeXndGavTrC+VCiRWomL21SWfMc6SCKO/U2ZNw==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-7.2.0.tgz", + "integrity": "sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==", "license": "MIT" }, "node_modules/mitt": { @@ -1804,9 +1813,9 @@ "license": "MIT" }, "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "funding": [ { "type": "github", @@ -1822,14 +1831,14 @@ } }, "node_modules/oniguruma-to-es": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-0.7.0.tgz", - "integrity": "sha512-HRaRh09cE0gRS3+wi2zxekB+I5L8C/gN60S+vb11eADHUaB/q4u8wGGOX3GvwvitG8ixaeycZfeoyruKQzUgNg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-3.1.1.tgz", + "integrity": "sha512-bUH8SDvPkH3ho3dvwJwfonjlQ4R80vjyvrU8YpxuROddv55vAEJrTuCuCVUhhsHbtlD9tGGbaNApGQckXhS8iQ==", "license": "MIT", "dependencies": { "emoji-regex-xs": "^1.0.0", - "regex": "^5.0.2", - "regex-recursion": "^4.3.0" + "regex": "^6.0.1", + "regex-recursion": "^6.0.2" } }, "node_modules/perfect-debounce": { @@ -1845,9 +1854,9 @@ "license": "ISC" }, "node_modules/postcss": { - "version": "8.4.49", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", - "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", 
+ "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "funding": [ { "type": "opencollective", @@ -1864,7 +1873,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.7", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -1873,9 +1882,9 @@ } }, "node_modules/preact": { - "version": "10.25.1", - "resolved": "https://registry.npmjs.org/preact/-/preact-10.25.1.tgz", - "integrity": "sha512-frxeZV2vhQSohQwJ7FvlqC40ze89+8friponWUFeVEkaCfhC6Eu4V0iND5C9CXz8JLndV07QRDeXzH1+Anz5Og==", + "version": "10.27.2", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.27.2.tgz", + "integrity": "sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg==", "license": "MIT", "funding": { "type": "opencollective", @@ -1883,9 +1892,9 @@ } }, "node_modules/property-information": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", - "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", "license": "MIT", "funding": { "type": "github", @@ -1893,18 +1902,18 @@ } }, "node_modules/regex": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/regex/-/regex-5.0.2.tgz", - "integrity": "sha512-/pczGbKIQgfTMRV0XjABvc5RzLqQmwqxLHdQao2RTXPk+pmTXB2P0IaUHYdYyk412YLwUIkaeMd5T+RzVgTqnQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/regex/-/regex-6.0.1.tgz", + "integrity": "sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==", "license": "MIT", "dependencies": { "regex-utilities": "^2.3.0" } }, "node_modules/regex-recursion": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-4.3.0.tgz", - "integrity": "sha512-5LcLnizwjcQ2ALfOj95MjcatxyqF5RPySx9yT+PaXu3Gox2vyAtLDjHB8NTJLtMGkvyau6nI3CfpwFCjPUIs/A==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-6.0.2.tgz", + "integrity": "sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==", "license": "MIT", "dependencies": { "regex-utilities": "^2.3.0" @@ -1923,12 +1932,12 @@ "license": "MIT" }, "node_modules/rollup": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.28.0.tgz", - "integrity": "sha512-G9GOrmgWHBma4YfCcX8PjH0qhXSdH8B4HDE2o4/jaxj93S4DPCIDoLcXz99eWMji4hB29UFCEd7B2gwGJDR9cQ==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.52.4.tgz", + "integrity": "sha512-CLEVl+MnPAiKh5pl4dEWSyMTpuflgNQiLGhMv8ezD5W/qP8AKvmYpCOKRRNOh7oRKnauBZ4SyeYkMS+1VSyKwQ==", "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -1938,24 +1947,28 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.28.0", - "@rollup/rollup-android-arm64": "4.28.0", - "@rollup/rollup-darwin-arm64": "4.28.0", - "@rollup/rollup-darwin-x64": "4.28.0", - "@rollup/rollup-freebsd-arm64": "4.28.0", - "@rollup/rollup-freebsd-x64": "4.28.0", - 
"@rollup/rollup-linux-arm-gnueabihf": "4.28.0", - "@rollup/rollup-linux-arm-musleabihf": "4.28.0", - "@rollup/rollup-linux-arm64-gnu": "4.28.0", - "@rollup/rollup-linux-arm64-musl": "4.28.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.28.0", - "@rollup/rollup-linux-riscv64-gnu": "4.28.0", - "@rollup/rollup-linux-s390x-gnu": "4.28.0", - "@rollup/rollup-linux-x64-gnu": "4.28.0", - "@rollup/rollup-linux-x64-musl": "4.28.0", - "@rollup/rollup-win32-arm64-msvc": "4.28.0", - "@rollup/rollup-win32-ia32-msvc": "4.28.0", - "@rollup/rollup-win32-x64-msvc": "4.28.0", + "@rollup/rollup-android-arm-eabi": "4.52.4", + "@rollup/rollup-android-arm64": "4.52.4", + "@rollup/rollup-darwin-arm64": "4.52.4", + "@rollup/rollup-darwin-x64": "4.52.4", + "@rollup/rollup-freebsd-arm64": "4.52.4", + "@rollup/rollup-freebsd-x64": "4.52.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.52.4", + "@rollup/rollup-linux-arm-musleabihf": "4.52.4", + "@rollup/rollup-linux-arm64-gnu": "4.52.4", + "@rollup/rollup-linux-arm64-musl": "4.52.4", + "@rollup/rollup-linux-loong64-gnu": "4.52.4", + "@rollup/rollup-linux-ppc64-gnu": "4.52.4", + "@rollup/rollup-linux-riscv64-gnu": "4.52.4", + "@rollup/rollup-linux-riscv64-musl": "4.52.4", + "@rollup/rollup-linux-s390x-gnu": "4.52.4", + "@rollup/rollup-linux-x64-gnu": "4.52.4", + "@rollup/rollup-linux-x64-musl": "4.52.4", + "@rollup/rollup-openharmony-arm64": "4.52.4", + "@rollup/rollup-win32-arm64-msvc": "4.52.4", + "@rollup/rollup-win32-ia32-msvc": "4.52.4", + "@rollup/rollup-win32-x64-gnu": "4.52.4", + "@rollup/rollup-win32-x64-msvc": "4.52.4", "fsevents": "~2.3.2" } }, @@ -1967,16 +1980,18 @@ "peer": true }, "node_modules/shiki": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.24.0.tgz", - "integrity": "sha512-qIneep7QRwxRd5oiHb8jaRzH15V/S8F3saCXOdjwRLgozZJr5x2yeBhQtqkO3FSzQDwYEFAYuifg4oHjpDghrg==", - "license": "MIT", - "dependencies": { - "@shikijs/core": "1.24.0", - "@shikijs/engine-javascript": "1.24.0", - "@shikijs/engine-oniguruma": "1.24.0", - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-2.5.0.tgz", + "integrity": "sha512-mI//trrsaiCIPsja5CNfsyNOqgAZUb6VpJA+340toL42UpzQlXpwRV9nch69X6gaUxrr9kaOOa6e3y3uAkGFxQ==", + "license": "MIT", + "dependencies": { + "@shikijs/core": "2.5.0", + "@shikijs/engine-javascript": "2.5.0", + "@shikijs/engine-oniguruma": "2.5.0", + "@shikijs/langs": "2.5.0", + "@shikijs/themes": "2.5.0", + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, @@ -2023,9 +2038,9 @@ } }, "node_modules/superjson": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.1.tgz", - "integrity": "sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.2.tgz", + "integrity": "sha512-5JRxVqC8I8NuOUjzBbvVJAKNM8qoVuH0O77h4WInc/qC2q5IreqKxYwgkga3PfA22OayK2ikceb/B26dztPl+Q==", "license": "MIT", "dependencies": { "copy-anything": "^3.0.2" @@ -2133,9 +2148,9 @@ } }, "node_modules/vfile-message": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", - "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": 
"sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", "license": "MIT", "dependencies": { "@types/unist": "^3.0.0", @@ -2147,9 +2162,9 @@ } }, "node_modules/vite": { - "version": "5.4.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz", - "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==", + "version": "5.4.20", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.20.tgz", + "integrity": "sha512-j3lYzGC3P+B5Yfy/pfKNgVEg4+UtcIJcVRt2cDjIOmhLourAqPqf8P7acgxeiSgUB7E3p2P8/3gNIgDLpwzs4g==", "license": "MIT", "dependencies": { "esbuild": "^0.21.3", @@ -2206,29 +2221,29 @@ } }, "node_modules/vitepress": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/vitepress/-/vitepress-1.5.0.tgz", - "integrity": "sha512-q4Q/G2zjvynvizdB3/bupdYkCJe2umSAMv9Ju4d92E6/NXJ59z70xB0q5p/4lpRyAwflDsbwy1mLV9Q5+nlB+g==", + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/vitepress/-/vitepress-1.6.4.tgz", + "integrity": "sha512-+2ym1/+0VVrbhNyRoFFesVvBvHAVMZMK0rw60E3X/5349M1GuVdKeazuksqopEdvkKwKGs21Q729jX81/bkBJg==", "license": "MIT", "dependencies": { - "@docsearch/css": "^3.6.2", - "@docsearch/js": "^3.6.2", - "@iconify-json/simple-icons": "^1.2.10", - "@shikijs/core": "^1.22.2", - "@shikijs/transformers": "^1.22.2", - "@shikijs/types": "^1.22.2", + "@docsearch/css": "3.8.2", + "@docsearch/js": "3.8.2", + "@iconify-json/simple-icons": "^1.2.21", + "@shikijs/core": "^2.1.0", + "@shikijs/transformers": "^2.1.0", + "@shikijs/types": "^2.1.0", "@types/markdown-it": "^14.1.2", - "@vitejs/plugin-vue": "^5.1.4", - "@vue/devtools-api": "^7.5.4", - "@vue/shared": "^3.5.12", - "@vueuse/core": "^11.1.0", - "@vueuse/integrations": "^11.1.0", - "focus-trap": "^7.6.0", + "@vitejs/plugin-vue": "^5.2.1", + "@vue/devtools-api": "^7.7.0", + "@vue/shared": "^3.5.13", + "@vueuse/core": "^12.4.0", + "@vueuse/integrations": "^12.4.0", + "focus-trap": "^7.6.4", "mark.js": "8.11.1", - "minisearch": "^7.1.0", - "shiki": "^1.22.2", - "vite": "^5.4.10", - "vue": "^3.5.12" + "minisearch": "^7.1.1", + "shiki": "^2.1.0", + "vite": "^5.4.14", + "vue": "^3.5.13" }, "bin": { "vitepress": "bin/vitepress.js" @@ -2247,16 +2262,16 @@ } }, "node_modules/vue": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.13.tgz", - "integrity": "sha512-wmeiSMxkZCSc+PM2w2VRsOYAZC8GdipNFRTsLSfodVqI9mbejKeXEGr8SckuLnrQPGe3oJN5c3K0vpoU9q/wCQ==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.22.tgz", + "integrity": "sha512-toaZjQ3a/G/mYaLSbV+QsQhIdMo9x5rrqIpYRObsJ6T/J+RyCSFwN2LHNVH9v8uIcljDNa3QzPVdv3Y6b9hAJQ==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.13", - "@vue/compiler-sfc": "3.5.13", - "@vue/runtime-dom": "3.5.13", - "@vue/server-renderer": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-dom": "3.5.22", + "@vue/compiler-sfc": "3.5.22", + "@vue/runtime-dom": "3.5.22", + "@vue/server-renderer": "3.5.22", + "@vue/shared": "3.5.22" }, "peerDependencies": { "typescript": "*" diff --git a/docs/package.json b/docs/package.json index c9348f5..83dc38c 100644 --- a/docs/package.json +++ b/docs/package.json @@ -6,6 +6,6 @@ "preview": "vitepress preview" }, "dependencies": { - "vitepress": "^1.5.0" + "vitepress": "1.6.4" } -} +} \ No newline at end of file diff --git a/examples/the-expense/deno.json b/examples/hono-demo/deno.json similarity index 57% rename from examples/the-expense/deno.json rename to examples/hono-demo/deno.json 
index 3ec6640..fa54ead 100644 --- a/examples/the-expense/deno.json +++ b/examples/hono-demo/deno.json @@ -6,17 +6,25 @@ "database:seed": "deno run -A src/seedCollections.ts" }, "lint": { - "include": ["src/"], + "include": [ + "src/" + ], "exclude": [], "rules": { - "tags": ["recommended"], + "tags": [ + "recommended" + ], "include": [], - "exclude": ["no-explicit-any"] + "exclude": [ + "no-explicit-any" + ] } }, "fmt": { - "include": ["src/"], - "exclude": ["src/shared/mailing/core/*.html"], + "include": [ + "src/" + ], + "exclude": [], "useTabs": false, "lineWidth": 80, "indentWidth": 4, @@ -25,13 +33,15 @@ "proseWrap": "always" }, "test": { - "include": ["src/"] + "include": [ + "src/" + ] }, "imports": { - "@oak/oak": "jsr:@oak/oak@^17.1.4", + "@std/dotenv": "jsr:@std/dotenv@^0.225.6", "@std/ulid": "jsr:@std/ulid@^1.0.0", - "@tajpouria/cors": "jsr:@tajpouria/cors@^1.2.1", - "mongodb": "npm:mongodb@^6.12.0", - "zod": "npm:zod@^3.24.1" + "hono": "npm:hono@^4.11.4", + "mongodb": "npm:mongodb@^7.0.0", + "zod": "npm:zod@^4.3.5" } -} +} \ No newline at end of file diff --git a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts new file mode 100644 index 0000000..4c01f24 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts @@ -0,0 +1,41 @@ +import { commandSchema, InvalidInputException } from '@nimbus/core'; +import { ObjectId } from 'mongodb'; +import { z } from 'zod'; +import { UserState } from '../domain/user.ts'; + +export const ADD_USER_COMMAND_TYPE = 'at.overlap.nimbus.add-user'; + +export const addUserInputSchema = z.object({ + email: z.email(), + firstName: z.string(), + lastName: z.string(), + group: z.string(), +}); + +export const addUserCommandSchema = commandSchema.extend({ + type: z.literal(ADD_USER_COMMAND_TYPE), + data: addUserInputSchema, +}); +export type AddUserCommand = z.infer<typeof addUserCommandSchema>; + +export const addUser = ( + state: UserState, + command: AddUserCommand, +): UserState => { + // Always normalize user emails to lowercase + const email = command.data.email.toLowerCase(); + + if (state && state.email === email) { + throw new InvalidInputException('User with this email already exists'); + } + + return { + _id: new ObjectId().toString(), + email: email, + firstName: command.data.firstName, + lastName: command.data.lastName, + group: command.data.group, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + }; +}; diff --git a/examples/hono-demo/src/iam/users/core/domain/user.ts b/examples/hono-demo/src/iam/users/core/domain/user.ts new file mode 100644 index 0000000..bfb8f2c --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/domain/user.ts @@ -0,0 +1,15 @@ +import { z } from 'zod'; + +export const User = z.object({ + _id: z.string().length(24), + email: z.email(), + group: z.string(), + firstName: z.string(), + lastName: z.string(), + createdAt: z.iso.datetime(), + updatedAt: z.iso.datetime(), +}); + +export type User = z.infer<typeof User>; + +export type UserState = User | null; diff --git a/examples/hono-demo/src/iam/users/core/domain/userGroup.ts b/examples/hono-demo/src/iam/users/core/domain/userGroup.ts new file mode 100644 index 0000000..5160298 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/domain/userGroup.ts @@ -0,0 +1,9 @@ +import { z } from 'zod'; +import { User } from './user.ts'; + +export const UserGroup = z.object({ + name: z.string(), + users: z.array(User), +}); + +export type UserGroup = z.infer<typeof UserGroup>; diff --git
a/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts b/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts new file mode 100644 index 0000000..99050d1 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts @@ -0,0 +1,8 @@ +import { Event } from '@nimbus/core'; +import { UserState } from '../domain/user.ts'; + +export const USER_ADDED_EVENT_TYPE = 'at.overlap.nimbus.user-added'; + +export type UserAddedEvent = Event<UserState> & { + type: typeof USER_ADDED_EVENT_TYPE; +}; diff --git a/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts b/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts new file mode 100644 index 0000000..429c57b --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts @@ -0,0 +1,12 @@ +import { querySchema } from '@nimbus/core'; +import { z } from 'zod'; + +export const GET_USER_QUERY_TYPE = 'at.overlap.nimbus.get-user'; + +export const getUserQuerySchema = querySchema.extend({ + type: z.literal(GET_USER_QUERY_TYPE), + data: z.object({ + id: z.string().length(24), + }), +}); +export type GetUserQuery = z.infer<typeof getUserQuerySchema>; diff --git a/examples/hono-demo/src/iam/users/core/queries/getUserGroups.ts b/examples/hono-demo/src/iam/users/core/queries/getUserGroups.ts new file mode 100644 index 0000000..31bddb4 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/queries/getUserGroups.ts @@ -0,0 +1,10 @@ +import { querySchema } from '@nimbus/core'; +import { z } from 'zod'; + +export const GET_USER_GROUPS_QUERY_TYPE = 'at.overlap.nimbus.get-user-groups'; + +export const getUserGroupsQuerySchema = querySchema.extend({ + type: z.literal(GET_USER_GROUPS_QUERY_TYPE), + data: z.object({}), +}); +export type GetUserGroupsQuery = z.infer<typeof getUserGroupsQuerySchema>; diff --git a/examples/hono-demo/src/iam/users/shell/http/router.ts b/examples/hono-demo/src/iam/users/shell/http/router.ts new file mode 100644 index 0000000..874561c --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/http/router.ts @@ -0,0 +1,77 @@ +import { createCommand, createQuery, getRouter } from '@nimbus/core'; +import { getCorrelationId } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { + ADD_USER_COMMAND_TYPE, + AddUserCommand, +} from '../../core/commands/addUser.command.ts'; +import { + GET_USER_QUERY_TYPE, + GetUserQuery, +} from '../../core/queries/getUser.query.ts'; +import { + GET_USER_GROUPS_QUERY_TYPE, + GetUserGroupsQuery, +} from '../../core/queries/getUserGroups.ts'; + +const usersRouter = new Hono(); + +usersRouter.post( + '/add-user', + async (c) => { + const body = await c.req.json(); + const correlationId = getCorrelationId(c); + + const command = createCommand<AddUserCommand>({ + type: ADD_USER_COMMAND_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: body, + }); + + const result = await getRouter('default').route(command); + + return c.json(result); + }, +); + +usersRouter.get( + '/groups', + async (c) => { + const correlationId = getCorrelationId(c); + + const query = createQuery<GetUserGroupsQuery>({ + type: GET_USER_GROUPS_QUERY_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: {}, + }); + + const result = await getRouter('default').route(query); + + return c.json(result); + }, +); + +usersRouter.get( + '/:id', + async (c) => { + const id = c.req.param('id'); + const correlationId = getCorrelationId(c); + + const query = createQuery<GetUserQuery>({ + type: GET_USER_QUERY_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: { + id: id, + }, + }); + + const result = await
getRouter('default').route(query); + + return c.json(result); + }, +); + +export default usersRouter; diff --git a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts new file mode 100644 index 0000000..8b5d0b9 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts @@ -0,0 +1,48 @@ +import { createEvent, getEventBus, NotFoundException } from '@nimbus/core'; +import { + addUser, + AddUserCommand, +} from '../../../core/commands/addUser.command.ts'; +import { UserState } from '../../../core/domain/user.ts'; +import { + USER_ADDED_EVENT_TYPE, + UserAddedEvent, +} from '../../../core/events/userAdded.event.ts'; +import { userRepository } from '../../mongodb/user.repository.ts'; + +export const addUserCommandHandler = async (command: AddUserCommand) => { + const eventBus = getEventBus('default'); + let state: UserState = null; + + try { + state = await userRepository.findOne({ + filter: { email: command.data.email }, + }); + } catch (_error) { + if (_error instanceof NotFoundException) { + state = null; + } else { + throw _error; + } + } + + state = addUser(state, command); + + if (state !== null) { + state = await userRepository.insertOne({ + item: state, + }); + + const event = createEvent<UserAddedEvent>({ + type: USER_ADDED_EVENT_TYPE, + source: 'nimbus.overlap.at', + correlationid: command.correlationid, + subject: `/users/${state._id}`, + data: state, + }); + + eventBus.putEvent(event); + } + + return state; +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts b/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts new file mode 100644 index 0000000..0961729 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts @@ -0,0 +1,11 @@ +import { getLogger } from '@nimbus/core'; +import { UserAddedEvent } from '../../../core/events/userAdded.event.ts'; + +export const userAddedEventHandler = async (event: UserAddedEvent) => { + await Promise.resolve(); + + getLogger().info({ + message: 'User added', + data: event.data ??
{}, + }); +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/queries/getUser.query.ts b/examples/hono-demo/src/iam/users/shell/messages/queries/getUser.query.ts new file mode 100644 index 0000000..c74d1a1 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/queries/getUser.query.ts @@ -0,0 +1,11 @@ +import { ObjectId } from 'mongodb'; +import { GetUserQuery } from '../../../core/queries/getUser.query.ts'; +import { userRepository } from '../../mongodb/user.repository.ts'; + +export const getUserQueryHandler = async (query: GetUserQuery) => { + const state = await userRepository.findOne({ + filter: { _id: new ObjectId(query.data.id) }, + }); + + return state; +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts b/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts new file mode 100644 index 0000000..2844f3c --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts @@ -0,0 +1,7 @@ +import { userRepository } from '../../mongodb/user.repository.ts'; + +export const getUserGroupsQueryHandler = async () => { + const result = await userRepository.getUserGroups(); + + return result; +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts new file mode 100644 index 0000000..a5fdeea --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts @@ -0,0 +1,45 @@ +import { getEventBus, getRouter } from '@nimbus/core'; +import { + ADD_USER_COMMAND_TYPE, + addUserCommandSchema, +} from '../../core/commands/addUser.command.ts'; +import { USER_ADDED_EVENT_TYPE } from '../../core/events/userAdded.event.ts'; +import { + GET_USER_QUERY_TYPE, + getUserQuerySchema, +} from '../../core/queries/getUser.query.ts'; +import { + GET_USER_GROUPS_QUERY_TYPE, + getUserGroupsQuerySchema, +} from '../../core/queries/getUserGroups.ts'; +import { addUserCommandHandler } from './commands/addUser.command.ts'; +import { userAddedEventHandler } from './events/userAdded.event.ts'; +import { getUserQueryHandler } from './queries/getUser.query.ts'; +import { getUserGroupsQueryHandler } from './queries/getUserGroups.query.ts'; + +export const registerUserMessages = () => { + const eventBus = getEventBus('default'); + const router = getRouter('default'); + + eventBus.subscribeEvent({ + type: USER_ADDED_EVENT_TYPE, + handler: userAddedEventHandler, + }); + + router.register( + ADD_USER_COMMAND_TYPE, + addUserCommandHandler, + addUserCommandSchema, + ); + + router.register( + GET_USER_QUERY_TYPE, + getUserQueryHandler, + getUserQuerySchema, + ); + router.register( + GET_USER_GROUPS_QUERY_TYPE, + getUserGroupsQueryHandler, + getUserGroupsQuerySchema, + ); +}; diff --git a/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts b/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts new file mode 100644 index 0000000..516c396 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts @@ -0,0 +1,48 @@ +import { MongoCollectionDefinition } from '@nimbus/mongodb'; + +export const USERS_COLLECTION: MongoCollectionDefinition = { + name: 'users', + options: { + validator: { + $jsonSchema: { + bsonType: 'object', + required: [ + 'email', + 'firstName', + 'lastName', + 'group', + 'createdAt', + 'updatedAt', + ], + properties: { + email: { + bsonType: 'string', + }, + firstName: { + bsonType: 'string', + }, + lastName: { + 
bsonType: 'string', + }, + group: { + bsonType: 'string', + }, + createdAt: { + bsonType: 'date', + }, + updatedAt: { + bsonType: 'date', + }, + }, + }, + }, + }, + indexes: [ + { key: { email: 1 }, unique: true }, + { key: { firstName: 1 } }, + { key: { lastName: 1 } }, + { key: { group: 1 } }, + { key: { createdAt: 1 } }, + { key: { updatedAt: 1 } }, + ], +}; diff --git a/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts b/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts new file mode 100644 index 0000000..fc1c3bd --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts @@ -0,0 +1,77 @@ +import { aggregate, MongoDBRepository } from '@nimbus/mongodb'; +import { getEnv } from '@nimbus/utils'; +import { Document, ObjectId } from 'mongodb'; +import { mongoManager } from '../../../../shared/shell/mongodb.ts'; +import { User } from '../../core/domain/user.ts'; +import { UserGroup } from '../../core/domain/userGroup.ts'; +import { USERS_COLLECTION } from './user.collection.ts'; + +class UserRepository extends MongoDBRepository<User> { + constructor() { + const env = getEnv({ variables: ['MONGO_DB'] }); + + super( + () => { + return mongoManager.getCollection( + env.MONGO_DB, + USERS_COLLECTION.name, + ); + }, + User, + 'User', + ); + } + + override _mapDocumentToEntity(doc: Document): User { + return User.parse({ + _id: doc._id.toString(), + email: doc.email, + firstName: doc.firstName, + lastName: doc.lastName, + group: doc.group, + createdAt: doc.createdAt.toISOString(), + updatedAt: doc.updatedAt.toISOString(), + }); + } + + override _mapEntityToDocument(user: User): Document { + return { + _id: new ObjectId(user._id), + email: user.email, + firstName: user.firstName, + lastName: user.lastName, + group: user.group, + createdAt: new Date(user.createdAt), + updatedAt: new Date(user.updatedAt), + }; + } + + public async getUserGroups(): Promise<UserGroup[]> { + const collection = await this._getCollection(); + + const result = await aggregate({ + collection, + aggregation: [ + { + $group: { + _id: '$group', + users: { $push: '$$ROOT' }, + }, + }, + ], + mapDocument: (doc: Document) => { + return { + name: doc._id, + users: doc.users.map((user: Document) => + this._mapDocumentToEntity(user) + ), + }; + }, + outputType: UserGroup, + }); + + return result; + } +} + +export const userRepository = new UserRepository(); diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts new file mode 100644 index 0000000..1f5be91 --- /dev/null +++ b/examples/hono-demo/src/main.ts @@ -0,0 +1,83 @@ +import { + getLogger, + jsonLogFormatter, + parseLogLevel, + prettyLogFormatter, + setupEventBus, + setupLogger, + setupRouter, +} from '@nimbus/core'; +import '@std/dotenv/load'; +import process from 'node:process'; +import { app } from './shared/shell/http.ts'; +import { initMessages } from './shared/shell/messages.ts'; +import { initMongoConnectionManager } from './shared/shell/mongodb.ts'; + +setupLogger({ + logLevel: parseLogLevel(process.env.LOG_LEVEL), + formatter: process.env.LOG_FORMAT === 'pretty' + ? prettyLogFormatter + : jsonLogFormatter, + useConsoleColors: process.env.LOG_FORMAT === 'pretty', +}); + +setupEventBus('default', { + maxRetries: 3, + baseDelay: 1000, + maxDelay: 30000, + useJitter: true, + logPublish: (event) => { + getLogger().debug({ + category: 'EventBus', + message: 'Published event', + data: { event }, + ...(event?.correlationid + ?
{ correlationId: event.correlationid } + : {}), + }); + }, +}); + +setupRouter('default', { + logInput: (input) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Received input', + data: { input }, + ...(input?.correlationid + ? { correlationId: input.correlationid } + : {}), + }); + }, + logOutput: (output) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Output', + data: { output }, + ...(output?.correlationid + ? { correlationId: output.correlationid } + : {}), + }); + }, +}); + +initMessages(); + +initMongoConnectionManager(); + +if (process.env.PORT) { + const port = Number.parseInt(process.env.PORT); + + Deno.serve({ hostname: '0.0.0.0', port }, app.fetch); + + getLogger().info({ + category: 'API', + message: `Started application on port ${port}`, + }); +} else { + getLogger().critical({ + category: 'API', + message: + `Could not start the application! Please define a valid port environment variable.`, + }); +} diff --git a/examples/the-expense/src/seedCollections.ts b/examples/hono-demo/src/seedCollections.ts similarity index 70% rename from examples/the-expense/src/seedCollections.ts rename to examples/hono-demo/src/seedCollections.ts index 02eec2b..0c96e02 100644 --- a/examples/the-expense/src/seedCollections.ts +++ b/examples/hono-demo/src/seedCollections.ts @@ -1,8 +1,11 @@ import { deployMongoCollection } from '@nimbus/mongodb'; -import 'jsr:@std/dotenv/load'; +import '@std/dotenv/load'; import process from 'node:process'; -import { ACCOUNT_COLLECTION } from './account/shell/account.collection.ts'; -import { initMongoConnectionManager, mongoManager } from './mongodb.ts'; +import { USERS_COLLECTION } from './iam/users/shell/mongodb/user.collection.ts'; +import { + initMongoConnectionManager, + mongoManager, +} from './shared/shell/mongodb.ts'; const { MONGO_DB } = process.env; @@ -15,7 +18,7 @@ try { deployMongoCollection({ mongoClient: mongoClient, dbName: MONGO_DB ?? 
'', - collectionDefinition: ACCOUNT_COLLECTION, + collectionDefinition: USERS_COLLECTION, allowUpdateIndexes: true, }), ]); diff --git a/examples/hono-demo/src/shared/shell/http.ts b/examples/hono-demo/src/shared/shell/http.ts new file mode 100644 index 0000000..37f872b --- /dev/null +++ b/examples/hono-demo/src/shared/shell/http.ts @@ -0,0 +1,29 @@ +import { correlationId, handleError, logger } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { compress } from 'hono/compress'; +import { cors } from 'hono/cors'; +import { secureHeaders } from 'hono/secure-headers'; +import usersRouter from '../../iam/users/shell/http/router.ts'; + +export const app = new Hono(); + +app.use(correlationId()); + +app.use(logger({ + enableTracing: true, + tracerName: 'api', +})); + +app.use(cors()); + +app.use(secureHeaders()); + +app.use(compress()); + +app.get('/health', (c) => { + return c.json({ status: 'ok' }); +}); + +app.route('/iam/users', usersRouter); + +app.onError(handleError); diff --git a/examples/hono-demo/src/shared/shell/messages.ts b/examples/hono-demo/src/shared/shell/messages.ts new file mode 100644 index 0000000..59879a5 --- /dev/null +++ b/examples/hono-demo/src/shared/shell/messages.ts @@ -0,0 +1,5 @@ +import { registerUserMessages } from '../../iam/users/shell/messages/registerUserMessages.ts'; + +export const initMessages = () => { + registerUserMessages(); +}; diff --git a/examples/hono-demo/src/shared/shell/mongodb.ts b/examples/hono-demo/src/shared/shell/mongodb.ts new file mode 100644 index 0000000..1dddf94 --- /dev/null +++ b/examples/hono-demo/src/shared/shell/mongodb.ts @@ -0,0 +1,37 @@ +import { getLogger } from '@nimbus/core'; +import { MongoConnectionManager } from '@nimbus/mongodb'; +import { ServerApiVersion } from 'mongodb'; +import process from 'node:process'; + +export const mongoManager = MongoConnectionManager.getInstance( + process.env['MONGO_URL'] ?? '', + { + connectionTimeout: 1000 * 60 * 5, + mongoClientOptions: { + appName: 'overtools', + serverApi: { + version: ServerApiVersion.v1, + strict: false, + deprecationErrors: true, + }, + maxPoolSize: 10, + minPoolSize: 0, + maxIdleTimeMS: 1000 * 60 * 1, // 1 minutes idle timeout + connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout + socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout + }, + }, +); + +export const initMongoConnectionManager = () => { + // Check to see if the MongoDB connection can be cleaned up + // This is to prevent the MongoDB connection from being left open for too long + setInterval(() => { + mongoManager.cleanup().catch((error) => { + getLogger().error({ + message: error.message, + error, + }); + }); + }, 1000 * 60); // Check every minute +}; diff --git a/examples/the-expense/start-with-otel.sh b/examples/hono-demo/start-with-otel.sh similarity index 69% rename from examples/the-expense/start-with-otel.sh rename to examples/hono-demo/start-with-otel.sh index 6f42d19..db328f1 100644 --- a/examples/the-expense/start-with-otel.sh +++ b/examples/hono-demo/start-with-otel.sh @@ -3,14 +3,14 @@ export OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" export OTEL_EXPORTER_OTLP_ENDPOINT="https://otlp-gateway-prod-eu-west-2.grafana.net/otlp" # Read OTLP headers from secret file -if [ -f "./.otel_token" ]; then +if [[ -f "./.otel_token" ]]; then export OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic $(cat ./.otel_token)" else - echo "Error: .otel_token file not found." + echo "Error: .otel_token file not found." 
>&2 exit 1 fi -export OTEL_SERVICE_NAME=nimbus-the-expense +export OTEL_SERVICE_NAME=nimbus-hono-demo export OTEL_RESOURCE_ATTRIBUTES=deployment.environment=development -deno run --unstable-otel -A src/main.ts +deno run -A src/main.ts diff --git a/examples/the-expense/README.md b/examples/the-expense/README.md deleted file mode 100644 index 98c6f13..0000000 --- a/examples/the-expense/README.md +++ /dev/null @@ -1,16 +0,0 @@ -Nimbus - -# The Expense - A Nimbus Demo App - -We build a little app to track our regular expenses. - -This application is NOT production ready, it contains some unsecure code. It is just a simple example to show how to build an application with Nimbus. - -## Start App - -``` -deno task dev -``` diff --git a/examples/the-expense/src/account/core/account.type.ts b/examples/the-expense/src/account/core/account.type.ts deleted file mode 100644 index 126a871..0000000 --- a/examples/the-expense/src/account/core/account.type.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { z } from 'zod'; - -export const AccountStatus = z.enum(['active', 'frozen']); -export type AccountStatus = z.infer<typeof AccountStatus>; - -export const Account = z.object({ - _id: z.string().length(24), - name: z.string(), - status: AccountStatus, -}); -export type Account = z.infer<typeof Account>; diff --git a/examples/the-expense/src/account/core/commands/addAccount.ts b/examples/the-expense/src/account/core/commands/addAccount.ts deleted file mode 100644 index 146852c..0000000 --- a/examples/the-expense/src/account/core/commands/addAccount.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { AuthContext, Command, InvalidInputException } from '@nimbus/core'; -import { ObjectId } from 'mongodb'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const AddAccountData = z.object({ - name: z.string(), -}); -export type AddAccountData = z.infer<typeof AddAccountData>; - -export const AddAccountCommand = Command( - z.literal('account.add'), - AddAccountData, - AuthContext, -); -export type AddAccountCommand = z.infer<typeof AddAccountCommand>; - -export const addAccount = ( - data: AddAccountData, - authContext?: AuthContext, -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - return { - _id: new ObjectId().toString(), - name: data.name, - status: 'active', - }; -}; diff --git a/examples/the-expense/src/account/core/commands/deleteAccount.ts b/examples/the-expense/src/account/core/commands/deleteAccount.ts deleted file mode 100644 index 75ada58..0000000 --- a/examples/the-expense/src/account/core/commands/deleteAccount.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { AuthContext, Command, InvalidInputException } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const DeleteAccountData = z.object({ - _id: z.string().length(24), -}); -export type DeleteAccountData = z.infer<typeof DeleteAccountData>; - -export const DeleteAccountCommand = Command( - z.literal('account.delete'), - DeleteAccountData, - AuthContext, -); -export type DeleteAccountCommand = z.infer<typeof DeleteAccountCommand>; - -export const deleteAccount = ( - account: Account, - authContext?: AuthContext, -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - return account; -}; diff --git a/examples/the-expense/src/account/core/events/accountAdded.ts b/examples/the-expense/src/account/core/events/accountAdded.ts deleted file mode 100644 index 187121f..0000000 --- a/examples/the-expense/src/account/core/events/accountAdded.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Event } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from
'../account.type.ts'; - -export const AccountAddedData = z.object({ - account: Account, -}); -export type AccountAddedData = z.infer; - -export const AccountAddedEvent = Event( - z.literal('account.added'), - AccountAddedData, -); -export type AccountAddedEvent = z.infer; diff --git a/examples/the-expense/src/account/core/queries/getAccount.ts b/examples/the-expense/src/account/core/queries/getAccount.ts deleted file mode 100644 index c2262a2..0000000 --- a/examples/the-expense/src/account/core/queries/getAccount.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { AuthContext, InvalidInputException, Query } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const GetAccountQuery = Query( - z.literal('account.get'), - z.object({ - id: z.string().length(24), - }), - AuthContext, -); -export type GetAccountQuery = z.infer; - -export const getAccount = ( - data: Account, - authContext?: AuthContext, -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - return data; -}; diff --git a/examples/the-expense/src/account/core/queries/listAccounts.ts b/examples/the-expense/src/account/core/queries/listAccounts.ts deleted file mode 100644 index c7dd9cf..0000000 --- a/examples/the-expense/src/account/core/queries/listAccounts.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { AuthContext, InvalidInputException, Query } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const ListAccountsQuery = Query( - z.literal('account.list'), - z.object({ - limit: z.string().optional(), - skip: z.string().optional(), - filter: z.string().optional(), - sortBy: z.string().optional(), - sortDir: z.enum(['asc', 'desc']).optional(), - }), - AuthContext, -); -export type ListAccountsQuery = z.infer; - -export const listAccounts = ( - data: Account[], - authContext?: AuthContext, -): Account[] => { - if (!authContext) { - throw new InvalidInputException(); - } - - return data; -}; diff --git a/examples/the-expense/src/account/shell/account.collection.ts b/examples/the-expense/src/account/shell/account.collection.ts deleted file mode 100644 index 6d9411c..0000000 --- a/examples/the-expense/src/account/shell/account.collection.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { MongoCollectionDefinition } from '@nimbus/mongodb'; - -export const ACCOUNT_COLLECTION: MongoCollectionDefinition = { - name: 'accounts', - options: { - validator: { - $jsonSchema: { - bsonType: 'object', - required: [ - 'name', - 'status', - ], - properties: { - name: { - bsonType: 'string', - }, - status: { - bsonType: 'string', - enum: ['active', 'archived'], - }, - }, - }, - }, - }, - indexes: [ - { key: { name: 1 }, unique: true }, - { key: { status: 1 } }, - ], -}; diff --git a/examples/the-expense/src/account/shell/account.eventBus.ts b/examples/the-expense/src/account/shell/account.eventBus.ts deleted file mode 100644 index 8c3a2a7..0000000 --- a/examples/the-expense/src/account/shell/account.eventBus.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { RouteHandlerMap } from '@nimbus/core'; -import { AccountAddedEvent } from '../core/events/accountAdded.ts'; -import { accountAddedHandler } from './events/accountAdded.handler.ts'; - -export const accountEventSubscriptions: RouteHandlerMap = { - 'account.added': { - handler: accountAddedHandler, - inputType: AccountAddedEvent, - }, -}; diff --git a/examples/the-expense/src/account/shell/account.repository.ts b/examples/the-expense/src/account/shell/account.repository.ts deleted file mode 
100644 index 6e31438..0000000 --- a/examples/the-expense/src/account/shell/account.repository.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { MongoDBRepository } from '@nimbus/mongodb'; -import { getEnv } from '@nimbus/utils'; -import { Document, ObjectId } from 'mongodb'; -import { mongoManager } from '../../mongodb.ts'; -import { Account } from '../core/account.type.ts'; -import { ACCOUNT_COLLECTION } from './account.collection.ts'; - -class AccountRepository extends MongoDBRepository { - constructor() { - const env = getEnv({ variables: ['MONGO_DB'] }); - - super( - () => { - return mongoManager.getCollection( - env.MONGO_DB, - ACCOUNT_COLLECTION.name, - ); - }, - Account, - 'Account', - ); - } - - override _mapDocumentToEntity(doc: Document): Account { - return Account.parse({ - _id: doc._id.toString(), - name: doc.name, - status: doc.status, - }); - } - - override _mapEntityToDocument(client: Account): Document { - return { - _id: new ObjectId(client._id), - name: client.name, - status: client.status, - }; - } -} - -export const accountRepository = new AccountRepository(); diff --git a/examples/the-expense/src/account/shell/account.router.ts b/examples/the-expense/src/account/shell/account.router.ts deleted file mode 100644 index e69d44b..0000000 --- a/examples/the-expense/src/account/shell/account.router.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { NimbusOakRouter } from '@nimbus/oak'; -import { AddAccountCommand } from '../core/commands/addAccount.ts'; -import { DeleteAccountCommand } from '../core/commands/deleteAccount.ts'; -import { GetAccountQuery } from '../core/queries/getAccount.ts'; -import { ListAccountsQuery } from '../core/queries/listAccounts.ts'; -import { addAccountHandler } from './commands/addAccount.handler.ts'; -import { deleteAccountHandler } from './commands/deleteAccount.handler.ts'; -import { getAccountHandler } from './queries/getAccount.handler.ts'; -import { listAccountsHandler } from './queries/listAccounts.handler.ts'; - -export const accountRouter = new NimbusOakRouter(); - -accountRouter.query( - '/', - 'account.list', - ListAccountsQuery, - listAccountsHandler, -); - -accountRouter.query( - '/:id', - 'account.get', - GetAccountQuery, - getAccountHandler, -); - -accountRouter.command( - '/add-account', - 'account.add', - AddAccountCommand, - addAccountHandler, -); - -accountRouter.command( - '/delete-account', - 'account.delete', - DeleteAccountCommand, - deleteAccountHandler, -); diff --git a/examples/the-expense/src/account/shell/commands/addAccount.handler.ts b/examples/the-expense/src/account/shell/commands/addAccount.handler.ts deleted file mode 100644 index 1d743b8..0000000 --- a/examples/the-expense/src/account/shell/commands/addAccount.handler.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { InvalidInputException, type RouteHandler } from '@nimbus/core'; -import { ulid } from '@std/ulid'; -import { eventBus } from '../../../eventBus.ts'; -import { Account } from '../../core/account.type.ts'; -import { - addAccount, - AddAccountCommand, -} from '../../core/commands/addAccount.ts'; -import { AccountAddedEvent } from '../../core/events/accountAdded.ts'; -import { accountRepository } from '../account.repository.ts'; - -export const addAccountHandler: RouteHandler = async ( - command: AddAccountCommand, -) => { - let account = addAccount( - command.data.payload, - command.data.authContext, - ); - - try { - account = await accountRepository.insertOne({ item: account }); - } catch (error: any) { - if (error.message.startsWith('E11000')) { - throw new 
InvalidInputException( - 'Account already exists', - { - errorCode: 'ACCOUNT_ALREADY_EXISTS', - reason: 'An account with the same name already exists', - }, - ); - } - - throw error; - } - - eventBus.putEvent({ - specversion: '1.0', - id: ulid(), - source: command.source, - type: 'account.added', - data: { - correlationId: command.data.correlationId, - payload: { - account: account, - }, - }, - }); - - return { - statusCode: 200, - data: account, - }; -}; diff --git a/examples/the-expense/src/account/shell/commands/deleteAccount.handler.ts b/examples/the-expense/src/account/shell/commands/deleteAccount.handler.ts deleted file mode 100644 index 6fa0e4b..0000000 --- a/examples/the-expense/src/account/shell/commands/deleteAccount.handler.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { type RouteHandler } from '@nimbus/core'; -import { ObjectId } from 'mongodb'; -import { - deleteAccount, - DeleteAccountCommand, -} from '../../core/commands/deleteAccount.ts'; -import { accountRepository } from '../account.repository.ts'; - -export const deleteAccountHandler: RouteHandler = async ( - command: DeleteAccountCommand, -) => { - let account = await accountRepository.findOne({ - filter: { - _id: new ObjectId(command.data.payload._id), - }, - }); - - account = deleteAccount(account, command.data.authContext); - - await accountRepository.deleteOne({ item: account }); - - return { - statusCode: 204, - }; -}; diff --git a/examples/the-expense/src/account/shell/events/accountAdded.handler.ts b/examples/the-expense/src/account/shell/events/accountAdded.handler.ts deleted file mode 100644 index bab3e39..0000000 --- a/examples/the-expense/src/account/shell/events/accountAdded.handler.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { getLogger, RouteHandler } from '@nimbus/core'; -import { - AccountAddedData, - AccountAddedEvent, -} from '../../core/events/accountAdded.ts'; - -export const accountAddedHandler: RouteHandler< - AccountAddedEvent, - AccountAddedData -> = async ( - event, -) => { - await new Promise((resolve) => setTimeout(resolve, 1000)); - - getLogger().info({ - message: `New account was added: ${event.data.payload.account.name}`, - }); - - return { - statusCode: 200, - data: event.data.payload, - }; -}; diff --git a/examples/the-expense/src/account/shell/queries/getAccount.handler.ts b/examples/the-expense/src/account/shell/queries/getAccount.handler.ts deleted file mode 100644 index d5a8eee..0000000 --- a/examples/the-expense/src/account/shell/queries/getAccount.handler.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { RouteHandler } from '@nimbus/core'; -import { ObjectId } from 'mongodb'; -import { Account } from '../../core/account.type.ts'; -import { getAccount, GetAccountQuery } from '../../core/queries/getAccount.ts'; -import { accountRepository } from '../account.repository.ts'; - -export const getAccountHandler: RouteHandler< - GetAccountQuery, - Account -> = async (query) => { - let account = await accountRepository.findOne({ - filter: { _id: new ObjectId(query.data.payload.id) }, - }); - - account = getAccount(account, query.data.authContext); - - return { - statusCode: 200, - data: account, - }; -}; diff --git a/examples/the-expense/src/account/shell/queries/listAccounts.handler.ts b/examples/the-expense/src/account/shell/queries/listAccounts.handler.ts deleted file mode 100644 index 34cb18f..0000000 --- a/examples/the-expense/src/account/shell/queries/listAccounts.handler.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { RouteHandler } from '@nimbus/core'; -import { MongoJSON } from '@nimbus/mongodb'; 
-import type { WithPagination } from '../../../shared/withPagination.type.ts'; -import { Account } from '../../core/account.type.ts'; -import { - listAccounts, - ListAccountsQuery, -} from '../../core/queries/listAccounts.ts'; -import { accountRepository } from '../account.repository.ts'; - -export const listAccountsHandler: RouteHandler< - ListAccountsQuery, - WithPagination -> = async (query) => { - const params = query.data.payload; - const limit = parseInt(params.limit ?? '24'); - const skip = parseInt(params.skip ?? '0'); - const filter = MongoJSON.parse(params.filter ?? '{}'); - - let [accounts, total] = await Promise.all([ - accountRepository.find({ - filter, - limit, - skip, - sort: { - [params.sortBy ?? 'createdAt']: params.sortDir ?? - 'asc', - }, - }), - - accountRepository.countDocuments({ - filter, - }), - ]); - - accounts = listAccounts(accounts, query.data.authContext); - - return { - statusCode: 200, - data: { - limit, - skip, - total, - items: accounts, - }, - }; -}; diff --git a/examples/the-expense/src/auth/shell/auth.middleware.ts b/examples/the-expense/src/auth/shell/auth.middleware.ts deleted file mode 100644 index fa8bcb8..0000000 --- a/examples/the-expense/src/auth/shell/auth.middleware.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { AuthContext, getLogger } from '@nimbus/core'; -import type { Context } from '@oak/oak/context'; -import type { Next } from '@oak/oak/middleware'; - -/** - * ! NOT FOR PRODUCTION USE - * - * This is just a simple example of how to implement a middleware for authentication. - */ -export const exampleAuthMiddleware = async ( - ctx: Context, - next: Next, -) => { - const authorization = ctx.request.headers.get('authorization'); - - if (!authorization) { - const anonymousAuthContext: AuthContext = { - sub: 'anonymous', - groups: [], - }; - - ctx.state.authContext = anonymousAuthContext; - - await next(); - } else { - try { - const token = authorization?.replace('Bearer ', ''); - - if (token === 'very-special-secret') { - const adminAuthContext: AuthContext = { - sub: '02e50464-b051-70fa-25ef-63038890d80c', - groups: ['admin'], - }; - - ctx.state.authContext = adminAuthContext; - } else { - throw new Error('Invalid token'); - } - - await next(); - } catch (error: any) { - getLogger().error({ - message: 'Failed to authenticate user', - error, - }); - - ctx.response.status = 401; - ctx.response.body = { - message: 'Unauthorized', - }; - } - } -}; diff --git a/examples/the-expense/src/eventBus.ts b/examples/the-expense/src/eventBus.ts deleted file mode 100644 index d24f6a2..0000000 --- a/examples/the-expense/src/eventBus.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { NimbusEventBus, RouteHandlerMap } from '@nimbus/core'; -import { accountEventSubscriptions } from './account/shell/account.eventBus.ts'; - -export const eventBus = new NimbusEventBus({ - maxRetries: 3, -}); - -export const initEventBusSubscriptions = () => { - const subscriptions: Record = { - account: accountEventSubscriptions, - }; - - for (const [, handlerMap] of Object.entries(subscriptions)) { - for (const eventName of Object.keys(handlerMap)) { - eventBus.subscribeEvent( - eventName, - handlerMap[eventName].inputType, - handlerMap[eventName].handler, - ); - } - } -}; diff --git a/examples/the-expense/src/main.ts b/examples/the-expense/src/main.ts deleted file mode 100644 index fef2fd8..0000000 --- a/examples/the-expense/src/main.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { - jsonLogFormatter, - parseLogLevel, - prettyLogFormatter, - setupLogger, -} from '@nimbus/core'; -import { 
requestCorrelationId } from '@nimbus/oak'; -import { Application } from '@oak/oak/application'; -import { oakCors } from '@tajpouria/cors'; -import 'jsr:@std/dotenv/load'; -import process from 'node:process'; -import { exampleAuthMiddleware } from './auth/shell/auth.middleware.ts'; -import { initEventBusSubscriptions } from './eventBus.ts'; -import { initMongoConnectionManager } from './mongodb.ts'; -import { router } from './router.ts'; - -// -// Setup logging with basic options provided by Nimbus -// -// See https://nimbus.overlap.at/guide/logging.html for more information about logging of Nimbus. -// -setupLogger({ - logLevel: parseLogLevel(process.env.LOG_LEVEL), - formatter: process.env.LOG_FORMAT === 'pretty' - ? prettyLogFormatter - : jsonLogFormatter, - useConsoleColors: process.env.LOG_FORMAT === 'pretty', -}); - -// Initialize MongoDB Manager -initMongoConnectionManager(); - -// Initialize Event Bus Subscriptions -initEventBusSubscriptions(); - -// Oak HTTP Server APP -const app = new Application(); - -app.addEventListener('listen', ({ hostname, port, secure }) => { - console.log( - `Listening on: ${secure ? 'https://' : 'http://'}${ - hostname ?? 'localhost' - }:${port}`, - ); -}); - -// CORS Middleware -app.use(oakCors()); - -// Correlation ID Middleware -app.use(requestCorrelationId); - -// Auth Middleware -app.use(exampleAuthMiddleware); - -// API Routes -app.use(router.routes()); -app.use(router.allowedMethods()); - -// Get the server started -app.listen({ port: 3100 }); diff --git a/examples/the-expense/src/mongodb.ts b/examples/the-expense/src/mongodb.ts deleted file mode 100644 index 5f81ced..0000000 --- a/examples/the-expense/src/mongodb.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { MongoConnectionManager } from '@nimbus/mongodb'; -import { ServerApiVersion } from 'mongodb'; -import process from 'node:process'; - -export let mongoManager: MongoConnectionManager; - -export const initMongoConnectionManager = () => { - mongoManager = MongoConnectionManager.getInstance( - process.env['MONGO_URL'] ?? '', - { - connectionTimeout: 1000 * 60 * 5, - mongoClientOptions: { - appName: 'the-expanse', - serverApi: { - version: ServerApiVersion.v1, - strict: false, - deprecationErrors: true, - }, - maxPoolSize: 10, - minPoolSize: 0, - maxIdleTimeMS: 1000 * 60 * 1, // 1 minutes idle timeout - connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout - socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout - }, - }, - ); - - // Check to see if the MongoDB connection can be cleaned up - // This is to prevent the MongoDB connection from being left open for too long - setInterval(() => { - mongoManager.cleanup().catch(console.error); - }, 1000 * 60); // Check every minute -}; diff --git a/examples/the-expense/src/router.ts b/examples/the-expense/src/router.ts deleted file mode 100644 index fb1c189..0000000 --- a/examples/the-expense/src/router.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { getLogger } from '@nimbus/core'; -import { NimbusOakRouter } from '@nimbus/oak'; -import { accountRouter } from './account/shell/account.router.ts'; -import { mongoManager } from './mongodb.ts'; - -export const router = new NimbusOakRouter(); - -router.get('/health', async (ctx) => { - const logger = getLogger(); - const now = new Date().toISOString(); - - const mongoHealth = await mongoManager.healthCheck(); - - logger.info({ - message: 'Health check', - data: { - time: now, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? 
{ correlationId: ctx.state.correlationId } - : {}), - ...(ctx.state.authContext - ? { authContext: ctx.state.authContext } - : {}), - }, - }); - - ctx.response.body = { - status: mongoHealth.status === 'healthy' ? 'OK' : 'ERROR', - http: { - status: 'healthy', - }, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? { correlationId: ctx.state.correlationId } - : {}), - time: now, - }; -}); - -router.use( - '/accounts', - accountRouter.routes(), - accountRouter.allowedMethods(), -); diff --git a/examples/the-expense/src/shared/withPagination.type.ts b/examples/the-expense/src/shared/withPagination.type.ts deleted file mode 100644 index f069ae2..0000000 --- a/examples/the-expense/src/shared/withPagination.type.ts +++ /dev/null @@ -1,6 +0,0 @@ -export type WithPagination = { - limit: number; - skip: number; - total: number; - items: TItems[]; -}; diff --git a/packages/core/README.md b/packages/core/README.md index 91635a3..d08f588 100644 --- a/packages/core/README.md +++ b/packages/core/README.md @@ -18,106 +18,25 @@ For detailed documentation, please refer to the [Nimbus documentation](https://n ## Command ```typescript -import { AuthContext, Command } from "@nimbus/core"; -import { z } from "zod"; - -export const AddAccountData = z.object({ - name: z.string(), -}); -export type AddAccountData = z.infer; - -export const AddAccountCommand = Command( - z.literal("account.add"), - AddAccountData, - AuthContext -); -export type AddAccountCommand = z.infer; + ``` ## Query ```typescript -import { AuthContext, Query } from "@nimbus/core"; -import { z } from "zod"; - -export const GetAccountQuery = Query( - z.literal("account.get"), - z.object({ - id: z.string().length(24), - }), - AuthContext -); -export type GetAccountQuery = z.infer; + ``` ## Event ```typescript -import { Event } from "@nimbus/core"; -import { z } from "zod"; -import { Account } from "../account.type.ts"; - -export const AccountAddedData = z.object({ - account: Account, -}); -export type AccountAddedData = z.infer; - -export const AccountAddedEvent = Event( - z.literal("account.added"), - AccountAddedData -); -export type AccountAddedEvent = z.infer; + ``` ## Router ```typescript -import { createRouter } from "@nimbus/core"; - -// ... - -const accountRouter = createRouter({ - handlerMap: { - "account.get": { - handler: getAccountHandler, - inputType: GetAccountQuery, - }, - "account.add": { - handler: addAccountHandler, - inputType: AddAccountCommand, - }, - }, -}); -``` - -## EventBus -```typescript -import { NimbusEventBus } from "@nimbus/core"; - -// ... 
- -export const eventBus = new NimbusEventBus({ - maxRetries: 3, - retryDelay: 3000, -}); - -eventBus.subscribeEvent( - "account.added", - AccountAddedEvent, - accountAddedHandler -); - -eventBus.putEvent({ - specversion: "1.0", - id: "123", - source: command.source, - type: "account.added", - data: { - correlationId: command.metadata.correlationId, - payload: { account: account }, - }, -}); ``` # License diff --git a/packages/core/deno.json b/packages/core/deno.json index 28ad3dc..d735a70 100644 --- a/packages/core/deno.json +++ b/packages/core/deno.json @@ -13,7 +13,9 @@ "homepage": "https://nimbus.overlap.at", "exports": "./src/index.ts", "fmt": { - "include": ["src/"], + "include": [ + "src/" + ], "useTabs": false, "lineWidth": 80, "indentWidth": 4, @@ -22,17 +24,25 @@ "proseWrap": "always" }, "lint": { - "include": ["src/"], + "include": [ + "src/" + ], "rules": { - "exclude": ["no-explicit-any", "no-slow-types"] + "exclude": [ + "no-explicit-any" + ] } }, "test": { - "include": ["src/"] + "include": [ + "src/" + ] }, "imports": { + "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0", "@std/assert": "jsr:@std/assert@^1.0.10", "@std/fmt": "jsr:@std/fmt@^1.0.5", - "zod": "npm:zod@^3.24.1" + "@std/ulid": "jsr:@std/ulid@^1.0.0", + "zod": "npm:zod@^4.3.5" } -} +} \ No newline at end of file diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index cdf9ef0..1f5c229 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,10 +1,17 @@ -export * from './lib/authContext.ts'; -export * from './lib/cloudEvent/cloudEvent.ts'; -export * from './lib/command/command.ts'; -export * from './lib/event/event.ts'; -export * from './lib/event/eventBus.ts'; -export * from './lib/exception/index.ts'; -export * from './lib/log/index.ts'; -export * from './lib/messageEnvelope.ts'; -export * from './lib/query/query.ts'; -export * from './lib/router/index.ts'; +export * from './lib/eventBus/eventBus.ts'; +export * from './lib/exception/exception.ts'; +export * from './lib/exception/forbiddenException.ts'; +export * from './lib/exception/genericException.ts'; +export * from './lib/exception/invalidInputException.ts'; +export * from './lib/exception/notFoundException.ts'; +export * from './lib/exception/unauthorizedException.ts'; +export * from './lib/log/logFormatter.ts'; +export * from './lib/log/logger.ts'; +export * from './lib/log/logLevel.ts'; +export * from './lib/log/options.ts'; +export * from './lib/message/command.ts'; +export * from './lib/message/event.ts'; +export * from './lib/message/message.ts'; +export * from './lib/message/query.ts'; +export * from './lib/message/router.ts'; +export * from './lib/tracing/withSpan.ts'; diff --git a/packages/core/src/lib/authContext.ts b/packages/core/src/lib/authContext.ts deleted file mode 100644 index cff9798..0000000 --- a/packages/core/src/lib/authContext.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { z } from 'zod'; - -/** - * Zod schema for the AuthContext. - * - * This is a default AuthContext to store some basic information - * about a user triggering a command, query or event. - * - * Feel free to define and use your own AuthContext with more detailed - * information or a policy attached to handle access control. - */ -export const AuthContext = z.object({ - sub: z.string(), - groups: z.array(z.string()), -}); - -/** - * The AuthContext type. 
- */ -export type AuthContext = z.infer; diff --git a/packages/core/src/lib/cloudEvent/absoluteUri.ts b/packages/core/src/lib/cloudEvent/absoluteUri.ts deleted file mode 100644 index 29c8e4b..0000000 --- a/packages/core/src/lib/cloudEvent/absoluteUri.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { z } from 'zod'; - -/** - * Validation for absolute URIs - * based on the RFC 3986 specification. - */ -export const absoluteUri = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - try { - new URL(value); - return true; - } catch { - return false; - } -}, { - message: - 'Must be a valid absolute URI according to RFC 3986. See https://datatracker.ietf.org/doc/html/rfc3986#section-4.3', -}); diff --git a/packages/core/src/lib/cloudEvent/cloudEvent.ts b/packages/core/src/lib/cloudEvent/cloudEvent.ts deleted file mode 100644 index ae412af..0000000 --- a/packages/core/src/lib/cloudEvent/cloudEvent.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { z, type ZodType } from 'zod'; -import { absoluteUri } from '../cloudEvent/absoluteUri.ts'; -import { mediaType } from '../cloudEvent/mediaType.ts'; -import { uriReference } from '../cloudEvent/uriReference.ts'; -import { timeRFC3339 } from './timeRFC3339.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the CloudEvent object. - * - * Nimbus respects the CloudEvents specifications - * for messages like commands, queries and events. - * - * https://cloudevents.io/ - */ -export const CloudEvent = < - TType extends ZodType, - TData extends ZodType, ->( - typeType: TType, - dataType: TData, -) => { - return z.object({ - specversion: z.literal('1.0'), - id: z.string().min(1), - source: uriReference, - type: typeType, - data: dataType, - subject: z.string().min(1).optional(), - time: timeRFC3339.optional(), - datacontenttype: mediaType.optional(), - dataschema: absoluteUri.optional(), - }); -}; - -/** - * Inference type to create the CloudEvent type. - */ -type CloudEventType< - TType extends ZodType, - TData extends ZodType, -> = ReturnType>; - -/** - * The type of the CloudEvent object. - * - * Nimbus respects the CloudEvents specifications - * for messages like commands, queries and events. - * - * https://cloudevents.io/ - */ -export type CloudEvent = z.infer< - CloudEventType, ZodType> ->; diff --git a/packages/core/src/lib/cloudEvent/mediaType.ts b/packages/core/src/lib/cloudEvent/mediaType.ts deleted file mode 100644 index 3fdab2c..0000000 --- a/packages/core/src/lib/cloudEvent/mediaType.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { z } from 'zod'; - -/** - * MIME Media Type validation schema - * Validates media types according to RFC 2046 specification - * which defines the structure of MIME media types. 
- * - * Format: type/subtype[; parameter=value] - * Examples: - * - application/json - * - text/plain - * - text/plain; charset=utf-8 - * - application/cloudevents+json - * - multipart/form-data; boundary=something - */ -export const mediaType = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - // RFC 2046 media type regex pattern - // Matches: type/subtype with optional parameters - // type = discrete-type / composite-type / extension-token - // subtype = extension-token - // parameter = attribute "=" value - const mediaTypeRegex = - /^([a-zA-Z][a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*|[xX]-[a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*)\/([a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*|[xX]-[a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*)(\s*;\s*[a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*\s*=\s*([a-zA-Z0-9!#$&\-^_]+|"[^"]*"))*$/; - - if (!mediaTypeRegex.test(value)) { - return false; - } - - // Split type and subtype - const parts = value.split('/'); - if (parts.length < 2) { - return false; - } - - const [type, subtypeAndParams] = parts; - const subtype = subtypeAndParams.split(';')[0].trim(); - - // Validate known discrete types - const discreteTypes = ['text', 'image', 'audio', 'video', 'application']; - const compositeTypes = ['message', 'multipart']; - const knownTypes = [...discreteTypes, ...compositeTypes]; - - // Allow extension types (starting with x- or X-) or known types - const isValidType = knownTypes.includes(type.toLowerCase()) || - /^[xX]-/.test(type); - - return isValidType && subtype.length > 0; -}, { - message: - 'Must be a valid MIME media type (e.g., "application/json", "text/plain; charset=utf-8"). See https://datatracker.ietf.org/doc/html/rfc2046', -}); diff --git a/packages/core/src/lib/cloudEvent/timeRFC3339.ts b/packages/core/src/lib/cloudEvent/timeRFC3339.ts deleted file mode 100644 index 98263ba..0000000 --- a/packages/core/src/lib/cloudEvent/timeRFC3339.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { z } from 'zod'; - -/** - * RFC 3339 date-time validation schema - * Validates timestamps according to RFC 3339 specification - * which is a profile of ISO 8601 for Internet protocols. - * - * Format: YYYY-MM-DDTHH:MM:SSZ or YYYY-MM-DDTHH:MM:SS±HH:MM - * Examples: - * - 2018-04-05T17:31:00Z - * - 2018-04-05T17:31:00.123Z - * - 2018-04-05T17:31:00+01:00 - * - 2018-04-05T17:31:00.123-05:00 - */ -export const timeRFC3339 = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - // RFC 3339 regex pattern - // Matches: YYYY-MM-DDTHH:MM:SS[.fff]Z or YYYY-MM-DDTHH:MM:SS[.fff]±HH:MM - const rfc3339Regex = - /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})$/; - - if (!rfc3339Regex.test(value)) { - return false; - } - - // Additional validation using Date constructor to catch invalid dates - // Check if the date is valid (this catches cases like February 30th) - try { - const date = new Date(value); - return !isNaN(date.getTime()); - } catch { - return false; - } -}, { - message: - 'Must be a valid RFC 3339 timestamp (e.g., "2018-04-05T17:31:00Z" or "2018-04-05T17:31:00+01:00"). 
See https://datatracker.ietf.org/doc/html/rfc3339', -}); diff --git a/packages/core/src/lib/cloudEvent/uriReference.ts b/packages/core/src/lib/cloudEvent/uriReference.ts deleted file mode 100644 index a295f87..0000000 --- a/packages/core/src/lib/cloudEvent/uriReference.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { z } from 'zod'; - -/** - * URI-reference validation schema - * Validates both absolute URIs and relative references - * based on the RFC 3986 specification. - */ -export const uriReference = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - try { - new URL(value); - return true; - } catch { - const relativeUriReferenceRegex = - /^([a-zA-Z][a-zA-Z0-9+.-]*:)?\/\/[^\s]*$|^[^\s]*$/; - return relativeUriReferenceRegex.test(value) && value.length > 0; - } -}, { - message: - 'Must be a valid URI-reference according to RFC 3986. See https://datatracker.ietf.org/doc/html/rfc3986#section-4.1', -}); diff --git a/packages/core/src/lib/command/command.ts b/packages/core/src/lib/command/command.ts deleted file mode 100644 index 4c9f9d7..0000000 --- a/packages/core/src/lib/command/command.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { z, ZodType } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { MessageEnvelope } from '../messageEnvelope.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the Command object. - */ -export const Command = < - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, ->( - typeType: TType, - dataType: TData, - authContextType: TAuthContext, -) => { - return CloudEvent( - typeType, - MessageEnvelope(dataType, authContextType), - ); -}; - -/** - * Inference type to create the Command type. - */ -type CommandType< - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, -> = ReturnType>; - -/** - * The type of the Command object. - */ -export type Command = z.infer< - CommandType, ZodType, ZodType> ->; diff --git a/packages/core/src/lib/event/event.ts b/packages/core/src/lib/event/event.ts deleted file mode 100644 index 21b5752..0000000 --- a/packages/core/src/lib/event/event.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { z, type ZodType } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { MessageEnvelope } from '../messageEnvelope.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the Event object. - */ -export const Event = < - TType extends ZodType, - TData extends ZodType, ->( - typeType: TType, - dataType: TData, -) => { - return CloudEvent( - typeType, - MessageEnvelope(dataType, z.never()), - ); -}; - -/** - * Inference type to create the Event type. - */ -type EventType< - TType extends ZodType, - TData extends ZodType, -> = ReturnType>; - -/** - * The type of the Event object. 
- */ -export type Event = z.infer< - EventType, ZodType> ->; diff --git a/packages/core/src/lib/event/eventBus.test.ts b/packages/core/src/lib/event/eventBus.test.ts deleted file mode 100644 index 2c95fae..0000000 --- a/packages/core/src/lib/event/eventBus.test.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { assertInstanceOf } from '@std/assert'; -import { GenericException } from '../exception/genericException.ts'; -import type { Event } from './event.ts'; -import { NimbusEventBus } from './eventBus.ts'; - -Deno.test('EventBus rejects event that exceeds the 64KB size limit', () => { - const eventBus = new NimbusEventBus({ - maxRetries: 3, - }); - - const event: Event = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'oversized.event', - data: { - correlationId: '123', - payload: { - bigData: 'x'.repeat(65 * 1024), - }, - }, - }; - - let exception: any; - try { - eventBus.putEvent(event); - } catch (ex: any) { - exception = ex; - } - - assertInstanceOf(exception, GenericException); -}); diff --git a/packages/core/src/lib/event/eventBus.ts b/packages/core/src/lib/event/eventBus.ts deleted file mode 100644 index 047941d..0000000 --- a/packages/core/src/lib/event/eventBus.ts +++ /dev/null @@ -1,241 +0,0 @@ -import { - createRouter, - GenericException, - getLogger, - type RouteHandler, - type Router, -} from '@nimbus/core'; -import EventEmitter from 'node:events'; -import type { ZodType } from 'zod'; -import type { CloudEvent } from '../cloudEvent/cloudEvent.ts'; - -export type NimbusEventBusOptions = { - maxRetries?: number; - retryDelay?: number; -}; - -/** - * The NimbusEventBus is used to publish and - * subscribe to events within the application. - * - * @example - * ```ts - * export const eventBus = new NimbusEventBus({ - * maxRetries: 3, - * retryDelay: 3000, - * }); - * - * eventBus.subscribeEvent( - * 'account.added', - * AccountAddedEvent, - * accountAddedHandler, - * ); - * - * eventBus.putEvent({ - * specversion: '1.0', - * id: '123', - * source: 'https://nimbus.overlap.at/account/add-account', - * type: 'account.added', - * data: { - * correlationId: command.metadata.correlationId, - * payload: { account: account }, - * }, - * }); - * ``` - */ -export class NimbusEventBus { - private _eventEmitter: EventEmitter; - private _maxRetries: number; - private _retryDelay: number; - - /** - * Create a new NimbusEventBus instance. - * - * @param {NimbusEventBusOptions} [options] - The options for the event bus. - * @param {number} [options.maxRetries] - The maximum number of retries for handling the event in case of an error. - * @param {number} [options.retryDelay] - The delay between retries in milliseconds. - * - * @example - * ```ts - * const eventBus = new NimbusEventBus({ - * maxRetries: 3, - * retryDelay: 3000, - * }); - * ``` - */ - constructor(options?: NimbusEventBusOptions) { - this._eventEmitter = new EventEmitter(); - - this._maxRetries = options?.maxRetries ?? 2; - this._retryDelay = options?.retryDelay ?? 1000; - } - - /** - * Publish an event to the event bus. - * - * @param event - The event to send to the event bus. 
- * - * @example - * ```ts - * eventBus.putEvent({ - * specversion: '1.0', - * id: '123', - * source: 'https://nimbus.overlap.at/api/account/add', - * type: 'account.added', - * data: { - * correlationId: command.metadata.correlationId, - * payload: { account: account }, - * }, - * }); - * ``` - */ - public putEvent>( - event: TEvent, - ): void { - this._validateEventSize(event); - - this._eventEmitter.emit(event.type, event); - } - - /** - * Subscribe to an event. - * - * @param {string} eventType - The type of event to subscribe to. - * @param {ZodType} eventSchema - The schema used for validation of the event to subscribe to. - * @param {RouteHandler} handler - The handler to call when the event got published. - * @param {Function} [onError] - The function to call when the event could not be handled after the maximum number of retries. - * @param {NimbusEventBusOptions} [options] - The options for the event bus. - * @param {number} [options.maxRetries] - The maximum number of retries for handling the event in case of an error. - * @param {number} [options.retryDelay] - The delay between retries in milliseconds. - * - * @example - * ```ts - * eventBus.subscribeEvent( - * 'account.added', - * AccountAddedEvent, - * accountAddedHandler, - * ); - * ``` - */ - public subscribeEvent( - eventType: string, - eventSchema: ZodType, - handler: RouteHandler, - onError?: (error: any, event: CloudEvent) => void, - options?: NimbusEventBusOptions, - ): void { - getLogger().info({ - category: 'Nimbus', - message: `Subscribed to ${eventType} event`, - }); - - const maxRetries = options?.maxRetries ?? this._maxRetries; - const retryDelay = options?.retryDelay ?? this._retryDelay; - - const nimbusRouter = createRouter({ - handlerMap: { - [eventType]: { - handler, - inputType: eventSchema, - }, - }, - inputLogFunc: this._logInput, - }); - - const handleEvent = async (event: CloudEvent) => { - try { - await this._processEvent( - nimbusRouter, - event, - maxRetries, - retryDelay, - ); - } catch (error: any) { - if (onError) { - onError(error, event); - } else { - getLogger().error({ - category: 'Nimbus', - message: error.message, - error, - }); - } - } - }; - - this._eventEmitter.on(eventType, handleEvent); - } - - private _logInput(input: any) { - getLogger().info({ - category: 'Nimbus', - ...(input?.data?.correlationId && { - correlationId: input?.data?.correlationId, - }), - message: - `${input?.data?.correlationId} - [Event] ${input?.type} from ${input?.source}`, - }); - } - - private async _processEvent( - nimbusRouter: Router, - event: CloudEvent, - maxRetries: number, - retryDelay: number, - ) { - let attempt = -1; - - while (attempt < maxRetries) { - try { - await nimbusRouter(event); - break; - } catch (error: any) { - attempt++; - - if (attempt >= maxRetries) { - const exception = new GenericException( - `Failed to handle event: ${event.type} from ${event.source}`, - { - retryAttempts: maxRetries, - retryDelay: retryDelay, - }, - ); - - if (error.stack) { - exception.stack = error.stack; - } - - throw exception; - } - - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - } - } - } - - /** - * Validate the size of the event. - * - * To comply with the CloudEvent spec a transmitted event - * can not have a maximum size of 64KB. - * - * @param event - The event to validate. 
- */ - private _validateEventSize(event: CloudEvent): void { - const eventJson = JSON.stringify(event); - const eventSizeBytes = new TextEncoder().encode(eventJson).length; - const maxSizeBytes = 64 * 1024; // 64KB - - if (eventSizeBytes > maxSizeBytes) { - throw new GenericException( - `Event size exceeds the limit of 64KB`, - { - eventType: event.type, - eventSource: event.source, - eventSizeBytes, - maxSizeBytes, - }, - ); - } - } -} diff --git a/packages/core/src/lib/eventBus/eventBus.test.ts b/packages/core/src/lib/eventBus/eventBus.test.ts new file mode 100644 index 0000000..c94172f --- /dev/null +++ b/packages/core/src/lib/eventBus/eventBus.test.ts @@ -0,0 +1,326 @@ +import { assertEquals, assertExists, assertInstanceOf } from '@std/assert'; +import { GenericException } from '../exception/genericException.ts'; +import type { Event } from '../message/event.ts'; +import { getEventBus, NimbusEventBus, setupEventBus } from './eventBus.ts'; + +/** + * Helper function to create a valid test event. + */ +const createTestEvent = ( + type: string, + data?: Record, +): Event => ({ + specversion: '1.0', + id: crypto.randomUUID(), + correlationid: crypto.randomUUID(), + time: new Date().toISOString(), + source: 'https://test.nimbus.overlap.at', + type, + subject: '/test', + data: data ?? { testData: 'value' }, +}); + +Deno.test('EventBus rejects event that exceeds the 64KB size limit', () => { + const eventBus = new NimbusEventBus({ + name: 'size-test', + maxRetries: 3, + }); + + const event: Event = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-event', + subject: '/test', + data: { + bigData: 'x'.repeat(65 * 1024), + }, + }; + + let exception: any; + try { + eventBus.putEvent(event); + } catch (ex: any) { + exception = ex; + } + + assertInstanceOf(exception, GenericException); + assertEquals(exception.message, 'Event size exceeds the limit of 64KB'); +}); + +Deno.test('EventBus delivers event to subscriber', async () => { + const eventBus = new NimbusEventBus({ name: 'deliver-test' }); + let receivedEvent: Event | undefined; + + eventBus.subscribeEvent({ + type: 'test.event.deliver', + handler: (event) => { + receivedEvent = event; + return Promise.resolve(); + }, + }); + + const testEvent = createTestEvent('test.event.deliver', { + message: 'hello', + }); + eventBus.putEvent(testEvent); + + // Wait for async handler to complete + await new Promise((r) => setTimeout(r, 50)); + + assertExists(receivedEvent); + assertEquals(receivedEvent.type, 'test.event.deliver'); + assertEquals( + (receivedEvent.data as Record).message, + 'hello', + ); +}); + +Deno.test('EventBus delivers event to multiple subscribers', async () => { + const eventBus = new NimbusEventBus({ name: 'multi-sub-test' }); + const receivedEvents: Event[] = []; + + // First subscriber + eventBus.subscribeEvent({ + type: 'test.event.multi', + handler: (event) => { + receivedEvents.push(event); + return Promise.resolve(); + }, + }); + + // Second subscriber + eventBus.subscribeEvent({ + type: 'test.event.multi', + handler: (event) => { + receivedEvents.push(event); + return Promise.resolve(); + }, + }); + + const testEvent = createTestEvent('test.event.multi'); + eventBus.putEvent(testEvent); + + // Wait for async handlers to complete + await new Promise((r) => setTimeout(r, 50)); + + assertEquals(receivedEvents.length, 2); + assertEquals(receivedEvents[0].id, testEvent.id); + assertEquals(receivedEvents[1].id, 
testEvent.id); +}); + +Deno.test('EventBus retries on handler failure and eventually succeeds', async () => { + const eventBus = new NimbusEventBus({ + name: 'retry-success-test', + maxRetries: 3, + baseDelay: 10, + maxDelay: 100, + useJitter: false, + }); + let attempts = 0; + + eventBus.subscribeEvent({ + type: 'test.event.retry-success', + handler: () => { + attempts++; + if (attempts < 3) { + return Promise.reject(new Error('Temporary failure')); + } + // Succeeds on 3rd attempt + return Promise.resolve(); + }, + }); + + const testEvent = createTestEvent('test.event.retry-success'); + eventBus.putEvent(testEvent); + + // Wait for retries to complete (base delay * 2^0 + base delay * 2^1 = 10 + 20 = 30ms + buffer) + await new Promise((r) => setTimeout(r, 200)); + + assertEquals(attempts, 3); +}); + +Deno.test('EventBus exhausts retries and invokes onError callback', async () => { + const eventBus = new NimbusEventBus({ + name: 'retry-exhausted-test', + maxRetries: 2, + baseDelay: 10, + maxDelay: 100, + useJitter: false, + }); + let attempts = 0; + let errorReceived: Error | undefined; + let eventReceived: Event | undefined; + + eventBus.subscribeEvent({ + type: 'test.event.retry-exhausted', + handler: () => { + attempts++; + return Promise.reject(new Error('Always fails')); + }, + onError: (error, event) => { + errorReceived = error; + eventReceived = event; + }, + }); + + const testEvent = createTestEvent('test.event.retry-exhausted'); + eventBus.putEvent(testEvent); + + // Wait for all retries to exhaust + await new Promise((r) => setTimeout(r, 200)); + + // Initial attempt + 2 retries = 3 total attempts + assertEquals(attempts, 3); + assertInstanceOf(errorReceived, GenericException); + assertExists(eventReceived); + assertEquals(eventReceived.id, testEvent.id); +}); + +Deno.test('EventBus onError callback receives error and event', async () => { + const eventBus = new NimbusEventBus({ + name: 'onerror-test', + maxRetries: 0, + baseDelay: 10, + }); + let errorMessage: string | undefined; + let eventType: string | undefined; + + eventBus.subscribeEvent({ + type: 'test.event.onerror', + handler: () => { + return Promise.reject(new Error('Handler error')); + }, + onError: (error, event) => { + errorMessage = error.message; + eventType = event.type; + }, + }); + + const testEvent = createTestEvent('test.event.onerror'); + eventBus.putEvent(testEvent); + + // Wait for handler to fail + await new Promise((r) => setTimeout(r, 50)); + + assertEquals( + errorMessage, + 'Failed to handle event: test.event.onerror from https://test.nimbus.overlap.at', + ); + assertEquals(eventType, 'test.event.onerror'); +}); + +Deno.test('setupEventBus creates and registers an EventBus instance', () => { + setupEventBus('test-setup-bus', { maxRetries: 5 }); + const bus = getEventBus('test-setup-bus'); + + assertInstanceOf(bus, NimbusEventBus); +}); + +Deno.test('getEventBus creates default instance if not found', () => { + const bus = getEventBus('test-new-bus-' + crypto.randomUUID()); + + assertInstanceOf(bus, NimbusEventBus); +}); + +Deno.test('getEventBus returns same instance on repeated calls', () => { + const busName = 'test-same-instance-' + crypto.randomUUID(); + const bus1 = getEventBus(busName); + const bus2 = getEventBus(busName); + + assertEquals(bus1, bus2); +}); + +Deno.test('Multiple named EventBus instances are independent', async () => { + const ordersBus = new NimbusEventBus({ name: 'orders-independent' }); + const notificationsBus = new NimbusEventBus({ + name: 'notifications-independent', + 
}); + + let ordersReceived = 0; + let notificationsReceived = 0; + + ordersBus.subscribeEvent({ + type: 'test.event.independent', + handler: () => { + ordersReceived++; + return Promise.resolve(); + }, + }); + + notificationsBus.subscribeEvent({ + type: 'test.event.independent', + handler: () => { + notificationsReceived++; + return Promise.resolve(); + }, + }); + + // Publish to orders bus only + ordersBus.putEvent(createTestEvent('test.event.independent')); + + await new Promise((r) => setTimeout(r, 50)); + + assertEquals(ordersReceived, 1); + assertEquals(notificationsReceived, 0); +}); + +Deno.test('EventBus logPublish callback is invoked when publishing an event', () => { + const loggedEvents: Event[] = []; + + const eventBus = new NimbusEventBus({ + name: 'log-publish-test', + logPublish: (event) => { + loggedEvents.push(event); + }, + }); + + const testEvent = createTestEvent('test.event.log-publish', { + message: 'logged', + }); + eventBus.putEvent(testEvent); + + assertEquals(loggedEvents.length, 1); + assertEquals(loggedEvents[0].id, testEvent.id); + assertEquals(loggedEvents[0].type, 'test.event.log-publish'); + assertEquals( + (loggedEvents[0].data as Record).message, + 'logged', + ); +}); + +Deno.test('EventBus logPublish callback receives correct event for each publish', () => { + const loggedEvents: Event[] = []; + + const eventBus = new NimbusEventBus({ + name: 'log-publish-multi-test', + logPublish: (event) => { + loggedEvents.push(event); + }, + }); + + const event1 = createTestEvent('test.event.first'); + const event2 = createTestEvent('test.event.second'); + const event3 = createTestEvent('test.event.third'); + + eventBus.putEvent(event1); + eventBus.putEvent(event2); + eventBus.putEvent(event3); + + assertEquals(loggedEvents.length, 3); + assertEquals(loggedEvents[0].type, 'test.event.first'); + assertEquals(loggedEvents[1].type, 'test.event.second'); + assertEquals(loggedEvents[2].type, 'test.event.third'); +}); + +Deno.test('EventBus works without logPublish callback', () => { + const eventBus = new NimbusEventBus({ + name: 'no-log-publish-test', + }); + + // Should not throw when logPublish is not provided + const testEvent = createTestEvent('test.event.no-log'); + eventBus.putEvent(testEvent); +}); diff --git a/packages/core/src/lib/eventBus/eventBus.ts b/packages/core/src/lib/eventBus/eventBus.ts new file mode 100644 index 0000000..74992f7 --- /dev/null +++ b/packages/core/src/lib/eventBus/eventBus.ts @@ -0,0 +1,659 @@ +import { metrics, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api'; +import EventEmitter from 'node:events'; +import { GenericException } from '../exception/genericException.ts'; +import { getLogger } from '../log/logger.ts'; +import type { Event } from '../message/event.ts'; + +const tracer = trace.getTracer('nimbus'); +const meter = metrics.getMeter('nimbus'); + +const eventsPublishedCounter = meter.createCounter( + 'eventbus_events_published_total', + { + description: 'Total number of events published to the event bus', + }, +); + +const eventsDeliveredCounter = meter.createCounter( + 'eventbus_events_delivered_total', + { + description: 'Total number of events delivered to handlers', + }, +); + +const handlingDuration = meter.createHistogram( + 'eventbus_event_handling_duration_seconds', + { + description: 'Duration of event handler execution in seconds', + unit: 's', + }, +); + +const retryAttemptsCounter = meter.createCounter( + 'eventbus_retry_attempts_total', + { + description: 'Total number of retry attempts for event handling', 
+ }, +); + +const eventSizeBytes = meter.createHistogram( + 'eventbus_event_size_bytes', + { + description: 'Size of events published to the event bus in bytes', + unit: 'By', + }, +); + +/** + * The type for the NimbusEventBus options. + */ +export type NimbusEventBusOptions = { + /** + * The name of the event bus instance for metrics and traces. + * Defaults to 'default'. + */ + name?: string; + /** + * The maximum number of retries for handling the event in case of an error. + * Defaults to 2. + */ + maxRetries?: number; + /** + * The base delay for exponential backoff in milliseconds. + * Defaults to 1000ms. + */ + baseDelay?: number; + /** + * The maximum delay cap for exponential backoff in milliseconds. + * Defaults to 30000ms (30 seconds). + */ + maxDelay?: number; + /** + * Whether to add jitter to the retry delay to prevent thundering herd issues. + * Defaults to true. + */ + useJitter?: boolean; + /** + * Optional callback invoked when an event is published. + * Useful for custom logging or debugging. + */ + logPublish?: (event: Event) => void; +}; + +/** + * The input type for subscribing to an event. + */ +export type SubscribeEventInput<TEvent extends Event = Event> = { + /** + * The CloudEvents event type to subscribe to (e.g., 'at.overlap.nimbus.order-created'). + */ + type: string; + /** + * The async handler function that processes received events. + */ + handler: (event: TEvent) => Promise<void>; + /** + * Optional error callback invoked when event handling fails after all retries. + * If not provided, errors are logged using the default logger. + */ + onError?: (error: Error, event: TEvent) => void; + /** + * Optional retry options that override the EventBus defaults for this subscription. + */ + options?: Omit<NimbusEventBusOptions, 'name' | 'logPublish'>; +}; + +/** + * The NimbusEventBus is used to publish and subscribe to events within the application. + * + * Events are delivered asynchronously to all registered handlers. If a handler fails, + * it will be retried using exponential backoff until it succeeds or the maximum retry + * count is reached. + * + * All operations are instrumented with OpenTelemetry tracing and metrics for observability.
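+ * + * Roughly, the n-th retry waits min(baseDelay * 2^(n - 1), maxDelay) milliseconds, plus up to + * 10% random jitter when useJitter is enabled; with the defaults this gives 1000ms, 2000ms, + * 4000ms, and so on, capped at 30000ms.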
+ * + * @example + * ```ts + * import { createEvent, NimbusEventBus } from '@nimbus/core'; + * + * const eventBus = new NimbusEventBus({ + * name: 'orders', + * maxRetries: 3, + * baseDelay: 1000, + * maxDelay: 30000, + * useJitter: true, + * logPublish: (event) => { + * console.log('Event published:', event.type, event.correlationid); + * }, + * }); + * + * // Subscribe to events + * eventBus.subscribeEvent({ + * type: 'at.overlap.nimbus.order-created', + * handler: async (event) => { + * console.log('Order created:', event.data); + * }, + * onError: (error, event) => { + * console.error('Failed to handle event:', event.id, error.message); + * }, + * options: { + * maxRetries: 5, + * baseDelay: 500, + * maxDelay: 15000, + * useJitter: true, + * }, + * }); + * + * // Publish an event + * const event = createEvent({ + * type: 'at.overlap.nimbus.order-created', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * subject: '/orders/12345', + * data: { orderId: '12345', customerId: '67890' }, + * datacontenttype: 'application/json', + * }); + * + * eventBus.putEvent(event); + * ``` + */ +export class NimbusEventBus { + private readonly _eventEmitter: EventEmitter; + private readonly _name: string; + private readonly _maxRetries: number; + private readonly _baseDelay: number; + private readonly _maxDelay: number; + private readonly _useJitter: boolean; + private readonly _logPublish?: (event: Event) => void; + + /** + * Create a new NimbusEventBus instance. + * + * @param options - The options for the event bus. + * @param options.name - The name of the event bus instance for metrics and traces. Defaults to 'default'. + * @param options.maxRetries - The maximum number of retries for handling the event in case of an error. Defaults to 2. + * @param options.baseDelay - The base delay for exponential backoff in milliseconds. Defaults to 1000ms. + * @param options.maxDelay - The maximum delay cap for exponential backoff in milliseconds. Defaults to 30000ms. + * @param options.useJitter - Whether to add jitter to the retry delay. Defaults to true. + * @param options.logPublish - Optional callback invoked when an event is published. + * + * @example + * ```ts + * import { getLogger, NimbusEventBus } from '@nimbus/core'; + * + * const eventBus = new NimbusEventBus({ + * name: 'orders', + * maxRetries: 3, + * baseDelay: 1000, + * maxDelay: 30000, + * useJitter: true, + * logPublish: (event) => { + * getLogger().debug({ + * category: 'EventBus', + * message: 'Published event', + * data: { type: event.type, id: event.id }, + * correlationId: event.correlationid, + * }); + * }, + * }); + * ``` + */ + constructor(options?: NimbusEventBusOptions) { + this._eventEmitter = new EventEmitter(); + this._name = options?.name ?? 'default'; + this._maxRetries = options?.maxRetries ?? 2; + this._baseDelay = options?.baseDelay ?? 1000; + this._maxDelay = options?.maxDelay ?? 30000; + this._useJitter = options?.useJitter ?? true; + this._logPublish = options?.logPublish; + } + + /** + * Publish an event to the event bus. + * + * The event is validated against the CloudEvents 64KB size limit before publishing. + * All subscribers registered for this event type will receive the event asynchronously. + * + * @param event - The CloudEvents-compliant event to publish. + * @throws {GenericException} If the event size exceeds 64KB. 
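+ * + * The limit is applied to the UTF-8 byte length of the JSON-serialized event (64 * 1024 = 65536 bytes), + * so the CloudEvents envelope fields count toward the budget alongside the data payload.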
+ * + * @example + * ```ts + * import { createEvent, getEventBus } from '@nimbus/core'; + * + * const eventBus = getEventBus('default'); + * + * // Create and publish an event with all CloudEvents properties + * const event = createEvent({ + * type: 'at.overlap.nimbus.order-created', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * subject: '/orders/12345', + * data: { + * orderId: '12345', + * customerId: '67890', + * items: ['item-1', 'item-2'], + * total: 99.99, + * }, + * datacontenttype: 'application/json', + * dataschema: 'https://schemas.example.com/order-created.json', + * }); + * + * eventBus.putEvent(event); + * ``` + */ + public putEvent<TEvent extends Event>(event: TEvent): void { + const eventSize = this._validateEventSize(event); + const metricLabels = { + eventbus_name: this._name, + event_type: event.type, + }; + + tracer.startActiveSpan( + 'eventbus.publish', + { + kind: SpanKind.PRODUCER, + attributes: { + 'messaging.system': 'nimbusEventBus', + 'messaging.eventbus_name': this._name, + 'messaging.operation': 'publish', + 'messaging.destination': event.type, + 'cloudevents.event_id': event.id, + 'cloudevents.event_source': event.source, + ...(event.correlationid && { + correlation_id: event.correlationid, + }), + }, + }, + (span) => { + try { + eventsPublishedCounter.add(1, metricLabels); + eventSizeBytes.record(eventSize, metricLabels); + + if (this._logPublish) { + this._logPublish(event); + } + + this._eventEmitter.emit(event.type, event); + } catch (error) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error + ? error.message + : 'Unknown error', + }); + span.recordException( + error instanceof Error + ? error + : new Error('Unknown error'), + ); + throw error; + } finally { + span.end(); + } + }, + ); + } + + /** + * Subscribe to an event type with a handler function. + * + * When an event matching the specified type is published, the handler is invoked. + * If the handler throws an error, it will be retried using exponential backoff + * (delay doubles with each attempt) until either it succeeds or the maximum retry + * count is reached. + * + * @param input - The subscription configuration. + * @param input.type - The CloudEvents event type to subscribe to. + * @param input.handler - The async handler function to process events. + * @param input.onError - Optional callback invoked when all retries are exhausted. + * @param input.options - Optional retry options to override EventBus defaults. + * @param input.options.maxRetries - Override maximum retry attempts for this subscription. + * @param input.options.baseDelay - Override base delay in milliseconds for this subscription. + * @param input.options.maxDelay - Override maximum delay cap in milliseconds for this subscription. + * @param input.options.useJitter - Override jitter setting for this subscription.
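+ * + * Several handlers may subscribe to the same event type; each subscription is invoked and + * retried independently of the others.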
+ * + * @example + * ```ts + * import { getEventBus, getLogger } from '@nimbus/core'; + * + * const eventBus = getEventBus('default'); + * + * // Subscribe with all available options + * eventBus.subscribeEvent({ + * type: 'at.overlap.nimbus.order-created', + * handler: async (event) => { + * // Process the event + * console.log('Order created:', event.data.orderId); + * console.log('Correlation ID:', event.correlationid); + * }, + * onError: (error, event) => { + * getLogger().error({ + * category: 'OrderHandler', + * message: 'Failed to process order after all retries', + * data: { eventId: event.id, orderId: event.data.orderId }, + * error, + * correlationId: event.correlationid, + * }); + * }, + * options: { + * maxRetries: 5, + * baseDelay: 500, + * maxDelay: 15000, + * useJitter: true, + * }, + * }); + * ``` + */ + public subscribeEvent({ + type, + handler, + onError, + options, + }: SubscribeEventInput): void { + getLogger().info({ + category: 'Nimbus', + message: `Subscribed to ${type} event`, + }); + + const maxRetries = options?.maxRetries ?? this._maxRetries; + const baseDelay = options?.baseDelay ?? this._baseDelay; + const maxDelay = options?.maxDelay ?? this._maxDelay; + const useJitter = options?.useJitter ?? this._useJitter; + + const handleEvent = async (event: TEvent) => { + try { + await this._processEvent( + handler, + event, + maxRetries, + baseDelay, + maxDelay, + useJitter, + ); + } catch (error: any) { + if (onError) { + onError(error, event); + } else { + getLogger().error({ + category: 'Nimbus', + message: error.message, + error, + }); + } + } + }; + + this._eventEmitter.on(type, handleEvent); + } + + private _processEvent( + handler: (event: TEvent) => Promise, + event: TEvent, + maxRetries: number, + baseDelay: number, + maxDelay: number, + useJitter: boolean, + ): Promise { + const startTime = performance.now(); + const metricLabels = { + eventbus_name: this._name, + event_type: event.type, + }; + + return tracer.startActiveSpan( + 'eventbus.handle', + { + kind: SpanKind.CONSUMER, + attributes: { + 'messaging.system': 'nimbusEventBus', + 'messaging.eventbus_name': this._name, + 'messaging.operation': 'process', + 'messaging.destination': event.type, + 'cloudevents.event_id': event.id, + 'cloudevents.event_source': event.source, + ...(event.correlationid && { + correlation_id: event.correlationid, + }), + }, + }, + async (span) => { + let attempt = 0; + + while (attempt <= maxRetries) { + try { + await handler(event); + + this._recordDeliveryMetrics( + metricLabels, + 'success', + startTime, + ); + + span.end(); + return; + } catch (error: unknown) { + attempt++; + + if (attempt > maxRetries) { + this._handleFinalFailure({ + error, + event, + span, + metricLabels, + startTime, + retryConfig: { + maxRetries, + baseDelay, + maxDelay, + }, + }); + } + + retryAttemptsCounter.add(1, metricLabels); + + const delayMs = this._calculateRetryDelay( + attempt, + baseDelay, + maxDelay, + useJitter, + ); + + span.addEvent('retry', { attempt, delay_ms: delayMs }); + + await new Promise((resolve) => + setTimeout(resolve, delayMs) + ); + } + } + }, + ); + } + + private _recordDeliveryMetrics( + metricLabels: { eventbus_name: string; event_type: string }, + status: 'success' | 'error', + startTime: number, + ): void { + eventsDeliveredCounter.add(1, { ...metricLabels, status }); + handlingDuration.record( + (performance.now() - startTime) / 1000, + metricLabels, + ); + } + + private _handleFinalFailure(options: { + error: unknown; + event: Event; + span: ReturnType; + 
metricLabels: { eventbus_name: string; event_type: string }; + startTime: number; + retryConfig: { + maxRetries: number; + baseDelay: number; + maxDelay: number; + }; + }): never { + const { error, event, span, metricLabels, startTime, retryConfig } = + options; + + this._recordDeliveryMetrics(metricLabels, 'error', startTime); + + const errorMessage = error instanceof Error + ? error.message + : 'Unknown error'; + const errorInstance = error instanceof Error + ? error + : new Error('Unknown error'); + + span.setStatus({ code: SpanStatusCode.ERROR, message: errorMessage }); + span.recordException(errorInstance); + span.end(); + + const exception = new GenericException( + `Failed to handle event: ${event.type} from ${event.source}`, + retryConfig, + ); + + if (error instanceof Error && error.stack) { + exception.stack = error.stack; + } + + throw exception; + } + + private _calculateRetryDelay( + attempt: number, + baseDelay: number, + maxDelay: number, + useJitter: boolean, + ): number { + const delay = Math.min(baseDelay * Math.pow(2, attempt - 1), maxDelay); + const jitter = useJitter ? Math.random() * delay * 0.1 : 0; + return delay + jitter; + } + + /** + * Validate the size of the event and return the size in bytes. + * + * To comply with the CloudEvent spec a transmitted event + * can only have a maximum size of 64KB. + * + * @param event - The event to validate. + * @returns The size of the event in bytes. + */ + private _validateEventSize(event: Event): number { + const eventJson = JSON.stringify(event); + const size = new TextEncoder().encode(eventJson).length; + const maxSizeBytes = 64 * 1024; // 64KB + + if (size > maxSizeBytes) { + throw new GenericException( + `Event size exceeds the limit of 64KB`, + { + eventType: event.type, + eventSource: event.source, + eventSizeBytes: size, + maxSizeBytes, + }, + ); + } + + return size; + } +} + +/** + * Registry to store named EventBus instances. + */ +const eventBusRegistry = new Map(); + +/** + * Setup a named EventBus instance and register it for later retrieval. + * + * Use this function to configure an EventBus with specific options at application + * startup, then retrieve it later using {@link getEventBus}. + * + * @param name - The unique name for this EventBus instance. + * @param options - Optional configuration options for the EventBus. + * @param options.maxRetries - The maximum number of retries for handling events. Defaults to 2. + * @param options.baseDelay - The base delay for exponential backoff in milliseconds. Defaults to 1000ms. + * @param options.maxDelay - The maximum delay cap for exponential backoff in milliseconds. Defaults to 30000ms. + * @param options.useJitter - Whether to add jitter to the retry delay. Defaults to true. + * @param options.logPublish - Optional callback invoked when an event is published. + * + * @example + * ```ts + * import { getLogger, setupEventBus } from '@nimbus/core'; + * + * // At application startup, configure the event bus with all options + * setupEventBus('default', { + * maxRetries: 3, + * baseDelay: 1000, + * maxDelay: 30000, + * useJitter: true, + * logPublish: (event) => { + * getLogger().debug({ + * category: 'EventBus', + * message: 'Published event', + * data: { type: event.type, id: event.id }, + * correlationId: event.correlationid, + * }); + * }, + * }); + * ``` + */ +export const setupEventBus = ( + name: string, + options?: Omit, +): void => { + eventBusRegistry.set(name, new NimbusEventBus({ ...options, name })); +}; + +/** + * Get a named EventBus instance. 
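Editor's note: `_calculateRetryDelay` above doubles the wait per attempt, caps it at `maxDelay`, and optionally adds up to 10% jitter. The snippet below re-states that schedule with the default `baseDelay`/`maxDelay` so the resulting delays are easy to see; it is a standalone illustration, not part of the package.

```ts
// Standalone re-statement of the documented backoff formula, for illustration only.
const baseDelay = 1000; // ms (default)
const maxDelay = 30000; // ms (default)

const delayFor = (attempt: number): number =>
  Math.min(baseDelay * Math.pow(2, attempt - 1), maxDelay);

// With jitter disabled, the first five retries wait roughly:
// 1000, 2000, 4000, 8000, 16000 ms (capped at 30000 ms thereafter).
for (let attempt = 1; attempt <= 5; attempt++) {
  console.log(`attempt ${attempt}: ${delayFor(attempt)} ms`);
}
```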
+ * + * If an EventBus with the given name has been configured via {@link setupEventBus}, + * that instance is returned. Otherwise, a new EventBus with default options is created + * and registered. + * + * @param name - The name of the EventBus instance to retrieve. Defaults to 'default'. + * @returns The NimbusEventBus instance. + * + * @example + * ```ts + * import { createEvent, getEventBus } from '@nimbus/core'; + * + * // Get the event bus configured earlier with setupEventBus + * const eventBus = getEventBus('default'); + * + * // Subscribe to events + * eventBus.subscribeEvent({ + * type: 'at.overlap.nimbus.order-created', + * handler: async (event) => { + * console.log('Order created:', event.data.orderId); + * }, + * }); + * + * // Publish an event + * const event = createEvent({ + * type: 'at.overlap.nimbus.order-created', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * data: { orderId: '12345', customerId: '67890' }, + * datacontenttype: 'application/json', + * }); + * + * eventBus.putEvent(event); + * ``` + */ +export const getEventBus = (name: string = 'default'): NimbusEventBus => { + let eventBus = eventBusRegistry.get(name); + + if (!eventBus) { + eventBus = new NimbusEventBus({ name }); + eventBusRegistry.set(name, eventBus); + } + + return eventBus; +}; diff --git a/packages/core/src/lib/exception/exception.ts b/packages/core/src/lib/exception/exception.ts index 774d51a..757ee09 100644 --- a/packages/core/src/lib/exception/exception.ts +++ b/packages/core/src/lib/exception/exception.ts @@ -1,10 +1,7 @@ /** * Base exception */ -export class Exception { - public readonly name: string; - public message: string; - public stack?: string; +export class Exception extends Error { public details?: Record; public statusCode?: number; @@ -14,8 +11,8 @@ export class Exception { details?: Record, statusCode?: number, ) { + super(message); this.name = name; - this.message = message; if (details) { this.details = details; @@ -25,7 +22,10 @@ export class Exception { this.statusCode = statusCode; } - Error.captureStackTrace(this, this.constructor); + // Maintains proper stack trace in V8 environments + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } } public fromError(error: Error): Exception { diff --git a/packages/core/src/lib/exception/forbiddenException.ts b/packages/core/src/lib/exception/forbiddenException.ts index 1ce205e..71be59c 100644 --- a/packages/core/src/lib/exception/forbiddenException.ts +++ b/packages/core/src/lib/exception/forbiddenException.ts @@ -5,6 +5,6 @@ import { Exception } from './exception.ts'; */ export class ForbiddenException extends Exception { constructor(message?: string, details?: Record) { - super('FORBIDDEN_EXCEPTION', message ?? 'Forbidden', details, 403); + super('FORBIDDEN', message ?? 
'Forbidden', details, 403); } } diff --git a/packages/core/src/lib/exception/genericException.test.ts b/packages/core/src/lib/exception/genericException.test.ts index c9cf4cc..96efa6d 100644 --- a/packages/core/src/lib/exception/genericException.test.ts +++ b/packages/core/src/lib/exception/genericException.test.ts @@ -5,8 +5,8 @@ Deno.test('GenericException without constructor input', () => { const exception = new GenericException(); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); - assertEquals(exception.message, 'An error occurred'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); + assertEquals(exception.message, 'Internal server error'); assertEquals(exception.statusCode, 500); assertEquals(typeof exception.details, 'undefined'); assertEquals(typeof exception.stack, 'string'); @@ -21,7 +21,7 @@ Deno.test('GenericException with constructor input', () => { const exception = new GenericException(message, details); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); assertEquals(exception.message, message); assertEquals(exception.statusCode, 500); assertEquals(exception.details, details); @@ -34,7 +34,7 @@ Deno.test('GenericException from error without constructor input', () => { const exception = new GenericException().fromError(nativeError); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 500); assertEquals(typeof exception.details, 'undefined'); @@ -54,7 +54,7 @@ Deno.test('GenericException from error with constructor input', () => { ).fromError(nativeError); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 500); assertEquals(exception.details, details); diff --git a/packages/core/src/lib/exception/genericException.ts b/packages/core/src/lib/exception/genericException.ts index 6fc8340..48d7ebb 100644 --- a/packages/core/src/lib/exception/genericException.ts +++ b/packages/core/src/lib/exception/genericException.ts @@ -6,8 +6,8 @@ import { Exception } from './exception.ts'; export class GenericException extends Exception { constructor(message?: string, details?: Record) { super( - 'GENERIC_EXCEPTION', - message ?? 'An error occurred', + 'INTERNAL_SERVER_ERROR', + message ?? 
'Internal server error', details, 500, ); diff --git a/packages/core/src/lib/exception/index.ts b/packages/core/src/lib/exception/index.ts deleted file mode 100644 index 3628465..0000000 --- a/packages/core/src/lib/exception/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -export * from './exception.ts'; -export * from './forbiddenException.ts'; -export * from './genericException.ts'; -export * from './invalidInputException.ts'; -export * from './notFoundException.ts'; -export * from './unauthorizedException.ts'; diff --git a/packages/core/src/lib/exception/invalidInputException.test.ts b/packages/core/src/lib/exception/invalidInputException.test.ts index bf8aa57..2c384d9 100644 --- a/packages/core/src/lib/exception/invalidInputException.test.ts +++ b/packages/core/src/lib/exception/invalidInputException.test.ts @@ -1,13 +1,12 @@ import { assertEquals, assertInstanceOf } from '@std/assert'; -import { z } from 'zod'; import { InvalidInputException } from './invalidInputException.ts'; Deno.test('InvalidInputException without constructor input', () => { const exception = new InvalidInputException(); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); - assertEquals(exception.message, 'The provided input is invalid'); + assertEquals(exception.name, 'INVALID_INPUT'); + assertEquals(exception.message, 'Invalid input'); assertEquals(exception.statusCode, 400); assertEquals(typeof exception.details, 'undefined'); assertEquals(typeof exception.stack, 'string'); @@ -25,7 +24,7 @@ Deno.test('InvalidInputException with constructor input', () => { ); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); + assertEquals(exception.name, 'INVALID_INPUT'); assertEquals(exception.message, message); assertEquals(exception.statusCode, 400); assertEquals(exception.details, details); @@ -40,7 +39,7 @@ Deno.test('InvalidInputException from error without constructor input', () => { ); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); + assertEquals(exception.name, 'INVALID_INPUT'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 400); assertEquals(typeof exception.details, 'undefined'); @@ -60,44 +59,9 @@ Deno.test('InvalidInputException from error with constructor input', () => { ).fromError(nativeError); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); + assertEquals(exception.name, 'INVALID_INPUT'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 400); assertEquals(exception.details, details); assertEquals(exception.stack, nativeError.stack); }); - -Deno.test('InvalidInputException from ZodError', () => { - const expectedPayload = z.object({ - foo: z.string(), - }); - const payload = { - foo: 123, - }; - - try { - expectedPayload.parse(payload); - } catch (error: any) { - const exception = new InvalidInputException() - .fromZodError(error); - - assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); - assertEquals(exception.message, 'The provided input is invalid'); - assertEquals(exception.statusCode, 400); - assertEquals(exception.details, { - 'issues': [ - { - 'code': 'invalid_type', - 'expected': 'string', - 'received': 'number', - 'path': [ - 'foo', - ], - 'message': 'Expected string, received number', - }, - ], - }); - 
assertEquals(exception.stack, error.stack); - } -}); diff --git a/packages/core/src/lib/exception/invalidInputException.ts b/packages/core/src/lib/exception/invalidInputException.ts index 0cfe273..fe25bcc 100644 --- a/packages/core/src/lib/exception/invalidInputException.ts +++ b/packages/core/src/lib/exception/invalidInputException.ts @@ -7,8 +7,8 @@ import { Exception } from './exception.ts'; export class InvalidInputException extends Exception { constructor(message?: string, details?: Record) { super( - 'INVALID_INPUT_EXCEPTION', - message ?? 'The provided input is invalid', + 'INVALID_INPUT', + message ?? 'Invalid input', details, 400, ); @@ -23,10 +23,15 @@ export class InvalidInputException extends Exception { * @returns {InvalidInputException} The InvalidInputException. */ public fromZodError(error: ZodError): InvalidInputException { - this.stack = error.stack; - this.details = { - issues: error.issues, - }; + if (error.stack) { + this.stack = error.stack; + } + + if (error.issues) { + this.details = { + issues: error.issues, + }; + } return this; } diff --git a/packages/core/src/lib/exception/notFoundException.test.ts b/packages/core/src/lib/exception/notFoundException.test.ts index f2fec2e..cf33374 100644 --- a/packages/core/src/lib/exception/notFoundException.test.ts +++ b/packages/core/src/lib/exception/notFoundException.test.ts @@ -5,7 +5,7 @@ Deno.test('NotFoundException without constructor input', () => { const exception = new NotFoundException(); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, 'Not found'); assertEquals(exception.statusCode, 404); assertEquals(typeof exception.details, 'undefined'); @@ -21,7 +21,7 @@ Deno.test('NotFoundException with constructor input', () => { const exception = new NotFoundException(message, details); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, message); assertEquals(exception.statusCode, 404); assertEquals(exception.details, details); @@ -34,7 +34,7 @@ Deno.test('NotFoundException from error without constructor input', () => { const exception = new NotFoundException().fromError(nativeError); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 404); assertEquals(typeof exception.details, 'undefined'); @@ -54,7 +54,7 @@ Deno.test('NotFoundException from error with constructor input', () => { ).fromError(nativeError); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 404); assertEquals(exception.details, details); diff --git a/packages/core/src/lib/exception/notFoundException.ts b/packages/core/src/lib/exception/notFoundException.ts index e86fd78..a293c2a 100644 --- a/packages/core/src/lib/exception/notFoundException.ts +++ b/packages/core/src/lib/exception/notFoundException.ts @@ -5,6 +5,6 @@ import { Exception } from './exception.ts'; */ export class NotFoundException extends Exception { constructor(message?: string, details?: Record) { - super('NOT_FOUND_EXCEPTION', message ?? 
'Not found', details, 404); + super('NOT_FOUND', message ?? 'Not found', details, 404); } } diff --git a/packages/core/src/lib/exception/unauthorizedException.ts b/packages/core/src/lib/exception/unauthorizedException.ts index 7d6b1af..d729cd7 100644 --- a/packages/core/src/lib/exception/unauthorizedException.ts +++ b/packages/core/src/lib/exception/unauthorizedException.ts @@ -6,7 +6,7 @@ import { Exception } from './exception.ts'; export class UnauthorizedException extends Exception { constructor(message?: string, details?: Record) { super( - 'UNAUTHORIZED_EXCEPTION', + 'UNAUTHORIZED', message ?? 'Unauthorized', details, 401, diff --git a/packages/core/src/lib/log/index.ts b/packages/core/src/lib/log/index.ts deleted file mode 100644 index 79cac34..0000000 --- a/packages/core/src/lib/log/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './logFormatter.ts'; -export * from './logger.ts'; -export * from './logLevel.ts'; -export * from './options.ts'; diff --git a/packages/core/src/lib/log/logFormatter.ts b/packages/core/src/lib/log/logFormatter.ts index 5e6d25a..3d57247 100644 --- a/packages/core/src/lib/log/logFormatter.ts +++ b/packages/core/src/lib/log/logFormatter.ts @@ -60,6 +60,11 @@ export const prettyLogFormatter: LogFormatter = ( ): string | string[] => { let dataString = ''; let errorString = ''; + let correlationId = ''; + + if (logRecord.correlationId) { + correlationId = `(${logRecord.correlationId}) `; + } if (logRecord.data) { dataString = JSON.stringify(logRecord.data, null, 2); @@ -69,14 +74,14 @@ export const prettyLogFormatter: LogFormatter = ( errorString = JSON.stringify(logRecord.error, null, 2); return [ - `[${logRecord.category}] ${logRecord.level.toUpperCase()} :: ${logRecord.message}`, + `[${logRecord.category}] ${logRecord.level.toUpperCase()} ${correlationId}:: ${logRecord.message}`, errorString.length ? `\n${errorString}` : '', logRecord.error.stack ? `\n${logRecord.error.stack}` : '', dataString.length ? `\n${dataString}` : '', ]; } - return `[${logRecord.category}] ${logRecord.level.toUpperCase()} :: ${logRecord.message}${ + return `[${logRecord.category}] ${logRecord.level.toUpperCase()} ${correlationId}:: ${logRecord.message}${ dataString.length ? `\n${dataString}` : '' }`; }; diff --git a/packages/core/src/lib/log/logger.ts b/packages/core/src/lib/log/logger.ts index b2015c6..88db4fc 100644 --- a/packages/core/src/lib/log/logger.ts +++ b/packages/core/src/lib/log/logger.ts @@ -6,31 +6,117 @@ import { defaultLogOptions, type LogOptions } from './options.ts'; /** * The input for a log message. + * + * Use this type when calling any of the Logger's log methods (debug, info, warn, error, critical). */ export type LogInput = { + /** + * The log message to output. This should be a human-readable description of what occurred. + */ message: string; + /** + * An optional category to group related log messages. + * Useful for filtering logs by component or domain (e.g., 'Database', 'API', 'Auth'). + * Defaults to 'Default' if not provided. + */ category?: string; + /** + * Optional structured data to include with the log message. + * This data will be serialized according to the configured formatter. + */ data?: Record; - error?: Error | Exception; + /** + * Optional error or exception to include with the log message. + * The error's message and stack trace will be captured in the log output. + */ + error?: Error; + /** + * Optional correlation ID to trace related log messages across operations. 
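Editor's note: because `Exception` now extends `Error` (see the change to `exception.ts` earlier in this diff) and the error codes were shortened, callers can rely on plain `instanceof Error` checks and the new names. A small sketch, assuming the exception classes are re-exported from `@nimbus/core` like the rest of the package's public API; the payload values are placeholders.

```ts
import { GenericException, NotFoundException } from '@nimbus/core';

try {
  throw new NotFoundException('Order not found', { orderId: '12345' });
} catch (error) {
  // Nimbus exceptions are now real Error instances.
  console.log(error instanceof Error); // true
  if (error instanceof NotFoundException) {
    console.log(error.name); // 'NOT_FOUND'
    console.log(error.statusCode); // 404
    console.log(typeof error.stack); // 'string'
  }
}

// The renamed defaults shown in the updated GenericException tests:
const generic = new GenericException();
console.log(generic.name); // 'INTERNAL_SERVER_ERROR'
console.log(generic.message); // 'Internal server error'
console.log(generic.statusCode); // 500
```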
+ * Useful for tracking a request through multiple services or handlers. + */ correlationId?: string; }; /** * A full log record with the log input and additional metadata attached. + * + * This type is passed to the LogFormatter function to produce the final log output. */ export type LogRecord = { + /** + * The timestamp when the log message was created. + */ timestamp: Date; + /** + * The severity level of the log message (debug, info, warn, error, critical). + */ level: LogLevel; + /** + * The category for grouping related log messages. + */ category: string; + /** + * The log message describing what occurred. + */ message: string; + /** + * Optional structured data included with the log message. + */ data?: Record; + /** + * Optional error or exception included with the log message. + */ error?: Error | Exception; + /** + * Optional correlation ID for tracing related log messages. + */ correlationId?: string; }; /** - * The Logger provides different log methods to - * log messages at different levels. + * The Logger provides structured logging with configurable log levels, formatters, and console colors. + * + * The Logger is a singleton that should be configured once at application startup using + * {@link setupLogger}, then accessed throughout the application using {@link getLogger}. + * + * Log levels in order of severity: debug < info < warn < error < critical. + * Messages below the configured log level are silently ignored. + * + * @example + * ```ts + * import { + * getLogger, + * jsonLogFormatter, + * parseLogLevel, + * prettyLogFormatter, + * setupLogger, + * } from '@nimbus/core'; + * + * // Configure the logger at application startup + * setupLogger({ + * logLevel: parseLogLevel(process.env.LOG_LEVEL), + * formatter: process.env.NODE_ENV === 'production' + * ? jsonLogFormatter + * : prettyLogFormatter, + * useConsoleColors: process.env.NODE_ENV !== 'production', + * }); + * + * // Use the logger throughout your application + * const logger = getLogger(); + * + * logger.info({ + * message: 'Application started', + * category: 'App', + * data: { port: 3000, environment: 'production' }, + * }); + * + * logger.error({ + * message: 'Failed to connect to database', + * category: 'Database', + * error: new Error('Connection timeout'), + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * ``` */ export class Logger { private static _instance: Logger; @@ -358,13 +444,51 @@ export const setupLogger = (options: LogOptions): void => { /** * Get the Logger instance. * - * @returns {Logger} The Logger instance + * Returns the singleton Logger instance. If the logger has not been configured + * via {@link setupLogger}, a default logger with silent log level is returned. + * + * @returns The Logger instance. 
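Editor's note: the `prettyLogFormatter` change earlier in this diff inserts the correlation ID, in parentheses, between the level and the `::` separator. A minimal sketch of wiring that up at startup; it assumes `parseLogLevel` accepts a plain string, as in the class-level example above.

```ts
import {
  getLogger,
  parseLogLevel,
  prettyLogFormatter,
  setupLogger,
} from '@nimbus/core';

setupLogger({
  logLevel: parseLogLevel('debug'), // assumption: plain strings are accepted here
  formatter: prettyLogFormatter,
  useConsoleColors: true,
});

getLogger().info({
  category: 'Orders',
  message: 'Order created',
  correlationId: '550e8400-e29b-41d4-a716-446655440000',
});

// Expected shape of the formatted line:
// [Orders] INFO (550e8400-e29b-41d4-a716-446655440000) :: Order created
```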
* * @example * ```ts - * import { getLogger } from "@nimbus/core"; + * import { getLogger } from '@nimbus/core'; * * const logger = getLogger(); + * + * // Log at different levels with all available options + * logger.debug({ + * message: 'Processing request', + * category: 'API', + * data: { method: 'POST', path: '/orders' }, + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * + * logger.info({ + * message: 'Order created successfully', + * category: 'Orders', + * data: { orderId: '12345', customerId: '67890' }, + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * + * logger.warn({ + * message: 'Rate limit approaching', + * category: 'API', + * data: { currentRate: 95, maxRate: 100 }, + * }); + * + * logger.error({ + * message: 'Failed to process payment', + * category: 'Payments', + * data: { orderId: '12345' }, + * error: new Error('Payment gateway timeout'), + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * + * logger.critical({ + * message: 'Database connection lost', + * category: 'Database', + * error: new Error('Connection refused'), + * }); * ``` */ export const getLogger = (): Logger => { diff --git a/packages/core/src/lib/log/options.ts b/packages/core/src/lib/log/options.ts index 7310025..54f293c 100644 --- a/packages/core/src/lib/log/options.ts +++ b/packages/core/src/lib/log/options.ts @@ -2,11 +2,30 @@ import { jsonLogFormatter, type LogFormatter } from './logFormatter.ts'; import type { LogLevel } from './logLevel.ts'; /** - * The options for the Log class. + * Configuration options for the Logger. + * + * Use these options with {@link setupLogger} to configure the logger at application startup. */ export type LogOptions = { + /** + * The minimum log level to output. Messages below this level are silently ignored. + * Levels in order of severity: debug < info < warn < error < critical. + * Defaults to 'silent' (no logs output). + */ logLevel?: LogLevel; + /** + * The formatter function used to convert LogRecord objects into output strings. + * Use `jsonLogFormatter` for structured JSON output (recommended for production), + * or `prettyLogFormatter` for human-readable output (recommended for development). + * Defaults to `jsonLogFormatter`. + */ formatter?: LogFormatter; + /** + * Whether to apply ANSI color codes to the console output based on log level. + * Set to true for colored output in development terminals. + * Set to false for production or when outputting to log aggregation systems. + * Defaults to false. + */ useConsoleColors?: boolean; }; diff --git a/packages/core/src/lib/message/command.ts b/packages/core/src/lib/message/command.ts new file mode 100644 index 0000000..e008abd --- /dev/null +++ b/packages/core/src/lib/message/command.ts @@ -0,0 +1,151 @@ +import { ulid } from '@std/ulid'; +import { z } from 'zod'; + +/** + * A command is a message that is sent to tell the system + * to perform an action. Typically commands come in via an API + * like HTTP POST requests, gRPC calls, or similar inbound traffic. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @property {string} specversion - The version of the CloudEvents specification which the event uses. + * @property {string} id - A globally unique identifier of the event. 
+ * @property {string} correlationid - A globally unique identifier that indicates a correlation to previous and subsequent messages. + * @property {string} time - The time when the command was created. + * @property {string} source - A URI reference that identifies the system that is constructing the command. + * @property {string} type - The type must follow the CloudEvents naming convention, which uses a reversed domain name as a namespace, followed by a domain-specific name. + * @property {string} subject - An identifier for an object or entity the command is about (optional). + * @property {TData} data - The actual data, containing the specific business payload. + * @property {string} datacontenttype - A MIME type that indicates the format that the data is in (optional). + * @property {string} dataschema - An absolute URL to the schema that the data adheres to (optional). + * + * @template TData - The type of the data. + * + * @example + * const submitOrderCommand: Command = { + * specversion: '1.0', + * id: '123', + * correlationid: '456', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.submit-order', + * data: { + * customerId: '666', + * cartId: '123', + * }, + * datacontenttype: 'application/json', + * }; + */ +export type Command = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + subject?: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; + +/** + * Type alias for the command data field schema. + */ +type CommandDataSchema = z.ZodUnion< + [ + z.ZodRecord, + z.ZodString, + z.ZodNumber, + z.ZodArray, + z.ZodBoolean, + ] +>; + +/** + * Type alias for the command schema shape. + */ +export type CommandSchemaType = z.ZodObject<{ + specversion: z.ZodLiteral<'1.0'>; + id: z.ZodString; + correlationid: z.ZodString; + time: z.ZodISODateTime; + source: z.ZodString; + type: z.ZodString; + subject: z.ZodOptional; + data: CommandDataSchema; + datacontenttype: z.ZodOptional; + dataschema: z.ZodOptional; +}>; + +/** + * The Zod schema matching the Command type. + * + * Zod is the default for validating incomming messages. + * + * We do not infer the Command type from this schema because of + * slow type issues see https://jsr.io/docs/about-slow-types for more details. + */ +export const commandSchema: CommandSchemaType = z.object({ + specversion: z.literal('1.0'), + id: z.string(), + correlationid: z.string(), + time: z.iso.datetime(), + source: z.string(), + type: z.string(), + subject: z.string().optional(), + data: z.union([ + z.record(z.string(), z.unknown()), + z.string(), + z.number(), + z.array(z.unknown()), + z.boolean(), + ]), + datacontenttype: z.string().optional(), + dataschema: z.url().optional(), +}); + +/** + * Input for creating a command. + */ +export type CreateCommandInput = Partial> & { + type: string; + source: string; + data: unknown; +}; + +/** + * Creates a command based on input data with the convenience + * to skip properties and use the defaults for the rest. + */ +export const createCommand = ( + { + id, + correlationid, + time, + source, + type, + subject, + data, + datacontenttype, + dataschema, + }: CreateCommandInput, +): TCommand => { + const command = { + specversion: '1.0', + id: id ?? ulid(), + correlationid: correlationid ?? ulid(), + time: time ?? new Date().toISOString(), + source, + type, + ...(subject && { subject }), + data, + datacontenttype: datacontenttype ?? 
'application/json', + ...(dataschema && { dataschema }), + } as TCommand; + + return command; +}; diff --git a/packages/core/src/lib/message/event.ts b/packages/core/src/lib/message/event.ts new file mode 100644 index 0000000..ce8464e --- /dev/null +++ b/packages/core/src/lib/message/event.ts @@ -0,0 +1,155 @@ +import { ulid } from '@std/ulid'; +import { z } from 'zod'; + +/** + * An event is a message that is emitted by the system to notify + * subscribers that something has happened. Typically events are + * the result of a command that was executed before. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @property {string} specversion - The version of the CloudEvents specification which the event uses. + * @property {string} id - A globally unique identifier of the event. + * @property {string} correlationid - A globally unique identifier that indicates a correlation to previous and subsequent messages to this event. + * @property {string} time - The time when the event was created. + * @property {string} source - A URI reference that identifies the system that is constructing the event. + * @property {string} type - The type must follow the CloudEvents naming convention, which uses a reversed domain name as a namespace, followed by a domain-specific name. + * @property {string} subject - An identifier for an object or entity the event is about (optional). + * @property {TData} data - The actual data, containing the specific business payload. + * @property {string} datacontenttype - A MIME type that indicates the format that the data is in (optional). + * @property {string} dataschema - An absolute URL to the schema that the data adheres to (optional). + * + * @template TData - The type of the data. + * + * @example + * const orderSubmittedEvent: Event = { + * specversion: '1.0', + * id: '123', + * correlationid: '456', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.submit-order', + * subject: '/orders/42', + * data: { + * orderId: '42', + * customerId: '666', + * cartId: '123', + * status: 'submitted', + * }, + * datacontenttype: 'application/json', + * }; + */ +export type Event = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + subject: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; + +/** + * Type alias for the event data field schema. + */ +type EventDataSchema = z.ZodUnion< + [ + z.ZodRecord, + z.ZodString, + z.ZodNumber, + z.ZodArray, + z.ZodBoolean, + ] +>; + +/** + * Type alias for the event schema shape. + */ +export type EventSchemaType = z.ZodObject<{ + specversion: z.ZodLiteral<'1.0'>; + id: z.ZodString; + correlationid: z.ZodString; + time: z.ZodISODateTime; + source: z.ZodString; + type: z.ZodString; + subject: z.ZodString; + data: EventDataSchema; + datacontenttype: z.ZodOptional; + dataschema: z.ZodOptional; +}>; + +/** + * The Zod schema matching the Event type. + * + * Zod is the default for validating incomming messages. + * + * We do not infer the Event type from this schema because of + * slow type issues see https://jsr.io/docs/about-slow-types for more details. 
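Editor's note: since Zod is the default for validating incoming messages, an unknown payload can be checked against `eventSchema` before it is handled. A small sketch using `safeParse`; it assumes `eventSchema` is exported from the package root like `commandSchema`, and the field values are placeholders.

```ts
import { eventSchema } from '@nimbus/core';

// A payload as it might arrive from another system.
const incoming: unknown = {
  specversion: '1.0',
  id: '123',
  correlationid: '456',
  time: '2025-01-01T00:00:00Z',
  source: 'https://nimbus.overlap.at',
  type: 'at.overlap.nimbus.order-submitted',
  subject: '/orders/42',
  data: { orderId: '42', status: 'submitted' },
  datacontenttype: 'application/json',
};

const result = eventSchema.safeParse(incoming);

if (result.success) {
  console.log('Valid event:', result.data.type);
} else {
  console.error('Invalid event:', result.error.issues);
}
```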
+ */ +export const eventSchema: EventSchemaType = z.object({ + specversion: z.literal('1.0'), + id: z.string(), + correlationid: z.string(), + time: z.iso.datetime(), + source: z.string(), + type: z.string(), + subject: z.string(), + data: z.union([ + z.record(z.string(), z.unknown()), + z.string(), + z.number(), + z.array(z.unknown()), + z.boolean(), + ]), + datacontenttype: z.string().optional(), + dataschema: z.url().optional(), +}); + +/** + * Input for creating an event. + */ +export type CreateEventInput = Partial> & { + type: string; + source: string; + subject: string; + data: unknown; +}; + +/** + * Creates an event based on input data with the convenience + * to skip properties and use the defaults for the rest. + */ +export const createEvent = ( + { + id, + correlationid, + time, + source, + type, + subject, + data, + datacontenttype, + dataschema, + }: CreateEventInput, +): TEvent => { + const event = { + specversion: '1.0', + id: id ?? ulid(), + correlationid: correlationid ?? ulid(), + time: time ?? new Date().toISOString(), + source, + type, + subject, + data, + datacontenttype: datacontenttype ?? 'application/json', + ...(dataschema && { dataschema }), + } as TEvent; + + return event; +}; diff --git a/packages/core/src/lib/message/message.ts b/packages/core/src/lib/message/message.ts new file mode 100644 index 0000000..0b4eb26 --- /dev/null +++ b/packages/core/src/lib/message/message.ts @@ -0,0 +1,21 @@ +import type { Command } from './command.ts'; +import type { Event } from './event.ts'; +import type { Query } from './query.ts'; + +/** + * A message is a communication object that can be passed between + * systems, modules, functions etc. + * + * In the Nimbus ecosystem it is either a Command, Event or Query. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @template TData - The type of the data. + */ +export type Message = + | Command + | Event + | Query; diff --git a/packages/core/src/lib/message/query.ts b/packages/core/src/lib/message/query.ts new file mode 100644 index 0000000..c5ac52d --- /dev/null +++ b/packages/core/src/lib/message/query.ts @@ -0,0 +1,143 @@ +import { ulid } from '@std/ulid'; +import { z } from 'zod'; + +/** + * A query is a message that is sent to the system to request + * information. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @property {string} specversion - The version of the CloudEvents specification which the query uses. + * @property {string} id - A globally unique identifier of the query. + * @property {string} correlationid - A globally unique identifier that indicates a correlation to previous and subsequent messages to this query. + * @property {string} time - The time when the query was created. + * @property {string} source - A URI reference that identifies the system that is constructing the query. + * @property {string} type - The type must follow the CloudEvents naming convention, which uses a reversed domain name as a namespace, followed by a domain-specific name. + * @property {TData} data - The actual data, containing the specific business payload. + * @property {string} datacontenttype - A MIME type that indicates the format that the data is in (optional). 
+ * @property {string} dataschema - An absolute URL to the schema that the data adheres to (optional). + * + * @template TData - The type of the data. + * + * @example + * const getOrdersQuery: Query = { + * specversion: '1.0', + * id: '123', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.get-orders', + * data: { + * customerId: '666', + * status: 'fulfilled', + * }, + * datacontenttype: 'application/json', + * }; + */ +export type Query = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; + +/** + * Type alias for the query data field schema. + */ +type QueryDataSchema = z.ZodUnion< + [ + z.ZodRecord, + z.ZodString, + z.ZodNumber, + z.ZodArray, + z.ZodBoolean, + ] +>; + +/** + * Type alias for the query schema shape. + */ +export type QuerySchemaType = z.ZodObject<{ + specversion: z.ZodLiteral<'1.0'>; + id: z.ZodString; + correlationid: z.ZodString; + time: z.ZodISODateTime; + source: z.ZodString; + type: z.ZodString; + data: QueryDataSchema; + datacontenttype: z.ZodOptional; + dataschema: z.ZodOptional; +}>; + +/** + * The Zod schema matching the Query type. + * + * Zod is the default for validating incomming messages. + * + * We do not infer the Query type from this schema because of + * slow type issues see https://jsr.io/docs/about-slow-types for more details. + */ +export const querySchema: QuerySchemaType = z.object({ + specversion: z.literal('1.0'), + id: z.string(), + correlationid: z.string(), + time: z.iso.datetime(), + source: z.string(), + type: z.string(), + data: z.union([ + z.record(z.string(), z.unknown()), + z.string(), + z.number(), + z.array(z.unknown()), + z.boolean(), + ]), + datacontenttype: z.string().optional(), + dataschema: z.url().optional(), +}); + +/** + * Input for creating a query. + */ +export type CreateQueryInput = Partial> & { + type: string; + source: string; + data: unknown; +}; + +/** + * Creates a query based on input data with the convenience + * to skip properties and use the defaults for the rest. + */ +export const createQuery = ( + { + id, + correlationid, + time, + source, + type, + data, + datacontenttype, + dataschema, + }: CreateQueryInput, +): TQuery => { + const query = { + specversion: '1.0', + id: id ?? ulid(), + correlationid: correlationid ?? ulid(), + time: time ?? new Date().toISOString(), + source, + type, + data, + datacontenttype: datacontenttype ?? 
'application/json', + ...(dataschema && { dataschema }), + } as TQuery; + + return query; +}; diff --git a/packages/core/src/lib/message/router.ts b/packages/core/src/lib/message/router.ts new file mode 100644 index 0000000..e5ad800 --- /dev/null +++ b/packages/core/src/lib/message/router.ts @@ -0,0 +1,412 @@ +import { metrics, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api'; +import type { z } from 'zod'; +import { InvalidInputException } from '../exception/invalidInputException.ts'; +import { NotFoundException } from '../exception/notFoundException.ts'; +import { getLogger } from '../log/logger.ts'; +import type { Message } from './message.ts'; + +const tracer = trace.getTracer('nimbus'); +const meter = metrics.getMeter('nimbus'); + +const messagesRoutedCounter = meter.createCounter( + 'router_messages_routed_total', + { + description: 'Total number of messages routed', + }, +); + +const routingDuration = meter.createHistogram( + 'router_routing_duration_seconds', + { + description: 'Duration of message routing in seconds', + unit: 's', + }, +); + +/** + * The message handler type - transport-agnostic, just returns domain data. + * + * @template TInput - The type of the input message. + * @template TOutput - The type of the data returned by the handler. + */ +export type MessageHandler< + TInput extends Message = Message, + TOutput = unknown, +> = ( + input: TInput, +) => Promise; + +/** + * Options for creating a MessageRouter. + */ +export type MessageRouterOptions = { + /** + * The name of the router instance for metrics and traces. + * Defaults to 'default'. + */ + name?: string; + /** + * Optional callback invoked when a message is received for routing. + * Useful for custom logging or debugging of incoming messages. + * + * @param input - The incoming message to be routed. + */ + logInput?: (input: any) => void; + /** + * Optional callback invoked after a message has been successfully handled. + * Useful for custom logging or debugging of handler results. + * + * @param output - The result returned by the message handler. + */ + logOutput?: (output: any) => void; +}; + +type ZodSchema = z.ZodType; + +/** + * Internal handler registration. + */ +type HandlerRegistration = { + handler: MessageHandler; + schema: ZodSchema; +}; + +/** + * The MessageRouter routes messages to their handlers based on the type value of the message. + * + * Messages are validated against their registered Zod schemas before being passed to handlers. + * All routing operations are instrumented with OpenTelemetry tracing and metrics for observability. 
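Editor's note: `createQuery` above fills in the same defaults as `createCommand` and `createEvent`: a ULID for `id` and `correlationid`, the current time, and `application/json` as the content type. A short sketch with a made-up query type and payload, assuming `createQuery` is exported from the package root like its siblings.

```ts
import { createQuery } from '@nimbus/core';

const query = createQuery({
  type: 'at.overlap.nimbus.get-orders',
  source: 'https://nimbus.overlap.at',
  data: { customerId: '666', status: 'fulfilled' },
});

// id, correlationid and time were generated; datacontenttype defaulted.
console.log(query.id, query.correlationid, query.time, query.datacontenttype);
```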
+ * + * @example + * ```ts + * import { createCommand, MessageRouter } from '@nimbus/core'; + * + * const messageRouter = new MessageRouter({ + * name: 'api', + * logInput: (input) => { + * console.log('Received message:', input.type); + * }, + * logOutput: (output) => { + * console.log('Handler result:', output); + * }, + * }); + * + * // Register command handler + * messageRouter.register( + * 'at.overlap.nimbus.create-order', + * createOrderHandler, + * createOrderCommandSchema, + * ); + * + * // Register query handler + * messageRouter.register( + * 'at.overlap.nimbus.get-order', + * getOrderHandler, + * getOrderQuerySchema, + * ); + * + * // Route a command + * const command = createCommand({ + * type: 'at.overlap.nimbus.create-order', + * source: 'https://api.example.com', + * data: { customerId: '123', items: ['item-1', 'item-2'] }, + * }); + * + * const result = await messageRouter.route(command); + * ``` + */ +export class MessageRouter { + private readonly _handlers: Map; + private readonly _name: string; + private readonly _logInput?: (input: any) => void; + private readonly _logOutput?: (output: any) => void; + + constructor( + options?: MessageRouterOptions, + ) { + this._handlers = new Map(); + this._name = options?.name ?? 'default'; + this._logInput = options?.logInput; + this._logOutput = options?.logOutput; + } + + /** + * Register a handler for a specific message type. + * + * @param messageType - The message type as defined in the CloudEvents specification + * (e.g., 'at.overlap.nimbus.create-order'). + * @param handler - The async handler function that processes the message and returns a result. + * @param schema - The Zod schema to validate the incoming message before passing to the handler. + * + * @example + * ```ts + * import { commandSchema, type Command, getRouter } from '@nimbus/core'; + * import { z } from 'zod'; + * + * // Define the command type and schema + * const CREATE_ORDER_TYPE = 'at.overlap.nimbus.create-order'; + * + * const createOrderSchema = commandSchema.extend({ + * type: z.literal(CREATE_ORDER_TYPE), + * data: z.object({ + * customerId: z.string(), + * items: z.array(z.string()), + * }), + * }); + * type CreateOrderCommand = z.infer; + * + * // Define the handler + * const createOrderHandler = async (command: CreateOrderCommand) => { + * // Process the command and return the result + * return { orderId: '12345', status: 'created' }; + * }; + * + * // Register the handler + * const router = getRouter('default'); + * router.register(CREATE_ORDER_TYPE, createOrderHandler, createOrderSchema); + * ``` + */ + public register( + messageType: string, + handler: MessageHandler, + schema: ZodSchema, + ): void { + this._handlers.set(messageType, { + handler, + schema, + }); + + getLogger().debug({ + category: 'Nimbus', + message: `Registered handler for: ${messageType}`, + }); + } + + /** + * Route a message to its handler. + * + * The message is validated against the registered schema before being passed to the handler. + * The routing operation is instrumented with OpenTelemetry tracing and metrics. + * + * @param input - The CloudEvents-compliant message to route (command, query, or event). + * @returns The result from the handler. + * + * @throws {NotFoundException} If no handler is registered for the message type. + * @throws {InvalidInputException} If the message has no type attribute or fails schema validation. 
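Editor's note: both failure modes documented above surface as typed exceptions, so callers can branch on them. A hedged sketch of handling the result of `route`, assuming the exception classes are exported from the package root; the incoming message is a placeholder for whatever arrives over the transport layer.

```ts
import {
  getRouter,
  InvalidInputException,
  NotFoundException,
} from '@nimbus/core';

const router = getRouter('default');

try {
  // Placeholder for a message received over HTTP, a queue, etc.
  const incomingMessage = { type: 'at.overlap.nimbus.unknown-type', data: {} };
  const result = await router.route(incomingMessage);
  console.log('Handled:', result);
} catch (error) {
  if (error instanceof NotFoundException) {
    // No handler registered for this message type, e.g. respond with 404.
    console.error(error.message, error.details);
  } else if (error instanceof InvalidInputException) {
    // Schema validation failed, e.g. respond with 400 and the Zod issues.
    console.error(error.message, error.details);
  } else {
    throw error;
  }
}
```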
+ * + * @example + * ```ts + * import { createCommand, getRouter } from '@nimbus/core'; + * + * const router = getRouter('default'); + * + * // Create a command with all CloudEvents properties + * const command = createCommand({ + * type: 'at.overlap.nimbus.create-order', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * data: { + * customerId: '123', + * items: ['item-1', 'item-2'], + * }, + * datacontenttype: 'application/json', + * }); + * + * // Route the command to its registered handler + * const result = await router.route(command); + * console.log('Order created:', result); + * ``` + */ + public async route(input: any): Promise { + const startTime = performance.now(); + const messageType = input?.type ?? 'unknown'; + + return await tracer.startActiveSpan( + 'router.route', + { + kind: SpanKind.INTERNAL, + attributes: { + 'messaging.system': 'nimbusRouter', + 'messaging.router_name': this._name, + 'messaging.operation': 'route', + 'messaging.destination': messageType, + ...(input?.correlationid && { + correlation_id: input.correlationid, + }), + }, + }, + async (span) => { + try { + if (this._logInput) { + this._logInput(input); + } + + if (!input?.type) { + throw new InvalidInputException( + 'The provided input has no type attribute', + ); + } + + const registration = this._handlers.get(input.type); + if (!registration) { + throw new NotFoundException( + 'Message handler not found', + { + reason: + `Could not find a handler for message type: "${input.type}"`, + }, + ); + } + + const { handler, schema } = registration; + + const validationResult = schema.safeParse(input); + + if (!validationResult.success) { + throw new InvalidInputException( + 'The provided input is invalid', + ).fromZodError(validationResult.error); + } + + const result = await handler(validationResult.data); + + if (this._logOutput) { + this._logOutput(result); + } + + messagesRoutedCounter.add(1, { + router_name: this._name, + message_type: input.type, + status: 'success', + }); + routingDuration.record( + (performance.now() - startTime) / 1000, + { router_name: this._name, message_type: input.type }, + ); + + return result; + } catch (error: any) { + messagesRoutedCounter.add(1, { + router_name: this._name, + message_type: messageType, + status: 'error', + }); + routingDuration.record( + (performance.now() - startTime) / 1000, + { router_name: this._name, message_type: messageType }, + ); + + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error + ? error.message + : 'Unknown error', + }); + span.recordException( + error instanceof Error + ? error + : new Error('Unknown error'), + ); + + throw error; + } finally { + span.end(); + } + }, + ); + } +} + +/** + * Registry to store named MessageRouter instances. + */ +const routerRegistry = new Map(); + +/** + * Setup a named MessageRouter instance and register it for later retrieval. + * + * Use this function to configure a MessageRouter with specific options at application + * startup, then retrieve it later using {@link getRouter}. + * + * @param name - The unique name for this MessageRouter instance. + * @param options - Optional configuration options for the MessageRouter. + * @param options.logInput - Optional callback invoked when a message is received for routing. + * @param options.logOutput - Optional callback invoked after a message has been successfully handled. 
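Editor's note: like the event bus, routers live in a named registry: `setupRouter` stores a configured instance and `getRouter` returns it, creating a default-configured router on first use if the name is unknown. A brief sketch with hypothetical instance names:

```ts
import { getRouter, setupRouter } from '@nimbus/core';

// Configure a dedicated router for the public API at startup.
setupRouter('api', {
  logInput: (input) => console.log('api <-', input?.type),
  logOutput: (output) => console.log('api ->', output),
});

const apiRouter = getRouter('api'); // the instance configured above
const jobsRouter = getRouter('jobs'); // created lazily with default options

console.log(apiRouter !== jobsRouter); // true: each name maps to its own instance
```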
+ * + * @example + * ```ts + * import { getLogger, setupRouter } from '@nimbus/core'; + * + * // At application startup, configure the router with all options + * setupRouter('default', { + * logInput: (input) => { + * getLogger().debug({ + * category: 'Router', + * message: 'Received message', + * data: { type: input.type, correlationId: input.correlationid }, + * correlationId: input.correlationid, + * }); + * }, + * logOutput: (output) => { + * getLogger().debug({ + * category: 'Router', + * message: 'Handler completed', + * data: { output }, + * }); + * }, + * }); + * ``` + */ +export const setupRouter = ( + name: string, + options?: Omit, +): void => { + routerRegistry.set(name, new MessageRouter({ ...options, name })); +}; + +/** + * Get a named MessageRouter instance. + * + * If a MessageRouter with the given name has been configured via {@link setupRouter}, + * that instance is returned. Otherwise, a new MessageRouter with default options is created + * and registered. + * + * @param name - The name of the MessageRouter instance to retrieve. Defaults to 'default'. + * @returns The MessageRouter instance. + * + * @example + * ```ts + * import { createCommand, getRouter } from '@nimbus/core'; + * + * // Get the router configured earlier with setupRouter + * const router = getRouter('default'); + * + * // Register handlers + * router.register( + * 'at.overlap.nimbus.create-order', + * createOrderHandler, + * createOrderSchema, + * ); + * + * // Route a message + * const command = createCommand({ + * type: 'at.overlap.nimbus.create-order', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * data: { customerId: '123', items: ['item-1'] }, + * datacontenttype: 'application/json', + * }); + * + * const result = await router.route(command); + * ``` + */ +export const getRouter = (name: string = 'default'): MessageRouter => { + let router = routerRegistry.get(name); + + if (!router) { + router = new MessageRouter({ name }); + routerRegistry.set(name, router); + } + + return router; +}; diff --git a/packages/core/src/lib/messageEnvelope.ts b/packages/core/src/lib/messageEnvelope.ts deleted file mode 100644 index 798ef7a..0000000 --- a/packages/core/src/lib/messageEnvelope.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { z, type ZodType } from 'zod'; - -// TODO: fix slow type issue - -/** - * Zod schema for the MessageEnvelope. - * - * As Nimbus uses the CloudEvents specification - * for all messages like commands, queries and events, - * there needs to be a place to add Nimbus specific - * meta information to messages. - * - * This is what the MessageEnvelope is used for. - * It contains the a correlationId and an optional authContext - * along with the actual payload. - */ -export const MessageEnvelope = < - TPayload extends ZodType, - TAuthContext extends ZodType, ->( - payloadType: TPayload, - authContextType: TAuthContext, -) => { - return z.object({ - payload: payloadType, - correlationId: z.string(), - authContext: authContextType.optional(), - }); -}; - -/** - * Inference type to create the MessageEnvelope type. - */ -type MessageEnvelopeType< - TPayload extends ZodType, - TAuthContext extends ZodType, -> = ReturnType>; - -/** - * The type of the MessageEnvelope. - * - * As Nimbus uses the CloudEvents specification - * for all messages like commands, queries and events, - * there needs to be a place to add Nimbus specific - * meta information to messages. - * - * This is what the MessageEnvelope is used for. 
- * It contains the a correlationId and an optional authContext - * along with the actual payload. - */ -export type MessageEnvelope = z.infer< - MessageEnvelopeType, ZodType> ->; diff --git a/packages/core/src/lib/query/query.ts b/packages/core/src/lib/query/query.ts deleted file mode 100644 index da124c5..0000000 --- a/packages/core/src/lib/query/query.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { z, ZodType } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { MessageEnvelope } from '../messageEnvelope.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the Query object. - */ -export const Query = < - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, ->( - typeType: TType, - dataType: TData, - authContextType: TAuthContext, -) => { - return CloudEvent( - typeType, - MessageEnvelope(dataType, authContextType), - ); -}; - -/** - * Inference type to create the Query type. - */ -type QueryType< - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, -> = ReturnType>; - -/** - * The type of the Query object. - */ -export type Query = z.infer< - QueryType, ZodType, ZodType> ->; diff --git a/packages/core/src/lib/router/index.ts b/packages/core/src/lib/router/index.ts deleted file mode 100644 index 8bb167e..0000000 --- a/packages/core/src/lib/router/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './router.ts'; diff --git a/packages/core/src/lib/router/router.test.ts b/packages/core/src/lib/router/router.test.ts deleted file mode 100644 index 30edb78..0000000 --- a/packages/core/src/lib/router/router.test.ts +++ /dev/null @@ -1,209 +0,0 @@ -import { assertEquals, assertInstanceOf } from '@std/assert'; -import { GenericException } from '../exception/genericException.ts'; -import { - InvalidInputException, - NotFoundException, -} from '../exception/index.ts'; -import { createRouter } from './router.ts'; -import { commandHandlerMap, type TestCommand } from './testCommand.ts'; -import { eventHandlerMap, type TestEvent } from './testEvent.ts'; -import { queryHandlerMap, type TestQuery } from './testQuery.ts'; - -Deno.test('Router handles input with an unknown handler name', async () => { - const router = createRouter({ - handlerMap: {}, - }); - - const input = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'UNKNOWN_EVENT', - data: { - payload: { - testException: false, - aNumber: 1, - }, - correlationId: '123', - authContext: { - sub: 'admin@host.tld', - }, - }, - }; - - try { - const result = await router(input); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, NotFoundException); - assertEquals(exception.message, 'Route handler not found'); - } -}); - -Deno.test('Router handles valid command input', async () => { - const commandRouter = createRouter({ - handlerMap: commandHandlerMap, - }); - - const input: TestCommand = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.command', - data: { - payload: { - aNumber: 1, - }, - correlationId: '123', - authContext: { - sub: 'admin@host.tld', - }, - }, - }; - - try { - const result = await commandRouter(input); - assertEquals(result, { - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - aNumber: 1, - }, - }); - } catch (exception: any) { - assertEquals(typeof exception === 'undefined', true); - } -}); - -Deno.test('Router handles valid query input', async () => { 
- const queryRouter = createRouter({ - handlerMap: queryHandlerMap, - }); - - const input: TestQuery = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.query', - data: { - payload: {}, - correlationId: '123', - authContext: { - sub: 'admin@host.tld', - }, - }, - }; - - try { - const result = await queryRouter(input); - assertEquals(result, { - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - foo: 'bar', - }, - }); - } catch (exception: any) { - assertEquals(typeof exception === 'undefined', true); - } -}); - -Deno.test('Router handles valid event input', async () => { - const eventRouter = createRouter({ - handlerMap: eventHandlerMap, - }); - - const input: TestEvent = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.event', - data: { - testException: false, - aNumber: 1, - }, - }; - - try { - const result = await eventRouter(input); - assertEquals(result, { - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - testException: false, - aNumber: 1, - }, - }); - } catch (exception: any) { - assertEquals(typeof exception === 'undefined', true); - } -}); - -Deno.test('Router handles invalid event input', async () => { - const eventRouter = createRouter({ - handlerMap: eventHandlerMap, - }); - - const invalidInput = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.event', - data: { - testException: false, - aNumber: '123', // This should trigger a validation error - }, - }; - - try { - const result = await eventRouter(invalidInput); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, InvalidInputException); - assertEquals( - exception.message, - 'The provided input is invalid', - ); - assertEquals(exception.details, { - issues: [ - { - code: 'invalid_type', - expected: 'number', - received: 'string', - path: ['data', 'aNumber'], - message: 'Expected number, received string', - }, - ], - }); - } -}); - -Deno.test('Router handles valid event input but handler returns an exception', async () => { - const eventRouter = createRouter({ - handlerMap: eventHandlerMap, - }); - - const input: TestEvent = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.event', - data: { - testException: true, - aNumber: 1, - }, - }; - - try { - const result = await eventRouter(input); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, GenericException); - } -}); diff --git a/packages/core/src/lib/router/router.ts b/packages/core/src/lib/router/router.ts deleted file mode 100644 index a93a10b..0000000 --- a/packages/core/src/lib/router/router.ts +++ /dev/null @@ -1,143 +0,0 @@ -import { ZodError, type ZodType } from 'zod'; -import { GenericException } from '../exception/genericException.ts'; -import { InvalidInputException } from '../exception/invalidInputException.ts'; -import { NotFoundException } from '../exception/notFoundException.ts'; - -/** - * The result of a route handler. - * - * @template TData - The type of the data returned by the route handler. - */ -export type RouteHandlerResult = { - statusCode: number; - headers?: Record; - data?: TData; -}; - -/** - * A route handler. - * - * @template TInput - The type of the input to the route handler. - * @template TResultData - The type of the data returned by the route handler. 
- */ -export type RouteHandler = ( - input: TInput, -) => Promise>; - -export type RouteHandlerMap = Record< - string, - { - handler: RouteHandler; - inputType: ZodType; - } ->; - -/** - * A Nimbus router. - * - * @template TInput - The type of the input to the router. - * @template TResultData - The type of the data returned by the router. - */ -export type Router = ( - input: TInput, -) => Promise>; - -/** - * The input for creating a Nimbus router. - * - * @template TInput - The type of the input to the router. - * @template TResultData - The type of the data returned by the router. - */ -export type CreateRouterInput = { - handlerMap: RouteHandlerMap; - inputLogFunc?: (input: TInput) => void; -}; - -/** - * Creates a Nimbus router. - * - * @param {CreateRouterInput} input - * @param {RouteHandlerMap} input.handlerMap - The map of route handlers. - * @param {Function} input.inputLogFunc - Optional function to log input received by the router. - * - * @returns {Router} The Nimbus router. - * - * @example - * ```ts - * import { createRouter } from "@nimbus/core"; - * - * import { getAccountHandler } from "./queries/getAccount.handler.ts"; - * import { GetAccountQuery } from "../core/queries/getAccount.ts"; - * - * import { addAccountHandler } from "./commands/addAccount.handler.ts"; - * import { AddAccountCommand } from "../core/command/addAccount.ts"; - * - * import { accountAddedHandler } from "./events/accountAdded.handler.ts"; - * import { AccountAddedEvent } from "../core/events/accountAdded.ts"; - * - * const accountRouter = createRouter({ - * handlerMap: { - * GET_ACCOUNT: { - * handler: getAccountHandler, - * inputType: GetAccountQuery, - * }, - * ADD_ACCOUNT: { - * handler: addAccountHandler, - * inputType: AddAccountCommand, - * }, - * ACCOUNT_ADDED: { - * handler: accountAddedHandler, - * inputType: AccountAddedEvent, - * }, - * }, - * }); - * ``` - */ -export const createRouter = ({ - handlerMap, - inputLogFunc, -}: CreateRouterInput): Router => { - /** - * The Nimbus router. - * - * Takes any input, validates the input and routes it to the appropriate handler. - * - * @param {any} input - The input to the router. - * - * @returns {Promise} The result of the route handler. - * - * @throws {NotFoundException} - If the route handler is not found. - * @throws {InvalidInputException} - If the input is invalid. - * @throws {GenericException} - If an error occurs while handling the input. - */ - const router: Router = (input) => { - if (inputLogFunc) { - inputLogFunc(input); - } - - if (!handlerMap[input.type]) { - throw new NotFoundException( - 'Route handler not found', - { - reason: `Could not find a handler for "${input.type}"`, - }, - ); - } - - const { handler, inputType } = handlerMap[input.type]; - - try { - const validInput = inputType.parse(input); - - return handler(validInput); - } catch (error) { - if (error instanceof ZodError) { - throw new InvalidInputException().fromZodError(error); - } else { - throw new GenericException().fromError(error as Error); - } - } - }; - - return router; -}; diff --git a/packages/core/src/lib/router/testCommand.ts b/packages/core/src/lib/router/testCommand.ts deleted file mode 100644 index 7ea0bf7..0000000 --- a/packages/core/src/lib/router/testCommand.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { z } from 'zod'; -import { Command } from '../command/command.ts'; -import type { RouteHandler, RouteHandlerMap } from './router.ts'; - -/** - * Zod schema for the TestCommandData. 
- */ -export const TestCommandData = z.object({ - aNumber: z.number(), -}); - -/** - * The type of the TestCommandData. - */ -export type TestCommandData = z.infer; - -/** - * Zod schema for the TestCommand. - */ -export const TestCommand = Command( - z.literal('test.command'), - TestCommandData, - z.object({}), -); - -/** - * The type of the TestCommand. - */ -export type TestCommand = z.infer; - -/** - * The handler for the TestCommand. - */ -export const testCommandHandler: RouteHandler< - TestCommand, - TestCommandData -> = (event) => { - return Promise.resolve({ - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: event.data.payload, - }); -}; - -/** - * The handler map for the TestCommand. - */ -export const commandHandlerMap: RouteHandlerMap = { - 'test.command': { - handler: testCommandHandler, - inputType: TestCommand, - }, -}; diff --git a/packages/core/src/lib/router/testEvent.ts b/packages/core/src/lib/router/testEvent.ts deleted file mode 100644 index 55f967f..0000000 --- a/packages/core/src/lib/router/testEvent.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { z } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { NotFoundException } from '../exception/notFoundException.ts'; -import type { RouteHandler, RouteHandlerMap } from './router.ts'; - -/** - * Zod schema for the TestEventData. - */ -export const TestEventData = z.object({ - testException: z.boolean(), - aNumber: z.number(), -}); - -/** - * The type of the TestEventData. - */ -export type TestEventData = z.infer; - -/** - * Zod schema for the TestEvent. - */ -export const TestEvent = CloudEvent( - z.literal('test.event'), - TestEventData, -); - -/** - * The type of the TestEvent. - */ -export type TestEvent = z.infer; - -/** - * The handler for the TestEvent. - */ -export const testEventHandler: RouteHandler = ( - event, -) => { - if (event.data.testException) { - throw new NotFoundException(); - } - - return Promise.resolve({ - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: event.data, - }); -}; - -/** - * The handler map for the TestEvent. - */ -export const eventHandlerMap: RouteHandlerMap = { - 'test.event': { - handler: testEventHandler, - inputType: TestEvent, - }, -}; diff --git a/packages/core/src/lib/router/testQuery.ts b/packages/core/src/lib/router/testQuery.ts deleted file mode 100644 index b14809c..0000000 --- a/packages/core/src/lib/router/testQuery.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { z } from 'zod'; -import { Query } from '../query/query.ts'; -import type { RouteHandler, RouteHandlerMap } from './router.ts'; - -/** - * Zod schema for the TestQuery. - */ -export const TestQuery = Query( - z.literal('test.query'), - z.object({}), - z.object({}), -); - -/** - * The type of the TestQuery. - */ -export type TestQuery = z.infer; - -/** - * The handler for the TestQuery. - */ -export const testQueryHandler: RouteHandler = () => { - return Promise.resolve({ - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - foo: 'bar', - }, - }); -}; - -/** - * The handler map for the TestQuery. 
- */ -export const queryHandlerMap: RouteHandlerMap = { - 'test.query': { - handler: testQueryHandler, - inputType: TestQuery, - }, -}; diff --git a/packages/core/src/lib/tracing/withSpan.ts b/packages/core/src/lib/tracing/withSpan.ts new file mode 100644 index 0000000..c43587c --- /dev/null +++ b/packages/core/src/lib/tracing/withSpan.ts @@ -0,0 +1,128 @@ +import { + type Attributes, + context, + type Span, + SpanKind, + SpanStatusCode, + trace, +} from '@opentelemetry/api'; + +/** + * Options for configuring a span created by withSpan. + */ +export type WithSpanOptions = { + /** + * The name of the span. This will be displayed in your tracing UI. + */ + name: string; + /** + * The name of the tracer. Defaults to "nimbus". + */ + tracerName?: string; + /** + * The kind of span. Defaults to SpanKind.INTERNAL. + */ + kind?: SpanKind; + /** + * Initial attributes to set on the span. + */ + attributes?: Attributes; +}; + +/** + * Higher-order function that wraps a function with OpenTelemetry tracing. + * + * Creates a child span within the current trace context. The span automatically: + * - Inherits the parent span from the active context + * - Records the function's execution time + * - Sets error status and records exceptions on failure + * - Ends when the function completes (success or failure) + * + * @example + * ```ts + * import { withSpan } from '@nimbus/core'; + * + * const fetchUser = withSpan( + * { + * name: 'fetchUser', + * attributes: { + * 'user.source': 'database' + * } + * }, + * async (userId: string) => { + * return await db.users.findById(userId); + * } + * ); + * + * const user = await fetchUser('123'); + * ``` + * + * @example + * ```ts + * const processOrder = withSpan( + * { name: 'processOrder' }, + * async (orderId: string, span: Span) => { + * const order = await db.orders.findById(orderId); + * + * span.setAttribute('order.total', order.total); + * span.setAttribute('order.items', order.items.length); + * + * return await processPayment(order); + * } + * ); + * ``` + */ +export const withSpan = ( + options: WithSpanOptions, + fn: (...args: [...TArgs, Span]) => TReturn, +): (...args: TArgs) => TReturn => { + const tracerName = options.tracerName ?? 'nimbus'; + const tracer = trace.getTracer(tracerName); + + return (...args: TArgs): TReturn => { + const parentContext = context.active(); + + return tracer.startActiveSpan( + options.name, + { + kind: options.kind ?? SpanKind.INTERNAL, + attributes: options.attributes, + }, + parentContext, + (span) => { + try { + const result = fn(...args, span); + + // Handle promises + if (result instanceof Promise) { + return result + .then((value) => { + span.end(); + return value; + }) + .catch((err) => { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: (err as Error).message, + }); + span.recordException(err as Error); + span.end(); + throw err; + }) as TReturn; + } + + span.end(); + return result; + } catch (err) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: (err as Error).message, + }); + span.recordException(err as Error); + span.end(); + throw err; + } + }, + ) as TReturn; + }; +}; diff --git a/packages/oak/README.md b/packages/hono/README.md similarity index 77% rename from packages/oak/README.md rename to packages/hono/README.md index 1d7dfc6..7b41a5f 100644 --- a/packages/oak/README.md +++ b/packages/hono/README.md @@ -3,9 +3,9 @@ alt="Nimbus" /> -# Nimbus Oak +# Nimbus Hono -The Oak package of the Nimbus framework. 
+Adapters and useful functionality to bridge Nimbus and [Hono](https://hono.dev/). Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. diff --git a/packages/oak/deno.json b/packages/hono/deno.json similarity index 86% rename from packages/oak/deno.json rename to packages/hono/deno.json index 3ed557c..2a28316 100644 --- a/packages/oak/deno.json +++ b/packages/hono/deno.json @@ -1,5 +1,5 @@ { - "name": "@nimbus/oak", + "name": "@nimbus/hono", "version": "0.17.0", "license": "MIT", "author": "Daniel Gördes (https://overlap.at)", @@ -31,8 +31,8 @@ "include": ["src/"] }, "imports": { + "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0", "@std/ulid": "jsr:@std/ulid@^1.0.0", - "@oak/oak": "jsr:@oak/oak@^17.1.4", - "zod": "npm:zod@^3.24.1" + "hono": "npm:hono@^4.11.4" } } diff --git a/packages/hono/src/index.ts b/packages/hono/src/index.ts new file mode 100644 index 0000000..078dec9 --- /dev/null +++ b/packages/hono/src/index.ts @@ -0,0 +1,3 @@ +export * from './lib/middleware/correlationId.ts'; +export * from './lib/middleware/logger.ts'; +export * from './lib/onError.ts'; diff --git a/packages/hono/src/lib/middleware/correlationId.ts b/packages/hono/src/lib/middleware/correlationId.ts new file mode 100644 index 0000000..fbcf344 --- /dev/null +++ b/packages/hono/src/lib/middleware/correlationId.ts @@ -0,0 +1,107 @@ +import type { MiddlewareHandler } from 'hono'; +import { ulid } from '@std/ulid'; + +/** + * Header names to check for an existing correlation ID. + * Checked in order of priority. + */ +const CORRELATION_ID_HEADERS = [ + 'x-correlation-id', + 'x-request-id', + 'request-id', +] as const; + +/** + * The key used to store the correlation ID in the Hono context. + */ +export const CORRELATION_ID_KEY = 'correlationId' as const; + +/** + * Options for configuring the correlation ID middleware. + */ +export type CorrelationIdOptions = { + /** + * Add the correlation ID to the response headers. + * Defaults to true. + */ + addToResponseHeaders?: boolean; + /** + * The header name to use when adding to response headers. + * Defaults to "x-correlation-id". + */ + responseHeaderName?: string; +}; + +/** + * Correlation ID middleware for Hono. + * + * This middleware extracts the correlation ID from incoming request headers + * or generates a new one using ULID if not present. The correlation ID is + * stored in the Hono context and optionally added to response headers. + * + * Checked headers (in order): + * - x-correlation-id + * - x-request-id + * - request-id + * + * @example + * ```ts + * import { Hono } from 'hono'; + * import { correlationId, getCorrelationId } from '@nimbus/hono'; + * + * const app = new Hono(); + * app.use(correlationId()); + * + * app.get('/', (c) => { + * const id = getCorrelationId(c); + * return c.json({ correlationId: id }); + * }); + * ``` + */ +export const correlationId = ( + options?: CorrelationIdOptions, +): MiddlewareHandler => { + const addToResponseHeaders = options?.addToResponseHeaders ?? true; + const responseHeaderName = options?.responseHeaderName ?? 
+ 'x-correlation-id'; + + return async (c, next) => { + let id: string | undefined; + + // Check incoming headers for existing correlation ID + for (const header of CORRELATION_ID_HEADERS) { + const value = c.req.header(header); + if (value) { + id = value; + break; + } + } + + // Generate new ID if not found + if (!id) { + id = ulid(); + } + + // Store in context + c.set(CORRELATION_ID_KEY, id); + + // Optionally add to response headers + if (addToResponseHeaders) { + c.header(responseHeaderName, id); + } + + await next(); + }; +}; + +/** + * Get the correlation ID from the Hono context. + * + * @param c - The Hono context + * @returns The correlation ID or undefined if not set + */ +export const getCorrelationId = (c: { + get: (key: typeof CORRELATION_ID_KEY) => string | undefined; +}): string => { + return c.get(CORRELATION_ID_KEY) ?? ''; +}; diff --git a/packages/hono/src/lib/middleware/logger.ts b/packages/hono/src/lib/middleware/logger.ts new file mode 100644 index 0000000..3092d24 --- /dev/null +++ b/packages/hono/src/lib/middleware/logger.ts @@ -0,0 +1,137 @@ +import { getLogger } from '@nimbus/core'; +import { + context, + propagation, + SpanKind, + SpanStatusCode, + trace, +} from '@opentelemetry/api'; +import type { MiddlewareHandler } from 'hono'; +import { getCorrelationId } from '../middleware/correlationId.ts'; + +/** + * Options for configuring the hono logger middleware. + */ +export type LoggerOptions = { + /** + * Enable OpenTelemetry tracing for HTTP requests. + * When enabled, the middleware creates spans for each request + * and propagates trace context from incoming headers. + */ + enableTracing?: boolean; + /** + * Optionally change the name of the tracer. + * Defaults to "nimbus". + */ + tracerName?: string; +}; + +const humanize = (times: string[]) => { + const [delimiter, separator] = [',', '.']; + + const orderTimes = times.map((v) => + v.replaceAll(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1' + delimiter) + ); + + return orderTimes.join(separator); +}; + +const time = (start: number) => { + const delta = Date.now() - start; + return humanize([ + delta < 1000 ? delta + 'ms' : Math.round(delta / 1000) + 's', + ]); +}; + +/** + * Logger middleware for Hono with optional OpenTelemetry tracing. + * + * When tracing is enabled, the middleware: + * - Extracts trace context from incoming request headers (traceparent/tracestate) + * - Creates a server span for the HTTP request + * - Makes the span active so child spans can be created in handlers + * - Records HTTP method, path, and status code as span attributes + * + * @example + * ```ts + * import { Hono } from 'hono'; + * import { logger } from '@nimbus/hono'; + * + * const app = new Hono(); + * app.use(logger({ enableTracing: true })); + * ``` + */ +export const logger = (options?: LoggerOptions): MiddlewareHandler => { + const enableTracing = options?.enableTracing ?? true; + const tracerName = options?.tracerName ?? 'nimbus'; + const tracer = trace.getTracer(tracerName); + + return async (c, next) => { + const startTime = Date.now(); + const correlationId = getCorrelationId(c); + + getLogger().info({ + category: 'API', + message: `REQ: [${c.req.method}] ${c.req.path}`, + correlationId, + }); + + if (enableTracing) { + // Extract trace context from incoming headers (traceparent, tracestate) + const parentContext = propagation.extract( + context.active(), + c.req.raw.headers, + { + get: (headers, key) => headers.get(key) ?? 
undefined, + keys: (headers) => [...headers.keys()], + }, + ); + + // Run the request within the extracted context + await context.with(parentContext, async () => { + await tracer.startActiveSpan( + `HTTP ${c.req.method} ${c.req.path}`, + { + kind: SpanKind.SERVER, + attributes: { + 'http.method': c.req.method, + 'url.path': c.req.path, + 'http.target': c.req.url, + ...(correlationId && { + correlation_id: correlationId, + }), + }, + }, + async (span) => { + try { + await next(); + span.setAttribute('http.status_code', c.res.status); + if (c.res.status >= 400) { + span.setStatus({ code: SpanStatusCode.ERROR }); + } + } catch (err) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: (err as Error).message, + }); + span.recordException(err as Error); + throw err; + } finally { + span.end(); + } + }, + ); + }); + } else { + await next(); + } + + getLogger().info({ + category: 'API', + message: `RES: [${c.req.method}] ${c.req.path} - ${ + time(startTime) + }`, + correlationId, + }); + }; +}; diff --git a/packages/hono/src/lib/onError.ts b/packages/hono/src/lib/onError.ts new file mode 100644 index 0000000..66dd4dc --- /dev/null +++ b/packages/hono/src/lib/onError.ts @@ -0,0 +1,62 @@ +import { Exception, getLogger } from '@nimbus/core'; +import type { Context } from 'hono'; +import type { HTTPResponseError } from 'hono/types'; + +/** + * An error handler for Hono applications that maps + * Nimbus exceptions to HTTP responses and handles + * other unhandled errors. + * + * @param error - The error to handle. + * @param c - The Hono context. + * + * @example + * ```ts + * import { handleError } from '@nimbus/hono'; + * + * const app = new Hono(); + * app.onError(handleError); + * ``` + */ +export const handleError = ( + error: Error | HTTPResponseError, + c: Context, +): Response => { + let statusCode = 500; + let response: Record = { + error: 'INTERNAL_SERVER_ERROR', + }; + + const isNimbusException = error instanceof Exception; + + if (isNimbusException) { + statusCode = error.statusCode ?? 
500; + response = { + error: error.name, + message: error.message, + ...(error.details && { details: error.details }), + }; + + if (statusCode >= 500) { + getLogger().error({ + category: 'Nimbus', + message: error.message, + error, + }); + } else { + getLogger().debug({ + category: 'Nimbus', + message: error.message, + error, + }); + } + } else { + getLogger().critical({ + category: 'Nimbus', + message: 'An unhandled error occurred', + error, + }); + } + + return c.json(response, statusCode as any); +}; diff --git a/packages/mongodb/deno.json b/packages/mongodb/deno.json index 290f697..9963238 100644 --- a/packages/mongodb/deno.json +++ b/packages/mongodb/deno.json @@ -13,7 +13,9 @@ "homepage": "https://nimbus.overlap.at", "exports": "./src/index.ts", "fmt": { - "include": ["src/"], + "include": [ + "src/" + ], "useTabs": false, "lineWidth": 80, "indentWidth": 4, @@ -22,18 +24,25 @@ "proseWrap": "always" }, "lint": { - "include": ["src/"], + "include": [ + "src/" + ], "rules": { - "exclude": ["no-explicit-any"] + "exclude": [ + "no-explicit-any" + ] } }, "test": { - "include": ["src/"] + "include": [ + "src/" + ] }, "imports": { + "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0", "@std/assert": "jsr:@std/assert@^1.0.10", "@std/text": "jsr:@std/text@^1.0.10", - "mongodb": "npm:mongodb@^6.12.0", - "zod": "npm:zod@^3.24.1" + "mongodb": "npm:mongodb@^7.0.0", + "zod": "npm:zod@^4.3.5" } -} +} \ No newline at end of file diff --git a/packages/mongodb/src/index.ts b/packages/mongodb/src/index.ts index 47a50b5..de221a9 100644 --- a/packages/mongodb/src/index.ts +++ b/packages/mongodb/src/index.ts @@ -1,3 +1,4 @@ +export * from './lib/crud/aggregate.ts'; export * from './lib/crud/bulkWrite.ts'; export * from './lib/crud/countDocuments.ts'; export * from './lib/crud/deleteMany.ts'; diff --git a/packages/mongodb/src/lib/crud/aggregate.ts b/packages/mongodb/src/lib/crud/aggregate.ts index e242d87..5807e63 100644 --- a/packages/mongodb/src/lib/crud/aggregate.ts +++ b/packages/mongodb/src/lib/crud/aggregate.ts @@ -2,6 +2,7 @@ import { GenericException } from '@nimbus/core'; import type { AggregateOptions, Collection, Document } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the aggregate function. @@ -34,29 +35,33 @@ export type Aggregate = ( * * @returns {Promise} The aggregated documents. */ -export const aggregate: Aggregate = async ({ +export const aggregate: Aggregate = ({ collection, aggregation, mapDocument, outputType, options, -}) => { - let res: Document[] = []; +}: AggregateInput) => { + return withSpan('aggregate', collection, async () => { + let res: Document[] = []; - try { - const aggregationRes = collection.aggregate(aggregation, options); - res = await aggregationRes.toArray(); - } catch (error) { - throw handleMongoError(error); - } + try { + const aggregationRes = collection.aggregate(aggregation, options); + res = await aggregationRes.toArray(); + } catch (error) { + throw handleMongoError(error); + } - try { - return res.map((item) => outputType.parse(mapDocument(item))); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return res.map((item) => + outputType.parse(mapDocument(item)) as TData + ); + } catch (error) { + const exception = error instanceof Error + ? 
new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/bulkWrite.ts b/packages/mongodb/src/lib/crud/bulkWrite.ts index c25c40f..3bb429e 100644 --- a/packages/mongodb/src/lib/crud/bulkWrite.ts +++ b/packages/mongodb/src/lib/crud/bulkWrite.ts @@ -6,6 +6,7 @@ import type { Document, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the bulkWrite function. @@ -33,15 +34,16 @@ export type BulkWrite = ( * * @returns {Promise} The result of the bulk write operation. */ -export const bulkWrite: BulkWrite = async ({ +export const bulkWrite: BulkWrite = ({ collection, operations, options, }) => { - try { - const res = await collection.bulkWrite(operations, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('bulkWrite', collection, async () => { + try { + return await collection.bulkWrite(operations, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/countDocuments.ts b/packages/mongodb/src/lib/crud/countDocuments.ts index 83e5399..6b4f32a 100644 --- a/packages/mongodb/src/lib/crud/countDocuments.ts +++ b/packages/mongodb/src/lib/crud/countDocuments.ts @@ -5,6 +5,7 @@ import type { Filter, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the countDocuments function. @@ -32,15 +33,16 @@ export type CountDocuments = ( * * @returns {Promise} The number of documents. */ -export const countDocuments: CountDocuments = async ({ +export const countDocuments: CountDocuments = ({ collection, filter, options, }) => { - try { - const res = await collection.countDocuments(filter, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('countDocuments', collection, async () => { + try { + return await collection.countDocuments(filter, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/deleteMany.ts b/packages/mongodb/src/lib/crud/deleteMany.ts index bed6604..f5fa17b 100644 --- a/packages/mongodb/src/lib/crud/deleteMany.ts +++ b/packages/mongodb/src/lib/crud/deleteMany.ts @@ -6,6 +6,7 @@ import type { Filter, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the deleteMany function. @@ -33,15 +34,16 @@ export type DeleteMany = ( * * @returns {Promise} The result of the delete operation. 
*/ -export const deleteMany: DeleteMany = async ({ +export const deleteMany: DeleteMany = ({ collection, filter, options, }) => { - try { - const res = await collection.deleteMany(filter, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('deleteMany', collection, async () => { + try { + return await collection.deleteMany(filter, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/deleteOne.ts b/packages/mongodb/src/lib/crud/deleteOne.ts index e77a576..eb809db 100644 --- a/packages/mongodb/src/lib/crud/deleteOne.ts +++ b/packages/mongodb/src/lib/crud/deleteOne.ts @@ -6,6 +6,7 @@ import type { Filter, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the deleteOne function. @@ -33,15 +34,16 @@ export type DeleteOne = ( * * @returns {Promise} The result of the delete operation. */ -export const deleteOne: DeleteOne = async ({ +export const deleteOne: DeleteOne = ({ collection, filter, options, }) => { - try { - const res = await collection.deleteOne(filter, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('deleteOne', collection, async () => { + try { + return await collection.deleteOne(filter, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/find.ts b/packages/mongodb/src/lib/crud/find.ts index 5f4a43d..e266ad4 100644 --- a/packages/mongodb/src/lib/crud/find.ts +++ b/packages/mongodb/src/lib/crud/find.ts @@ -9,6 +9,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the find function. @@ -49,7 +50,7 @@ export type Find = ( * * @returns {Promise} The found documents. */ -export const find: Find = async ({ +export const find: Find = ({ collection, filter, limit, @@ -59,40 +60,44 @@ export const find: Find = async ({ mapDocument, outputType, options, -}) => { - let res: WithId[] = []; +}: FindInput) => { + return withSpan('find', collection, async () => { + let res: WithId[] = []; - try { - const findRes = collection.find(filter, options); + try { + const findRes = collection.find(filter, options); - if (typeof limit !== 'undefined') { - findRes.limit(limit); - } + if (limit !== undefined) { + findRes.limit(limit); + } - if (typeof skip !== 'undefined') { - findRes.skip(skip); - } + if (skip !== undefined) { + findRes.skip(skip); + } - if (typeof sort !== 'undefined') { - findRes.sort(sort); - } + if (sort !== undefined) { + findRes.sort(sort); + } - if (typeof project !== 'undefined') { - findRes.project(project); - } + if (project !== undefined) { + findRes.project(project); + } - res = await findRes.toArray(); - } catch (error) { - throw handleMongoError(error); - } + res = await findRes.toArray(); + } catch (error) { + throw handleMongoError(error); + } - try { - return res.map((item) => outputType.parse(mapDocument(item))); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return res.map((item) => + outputType.parse(mapDocument(item)) + ) as TData[]; + } catch (error) { + const exception = error instanceof Error + ? 
new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOne.ts b/packages/mongodb/src/lib/crud/findOne.ts index 4596259..028477d 100644 --- a/packages/mongodb/src/lib/crud/findOne.ts +++ b/packages/mongodb/src/lib/crud/findOne.ts @@ -2,6 +2,7 @@ import { GenericException, NotFoundException } from '@nimbus/core'; import type { Collection, Document, Filter, WithId } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOne function. @@ -32,31 +33,33 @@ export type FindOne = ( * * @returns {Promise} The found document. */ -export const findOne: FindOne = async ({ +export const findOne: FindOne = ({ collection, filter, mapDocument, outputType, -}) => { - let res: WithId | null = null; +}: FindOneInput) => { + return withSpan('findOne', collection, async () => { + let res: WithId | null = null; - try { - res = await collection.findOne(filter); - } catch (error) { - throw handleMongoError(error); - } + try { + res = await collection.findOne(filter); + } catch (error) { + throw handleMongoError(error); + } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)) as TData; + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOneAndDelete.ts b/packages/mongodb/src/lib/crud/findOneAndDelete.ts index df6bb74..3f176d6 100644 --- a/packages/mongodb/src/lib/crud/findOneAndDelete.ts +++ b/packages/mongodb/src/lib/crud/findOneAndDelete.ts @@ -8,6 +8,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOneAndDelete function. @@ -40,36 +41,38 @@ export type FindOneAndDelete = ( * * @returns {Promise} The found and deleted document. */ -export const findOneAndDelete: FindOneAndDelete = async ({ +export const findOneAndDelete: FindOneAndDelete = ({ collection, filter, mapDocument, outputType, options, -}) => { - let res: WithId | null = null; +}: FindOneAndDeleteInput) => { + return withSpan('findOneAndDelete', collection, async () => { + let res: WithId | null = null; - try { - if (options) { - res = await collection.findOneAndDelete(filter, options); - } else { - res = await collection.findOneAndDelete(filter); + try { + if (options) { + res = await collection.findOneAndDelete(filter, options); + } else { + res = await collection.findOneAndDelete(filter); + } + } catch (error) { + throw handleMongoError(error); } - } catch (error) { - throw handleMongoError(error); - } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? 
new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)) as TData; + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOneAndReplace.ts b/packages/mongodb/src/lib/crud/findOneAndReplace.ts index f928023..6af7857 100644 --- a/packages/mongodb/src/lib/crud/findOneAndReplace.ts +++ b/packages/mongodb/src/lib/crud/findOneAndReplace.ts @@ -9,6 +9,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOneAndReplace function. @@ -43,41 +44,43 @@ export type FindOneAndReplace = ( * * @returns {Promise} The found and replaced document. */ -export const findOneAndReplace: FindOneAndReplace = async ({ +export const findOneAndReplace: FindOneAndReplace = ({ collection, filter, replacement, mapDocument, outputType, options, -}) => { - let res: WithId | null = null; +}: FindOneAndReplaceInput) => { + return withSpan('findOneAndReplace', collection, async () => { + let res: WithId | null = null; - try { - if (options) { - res = await collection.findOneAndReplace( - filter, - replacement, - options, - ); - } else { - res = await collection.findOneAndReplace(filter, replacement); + try { + if (options) { + res = await collection.findOneAndReplace( + filter, + replacement, + options, + ); + } else { + res = await collection.findOneAndReplace(filter, replacement); + } + } catch (error) { + throw handleMongoError(error); } - } catch (error) { - throw handleMongoError(error); - } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)) as TData; + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts index 1e312ff..74a6e55 100644 --- a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts +++ b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts @@ -9,6 +9,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOneAndUpdate function. @@ -43,37 +44,43 @@ export type FindOneAndUpdate = ( * * @returns {Promise} The found and updated document. 
*/ -export const findOneAndUpdate: FindOneAndUpdate = async ({ +export const findOneAndUpdate: FindOneAndUpdate = ({ collection, filter, update, mapDocument, outputType, options, -}) => { - let res: WithId | null = null; +}: FindOneAndUpdateInput) => { + return withSpan('findOneAndUpdate', collection, async () => { + let res: WithId | null = null; - try { - if (options) { - res = await collection.findOneAndUpdate(filter, update, options); - } else { - res = await collection.findOneAndUpdate(filter, update); + try { + if (options) { + res = await collection.findOneAndUpdate( + filter, + update, + options, + ); + } else { + res = await collection.findOneAndUpdate(filter, update); + } + } catch (error) { + throw handleMongoError(error); } - } catch (error) { - throw handleMongoError(error); - } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)) as TData; + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/insertMany.ts b/packages/mongodb/src/lib/crud/insertMany.ts index 5f545fc..7522cb4 100644 --- a/packages/mongodb/src/lib/crud/insertMany.ts +++ b/packages/mongodb/src/lib/crud/insertMany.ts @@ -6,6 +6,7 @@ import type { OptionalUnlessRequiredId, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the insertMany function. @@ -33,15 +34,16 @@ export type InsertMany = ( * * @returns {Promise} The result of the insert operation. */ -export const insertMany: InsertMany = async ({ +export const insertMany: InsertMany = ({ collection, documents, options, }) => { - try { - const res = await collection.insertMany(documents, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('insertMany', collection, async () => { + try { + return await collection.insertMany(documents, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/insertOne.ts b/packages/mongodb/src/lib/crud/insertOne.ts index cfedb80..78a23e6 100644 --- a/packages/mongodb/src/lib/crud/insertOne.ts +++ b/packages/mongodb/src/lib/crud/insertOne.ts @@ -6,6 +6,7 @@ import type { OptionalUnlessRequiredId, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the insertOne function. @@ -33,15 +34,16 @@ export type InsertOne = ( * * @returns {Promise} The result of the insert operation. 
*/ -export const insertOne: InsertOne = async ({ +export const insertOne: InsertOne = ({ collection, document, options, }) => { - try { - const res = await collection.insertOne(document, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('insertOne', collection, async () => { + try { + return await collection.insertOne(document, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/replaceOne.ts b/packages/mongodb/src/lib/crud/replaceOne.ts index 63e560f..9f15cff 100644 --- a/packages/mongodb/src/lib/crud/replaceOne.ts +++ b/packages/mongodb/src/lib/crud/replaceOne.ts @@ -7,6 +7,7 @@ import type { WithoutId, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the replaceOne function. @@ -36,16 +37,17 @@ export type ReplaceOne = ( * * @returns {Promise} The result of the replace operation. */ -export const replaceOne: ReplaceOne = async ({ +export const replaceOne: ReplaceOne = ({ collection, filter, replacement, options, }) => { - try { - const res = await collection.replaceOne(filter, replacement, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('replaceOne', collection, async () => { + try { + return await collection.replaceOne(filter, replacement, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/updateMany.ts b/packages/mongodb/src/lib/crud/updateMany.ts index cd880c6..8125871 100644 --- a/packages/mongodb/src/lib/crud/updateMany.ts +++ b/packages/mongodb/src/lib/crud/updateMany.ts @@ -7,6 +7,7 @@ import type { UpdateResult, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the updateMany function. @@ -36,16 +37,17 @@ export type UpdateMany = ( * * @returns {Promise} The result of the update operation. */ -export const updateMany: UpdateMany = async ({ +export const updateMany: UpdateMany = ({ collection, filter, update, options, }) => { - try { - const res = await collection.updateMany(filter, update, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('updateMany', collection, async () => { + try { + return await collection.updateMany(filter, update, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/updateOne.ts b/packages/mongodb/src/lib/crud/updateOne.ts index 2d938e0..e773469 100644 --- a/packages/mongodb/src/lib/crud/updateOne.ts +++ b/packages/mongodb/src/lib/crud/updateOne.ts @@ -7,6 +7,7 @@ import type { UpdateResult, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the updateOne function. @@ -36,16 +37,17 @@ export type UpdateOne = ( * * @returns {Promise} The result of the update operation. 
*/ -export const updateOne: UpdateOne = async ({ +export const updateOne: UpdateOne = ({ collection, filter, update, options, }) => { - try { - const res = await collection.updateOne(filter, update, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('updateOne', collection, async () => { + try { + return await collection.updateOne(filter, update, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/repository.ts b/packages/mongodb/src/lib/repository.ts index f55abc0..c3793de 100644 --- a/packages/mongodb/src/lib/repository.ts +++ b/packages/mongodb/src/lib/repository.ts @@ -108,7 +108,7 @@ export class MongoDBRepository< * ZodType.parse is used to ensure the data is valid and type-safe. */ protected _mapDocumentToEntity(doc: Document): TEntity { - return this._entityType.parse(doc); + return this._entityType.parse(doc) as TEntity; } /** diff --git a/packages/mongodb/src/lib/tracing.ts b/packages/mongodb/src/lib/tracing.ts new file mode 100644 index 0000000..d8e7091 --- /dev/null +++ b/packages/mongodb/src/lib/tracing.ts @@ -0,0 +1,90 @@ +import { metrics, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api'; +import type { Collection, Document } from 'mongodb'; + +export const tracer = trace.getTracer('nimbus'); + +export const DB_SYSTEM = 'mongodb'; + +const meter = metrics.getMeter('nimbus'); + +const operationCounter = meter.createCounter('mongodb_operation_total', { + description: 'Total number of MongoDB operations', +}); + +const operationDuration = meter.createHistogram( + 'mongodb_operation_duration_seconds', + { + description: 'Duration of MongoDB operations in seconds', + unit: 's', + }, +); + +/** + * Wraps an async function with OpenTelemetry tracing and metrics. + * + * Records: + * - `mongodb_operation_total` counter with operation, collection, and status labels + * - `mongodb_operation_duration_seconds` histogram with operation and collection labels + * + * @param operation - The MongoDB operation name (e.g., 'findOne', 'insertMany') + * @param collection - The MongoDB collection being operated on + * @param fn - The async function to execute within the span + * @returns The result of the async function + */ +export const withSpan = ( + operation: string, + collection: Collection, + fn: () => Promise, +): Promise => { + const startTime = performance.now(); + const metricLabels = { + operation, + collection: collection.collectionName, + }; + + return tracer.startActiveSpan( + `mongodb.${operation}`, + { + kind: SpanKind.CLIENT, + attributes: { + 'db.system': DB_SYSTEM, + 'db.operation': operation, + 'db.mongodb.collection': collection.collectionName, + }, + }, + async (span) => { + try { + const result = await fn(); + + // Record success metrics + operationCounter.add(1, { ...metricLabels, status: 'success' }); + operationDuration.record( + (performance.now() - startTime) / 1000, + metricLabels, + ); + + return result; + } catch (error) { + // Record error metrics + operationCounter.add(1, { ...metricLabels, status: 'error' }); + operationDuration.record( + (performance.now() - startTime) / 1000, + metricLabels, + ); + + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error + ? error.message + : 'Unknown error', + }); + span.recordException( + error instanceof Error ? 
error : new Error('Unknown error'), + ); + throw error; + } finally { + span.end(); + } + }, + ); +}; diff --git a/packages/oak/src/index.ts b/packages/oak/src/index.ts deleted file mode 100644 index 946228d..0000000 --- a/packages/oak/src/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './lib/middleware/requestCorrelationId.ts'; -export * from './lib/router.ts'; diff --git a/packages/oak/src/lib/middleware/requestCorrelationId.ts b/packages/oak/src/lib/middleware/requestCorrelationId.ts deleted file mode 100644 index 2644d10..0000000 --- a/packages/oak/src/lib/middleware/requestCorrelationId.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { Context } from '@oak/oak/context'; -import type { Next } from '@oak/oak/middleware'; -import { ulid } from '@std/ulid'; - -/** - * Middleware to add a correlation ID (ULID) to the request context. - * - * @param ctx - The Oak context - * @param next - The Oak next function - */ -export const requestCorrelationId = async ( - ctx: Context, - next: Next, -): Promise => { - ctx.state.correlationId = ulid(); - await next(); -}; diff --git a/packages/oak/src/lib/router.ts b/packages/oak/src/lib/router.ts deleted file mode 100644 index 48cb08a..0000000 --- a/packages/oak/src/lib/router.ts +++ /dev/null @@ -1,209 +0,0 @@ -import { - createRouter, - getLogger, - type RouteHandler, - type RouteHandlerResult, -} from '@nimbus/core'; -import type { Context } from '@oak/oak/context'; -import { Router as OakRouter, type RouterOptions } from '@oak/oak/router'; -import { ulid } from '@std/ulid'; -import type { ZodType } from 'zod'; - -/** - * The NimbusOakRouter extends the Oak Router - * to directly route commands and queries coming - * in from HTTP requests to a Nimbus router. - */ -export class NimbusOakRouter extends OakRouter { - constructor(opts: RouterOptions = {}) { - super(opts); - } - - /** - * Routes a POST request to a Nimbus command router. - * - * @param {string} path - Oak request path - * @param {string} commandType - Type of the command - * @param {ZodType} commandSchema - Schema (ZodType) of the command - * @param {RouteHandler} handler - Nimbus Route Handler function - * @param {Function} onError - Optional function to customize error handling - */ - command( - path: string, - commandType: string, - commandSchema: ZodType, - handler: RouteHandler, - onError?: (error: any, ctx: Context) => void, - ) { - const inputLogFunc = (input: any) => { - getLogger().info({ - category: 'Nimbus', - ...(input?.data?.correlationId && { - correlationId: input.data.correlationId, - }), - message: - `${input?.data?.correlationId} - [Command] ${input?.type} from ${input?.source}`, - }); - }; - - super.post(path, async (ctx: Context) => { - try { - const correlationId = ctx.state.correlationId ?? ulid(); - const requestBody = await ctx.request.body.json(); - - const nimbusRouter = createRouter({ - handlerMap: { - [commandType]: { - handler, - inputType: commandSchema, - }, - }, - inputLogFunc, - }); - - const result = await nimbusRouter({ - specversion: '1.0', - id: correlationId, - source: ctx.request.url.toString(), - type: commandType, - data: { - correlationId: correlationId, - payload: requestBody, - ...(ctx.state.authContext && { - authContext: ctx.state.authContext, - }), - }, - }); - - this._handleNimbusRouterSuccess(result, ctx); - } catch (error: any) { - this._handleNimbusRouterError(error, ctx, onError); - } - }); - } - - /** - * Routes a GET request to a Nimbus query router. 
- * - * @param {string} path - Oak request path - * @param {string} queryType - Type of the query - * @param {ZodType} querySchema - Schema (ZodType) of the query - * @param {RouteHandler} handler - Nimbus Route Handler function - * @param {Function} onError - Optional function to customize error handling - */ - query( - path: string, - queryType: string, - querySchema: ZodType, - handler: RouteHandler, - onError?: (error: any, ctx: Context) => void, - ) { - const inputLogFunc = (input: any) => { - getLogger().info({ - category: 'Nimbus', - ...(input?.data?.correlationId && { - correlationId: input.data.correlationId, - }), - message: - `${input?.data?.correlationId} - [Query] ${input?.type} from ${input?.source}`, - }); - }; - - super.get(path, async (ctx: Context) => { - try { - const correlationId = ctx.state.correlationId ?? ulid(); - const pathParams = (ctx as any).params; - - const queryParams: Record = {}; - for ( - const [key, value] of ctx.request.url.searchParams.entries() - ) { - queryParams[key] = value; - } - - const nimbusRouter = createRouter({ - handlerMap: { - [queryType]: { - handler, - inputType: querySchema, - }, - }, - inputLogFunc, - }); - - const result = await nimbusRouter({ - specversion: '1.0', - id: correlationId, - source: ctx.request.url.toString(), - type: queryType, - data: { - correlationId: correlationId, - payload: { - ...queryParams, - ...pathParams, - }, - ...(ctx.state.authContext && { - authContext: ctx.state.authContext, - }), - }, - }); - - this._handleNimbusRouterSuccess(result, ctx); - } catch (error: any) { - this._handleNimbusRouterError(error, ctx, onError); - } - }); - } - - private _handleNimbusRouterSuccess( - result: RouteHandlerResult, - ctx: Context, - ) { - ctx.response.status = result.statusCode; - - if (result.headers) { - for (const header of Object.keys(result.headers)) { - ctx.response.headers.set( - header, - result.headers[header], - ); - } - } - - if (result.data) { - ctx.response.body = result.data; - } - } - - private _handleNimbusRouterError( - error: any, - ctx: Context, - onError?: (error: any, ctx: Context) => void, - ) { - if (onError) { - onError(error, ctx); - } else { - getLogger().error({ - category: 'Nimbus', - message: error.message, - error, - }); - - const statusCode = error.statusCode ?? 500; - ctx.response.status = statusCode; - - if (statusCode < 500) { - ctx.response.body = { - statusCode, - ...(error.details ? { code: error.name } : {}), - ...(error.message ? { message: error.message } : {}), - ...(error.details ? { details: error.details } : {}), - }; - } else { - ctx.response.body = { - message: 'Internal server error', - }; - } - } - } -}
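For orientation, the pieces added above can be combined roughly as follows. This is a minimal sketch, not part of the diff: it assumes the `@nimbus/hono` exports introduced here (`correlationId`, `logger`, `getCorrelationId`, `handleError`), Hono running on Deno via `Deno.serve`, and an illustrative `/hello/:name` route.

```ts
import {
    correlationId,
    getCorrelationId,
    handleError,
    logger,
} from '@nimbus/hono';
import { Hono } from 'hono';

const app = new Hono();

// correlationId() runs first so the logger middleware and any spans
// can pick the correlation ID up from the Hono context.
app.use(correlationId());
app.use(logger({ enableTracing: true }));

// Map Nimbus exceptions (and any unhandled errors) to HTTP responses.
app.onError(handleError);

// Illustrative route: echoes the path parameter and the correlation ID.
app.get('/hello/:name', (c) => {
    return c.json({
        greeting: `Hello, ${c.req.param('name')}!`,
        correlationId: getCorrelationId(c),
    });
});

Deno.serve(app.fetch);
```

Registering `correlationId()` before `logger()` matters: the logger reads the ID via `getCorrelationId(c)` at the start of the request, so it must already be set on the context.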