diff --git a/.claude/settings.local.json b/.claude/settings.local.json deleted file mode 100644 index 36ded71..0000000 --- a/.claude/settings.local.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "permissions": { - "allow": [ - "Bash(deno test:*)", - "Bash(deno add:*)", - "Bash(deno task:*)", - "Bash(deno check:*)", - "Bash(find:*)", - "Bash(deno doc:*)", - "WebFetch(domain:docs.eventsourcingdb.io)", - "Bash(cat:*)", - "WebSearch", - "Bash(ls:*)" - ], - "deny": [], - "ask": [] - } -} diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index 6ac1c12..1c561e6 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -14,24 +14,16 @@ jobs: deploy: name: Format, Lint, Type Check & Test runs-on: ubuntu-latest - env: - DENO_DIR: deno_cache_dir - steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - - name: Cache Deno dependencies - uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 - with: - path: ${{ env.DENO_DIR }} - key: deno-${{ runner.os }}-${{ hashFiles('deno.lock') }} - restore-keys: | - deno-${{ runner.os }}- - - uses: denoland/setup-deno@e95548e56dfa95d4e1a28d6f422fafe75c4c26fb with: deno-version: v2 + - name: Install dependencies + run: deno install --frozen + - name: Check formatting run: deno fmt --check diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 0a1af16..8844bea 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -35,3 +35,7 @@ jobs: - name: Publish utils package working-directory: ./packages/utils run: deno publish + + - name: Publish eventsourcingdb package + working-directory: ./packages/eventsourcingdb + run: deno publish diff --git a/.gitignore b/.gitignore index cda8035..3640d52 100644 --- a/.gitignore +++ b/.gitignore @@ -45,3 +45,9 @@ testem.log Thumbs.db .nx/cache + +# Claude Code +.claude/settings.local.json + +# EventSourcingDB +esdb-data/ diff --git a/CLAUDE.md b/CLAUDE.md index 4b873a9..c887bdd 100644 --- 
a/CLAUDE.md +++ b/CLAUDE.md @@ -1,10 +1,25 @@ +# Add and adjust tests + +Whenever new functionality is added, add tests for it afterwards. + +When functionality is changed, adjust the tests or add new test cases accordingly. + +# Documentation + +Documentation is handled in two ways: + +- Technical documentation as JSDoc comments in the code. This is mandatory for all publicly exported elements so JSR can generate documentation based on it. +- User documentation in the docs folder. This is targeted for users in a more guided way. + +Make sure to update the documentation when functionality is changed or added. + # Format, Lint, Type Check & Test -Whenever something is changed in the examples or packages, run the following commands to format, lint, type check and test the code. Running these commands from the repository root will check all examples and packages. +Whenever something is changed in the examples or packages, run the following commands to type check, format, lint and test the code. Running these commands from the repository root will check all examples and packages. ``` +deno check deno fmt --check deno lint -deno check -deno test +deno test --allow-all ``` diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md new file mode 100644 index 0000000..9226fda --- /dev/null +++ b/DEVELOPMENT.md @@ -0,0 +1,37 @@ +# EventSourcingDB + +To use the EventSourcingDB for testing and development purposes, you can use the following commands and find all the details in the [EventSourcingDB documentation](https://docs.eventsourcingdb.io/). 
+ +**Install EventSourcingDB** + +```bash +docker pull thenativeweb/eventsourcingdb +``` + +**Start EventSourcingDB with temporary data** + +```bash +docker run -it -p 3000:3000 \ + thenativeweb/eventsourcingdb run \ + --api-token=secret \ + --data-directory-temporary \ + --http-enabled \ + --https-enabled=false \ + --with-ui +``` + +**Start EventSourcingDB with persistent data** + +The data will be stored in the `esdb-data` directory which is ignored by Git. + +```bash +docker run -it \ + -p 3000:3000 \ + -v ./esdb-data:/var/lib/esdb \ + thenativeweb/eventsourcingdb run \ + --api-token=secret \ + --data-directory=/var/lib/esdb \ + --http-enabled \ + --https-enabled=false \ + --with-ui +``` diff --git a/deno.json b/deno.json index fa10ed8..f789c24 100644 --- a/deno.json +++ b/deno.json @@ -10,7 +10,9 @@ "./packages/mongodb", "./packages/hono", "./packages/utils", - "./examples/hono-demo" + "./packages/eventsourcingdb", + "./examples/hono-demo", + "./examples/eventsourcing-demo" ], "nodeModulesDir": "none", "fmt": { diff --git a/deno.lock b/deno.lock index e30337d..21c8165 100644 --- a/deno.lock +++ b/deno.lock @@ -1,19 +1,19 @@ { "version": "5", "specifiers": { + "jsr:@std/assert@1": "1.0.15", "jsr:@std/assert@^1.0.10": "1.0.15", - "jsr:@std/dotenv@*": "0.225.3", "jsr:@std/dotenv@~0.225.6": "0.225.6", - "jsr:@std/fmt@^1.0.4": "1.0.5", - "jsr:@std/fmt@^1.0.5": "1.0.5", + "jsr:@std/fmt@^1.0.5": "1.0.8", "jsr:@std/internal@^1.0.12": "1.0.12", - "jsr:@std/text@^1.0.10": "1.0.10", + "jsr:@std/regexp@^1.0.1": "1.0.1", + "jsr:@std/text@^1.0.10": "1.0.16", "jsr:@std/ulid@1": "1.0.0", "npm:@opentelemetry/api@^1.9.0": "1.9.0", - "npm:@types/node@*": "22.5.4", - "npm:hono@^4.11.4": "4.11.4", + "npm:eventsourcingdb@^1.8.1": "1.8.1", + "npm:hono@^4.11.4": "4.11.7", "npm:mongodb@7": "7.0.0", - "npm:zod@^4.3.5": "4.3.5" + "npm:zod@^4.3.5": "4.3.6" }, "jsr": { "@std/assert@1.0.15": { @@ -22,34 +22,75 @@ "jsr:@std/internal" ] }, - "@std/dotenv@0.225.3": { - "integrity": 
"a95e5b812c27b0854c52acbae215856d9cce9d4bbf774d938c51d212711e8d4a" - }, - "@std/dotenv@0.225.5": { - "integrity": "9ce6f9d0ec3311f74a32535aa1b8c62ed88b1ab91b7f0815797d77a6f60c922f" - }, "@std/dotenv@0.225.6": { "integrity": "1d6f9db72f565bd26790fa034c26e45ecb260b5245417be76c2279e5734c421b" }, - "@std/fmt@1.0.4": { - "integrity": "e14fe5bedee26f80877e6705a97a79c7eed599e81bb1669127ef9e8bc1e29a74" - }, - "@std/fmt@1.0.5": { - "integrity": "0cfab43364bc36650d83c425cd6d99910fc20c4576631149f0f987eddede1a4d" + "@std/fmt@1.0.8": { + "integrity": "71e1fc498787e4434d213647a6e43e794af4fd393ef8f52062246e06f7e372b7" }, "@std/internal@1.0.12": { "integrity": "972a634fd5bc34b242024402972cd5143eac68d8dffaca5eaa4dba30ce17b027" }, - "@std/text@1.0.10": { - "integrity": "9dcab377450253c0efa9a9a0c731040bfd4e1c03f8303b5934381467b7954338" + "@std/regexp@1.0.1": { + "integrity": "5179d823465085c5480dafb44438466e83c424fadc61ba31f744050ecc0f596d" + }, + "@std/text@1.0.16": { + "integrity": "ddb9853b75119a2473857d691cf1ec02ad90793a2e8b4a4ac49d7354281a0cf8", + "dependencies": [ + "jsr:@std/regexp" + ] }, "@std/ulid@1.0.0": { "integrity": "d41c3d27a907714413649fee864b7cde8d42ee68437d22b79d5de4f81d808780" } }, "npm": { - "@mongodb-js/saslprep@1.4.4": { - "integrity": "sha512-p7X/ytJDIdwUfFL/CLOhKgdfJe1Fa8uw9seJYvdOmnP9JBWGWHW69HkOixXS6Wy9yvGf1MbhcS6lVmrhy4jm2g==", + "@balena/dockerignore@1.0.2": { + "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" + }, + "@grpc/grpc-js@1.14.3": { + "integrity": "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==", + "dependencies": [ + "@grpc/proto-loader@0.8.0", + "@js-sdsl/ordered-map" + ] + }, + "@grpc/proto-loader@0.7.15": { + "integrity": "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==", + "dependencies": [ + "lodash.camelcase", + "long", + "protobufjs", + "yargs" + ], + "bin": true + }, + 
"@grpc/proto-loader@0.8.0": { + "integrity": "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==", + "dependencies": [ + "lodash.camelcase", + "long", + "protobufjs", + "yargs" + ], + "bin": true + }, + "@isaacs/cliui@8.0.2": { + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dependencies": [ + "string-width@5.1.2", + "string-width-cjs@npm:string-width@4.2.3", + "strip-ansi@7.1.2", + "strip-ansi-cjs@npm:strip-ansi@6.0.1", + "wrap-ansi@8.1.0", + "wrap-ansi-cjs@npm:wrap-ansi@7.0.0" + ] + }, + "@js-sdsl/ordered-map@4.4.2": { + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==" + }, + "@mongodb-js/saslprep@1.4.5": { + "integrity": "sha512-k64Lbyb7ycCSXHSLzxVdb2xsKGPMvYZfCICXvDsI8Z65CeWQzTEKS4YmGbnqw+U9RBvLPTsB6UCmwkgsDTGWIw==", "dependencies": [ "sparse-bitfield" ] @@ -57,12 +98,77 @@ "@opentelemetry/api@1.9.0": { "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==" }, - "@types/node@22.5.4": { - "integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", + "@pkgjs/parseargs@0.11.0": { + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==" + }, + "@protobufjs/aspromise@1.1.2": { + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "@protobufjs/base64@1.1.2": { + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "@protobufjs/codegen@2.0.4": { + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "@protobufjs/eventemitter@1.1.0": { + "integrity": 
"sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "@protobufjs/fetch@1.1.0": { + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "dependencies": [ + "@protobufjs/aspromise", + "@protobufjs/inquire" + ] + }, + "@protobufjs/float@1.0.2": { + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "@protobufjs/inquire@1.1.0": { + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "@protobufjs/path@1.1.2": { + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "@protobufjs/pool@1.1.0": { + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "@protobufjs/utf8@1.1.0": { + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "@types/docker-modem@3.0.6": { + "integrity": "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==", + "dependencies": [ + "@types/node", + "@types/ssh2" + ] + }, + "@types/dockerode@3.3.47": { + "integrity": "sha512-ShM1mz7rCjdssXt7Xz0u1/R2BJC7piWa3SJpUBiVjCf2A3XNn4cP6pUVaD8bLanpPVVn4IKzJuw3dOvkJ8IbYw==", + "dependencies": [ + "@types/docker-modem", + "@types/node", + "@types/ssh2" + ] + }, + "@types/node@18.19.130": { + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", "dependencies": [ "undici-types" ] }, + "@types/ssh2-streams@0.1.13": { + "integrity": "sha512-faHyY3brO9oLEA0QlcO8N2wT7R0+1sHWZvQ+y3rMLwdY1ZyS1z0W3t65j9PqT4HmQ6ALzNe7RZlNuCNE0wBSWA==", + "dependencies": [ + "@types/node" + ] + }, + "@types/ssh2@0.5.52": { + "integrity": 
"sha512-lbLLlXxdCZOSJMCInKH2+9V/77ET2J6NPQHpFI0kda61Dd1KglJs+fPQBchizmzYSOJBgdTajhPqBO1xxLywvg==", + "dependencies": [ + "@types/node", + "@types/ssh2-streams" + ] + }, "@types/webidl-conversions@7.0.3": { "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==" }, @@ -72,17 +178,394 @@ "@types/webidl-conversions" ] }, - "bson@7.0.0": { - "integrity": "sha512-Kwc6Wh4lQ5OmkqqKhYGKIuELXl+EPYSCObVE6bWsp1T/cGkOCBN0I8wF/T44BiuhHyNi1mmKVPXk60d41xZ7kw==" + "abort-controller@3.0.0": { + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": [ + "event-target-shim" + ] + }, + "ansi-regex@5.0.1": { + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "ansi-regex@6.2.2": { + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==" + }, + "ansi-styles@4.3.0": { + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": [ + "color-convert" + ] + }, + "ansi-styles@6.2.3": { + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==" + }, + "archiver-utils@5.0.2": { + "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", + "dependencies": [ + "glob", + "graceful-fs", + "is-stream", + "lazystream", + "lodash", + "normalize-path", + "readable-stream@4.7.0" + ] + }, + "archiver@7.0.1": { + "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", + "dependencies": [ + "archiver-utils", + "async", + "buffer-crc32", + "readable-stream@4.7.0", + "readdir-glob", + "tar-stream@3.1.7", + "zip-stream" + ] + }, + "asn1@0.2.6": { + "integrity": 
"sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dependencies": [ + "safer-buffer" + ] + }, + "async-lock@1.4.1": { + "integrity": "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==" + }, + "async@3.2.6": { + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==" + }, + "b4a@1.7.3": { + "integrity": "sha512-5Q2mfq2WfGuFp3uS//0s6baOJLMoVduPYVeNmDYxu5OUA1/cBfvr2RIS7vi62LdNj/urk1hfmj867I3qt6uZ7Q==" + }, + "balanced-match@1.0.2": { + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "bare-events@2.8.2": { + "integrity": "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==" + }, + "bare-fs@4.5.3_bare-events@2.8.2": { + "integrity": "sha512-9+kwVx8QYvt3hPWnmb19tPnh38c6Nihz8Lx3t0g9+4GoIf3/fTgYwM4Z6NxgI+B9elLQA7mLE9PpqcWtOMRDiQ==", + "dependencies": [ + "bare-events", + "bare-path", + "bare-stream", + "bare-url", + "fast-fifo" + ] + }, + "bare-os@3.6.2": { + "integrity": "sha512-T+V1+1srU2qYNBmJCXZkUY5vQ0B4FSlL3QDROnKQYOqeiQR8UbjNHlPa+TIbM4cuidiN9GaTaOZgSEgsvPbh5A==" + }, + "bare-path@3.0.0": { + "integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==", + "dependencies": [ + "bare-os" + ] + }, + "bare-stream@2.7.0_bare-events@2.8.2": { + "integrity": "sha512-oyXQNicV1y8nc2aKffH+BUHFRXmx6VrPzlnaEvMhram0nPBrKcEdcyBg5r08D0i8VxngHFAiVyn1QKXpSG0B8A==", + "dependencies": [ + "bare-events", + "streamx" + ], + "optionalPeers": [ + "bare-events" + ] + }, + "bare-url@2.3.2": { + "integrity": "sha512-ZMq4gd9ngV5aTMa5p9+UfY0b3skwhHELaDkhEHetMdX0LRkW9kzaym4oo/Eh+Ghm0CCDuMTsRIGM/ytUc1ZYmw==", + "dependencies": [ + "bare-path" + ] + }, + "base64-js@1.5.1": { + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" + 
}, + "bcrypt-pbkdf@1.0.2": { + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dependencies": [ + "tweetnacl" + ] + }, + "bl@4.1.0": { + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dependencies": [ + "buffer@5.7.1", + "inherits", + "readable-stream@3.6.2" + ] + }, + "brace-expansion@2.0.2": { + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dependencies": [ + "balanced-match" + ] + }, + "bson@7.1.1": { + "integrity": "sha512-TtJgBB+QyOlWjrbM+8bRgH84VM/xrDjyBFgSgGrfZF4xvt6gbEDtcswm27Tn9F9TWsjQybxT8b8VpCP/oJK4Dw==" + }, + "buffer-crc32@1.0.0": { + "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==" + }, + "buffer@5.7.1": { + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dependencies": [ + "base64-js", + "ieee754" + ] + }, + "buffer@6.0.3": { + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "dependencies": [ + "base64-js", + "ieee754" + ] + }, + "buildcheck@0.0.7": { + "integrity": "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==" + }, + "byline@5.0.0": { + "integrity": "sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q==" + }, + "chownr@1.1.4": { + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + }, + "cliui@8.0.1": { + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": [ + "string-width@4.2.3", + "strip-ansi@6.0.1", + "wrap-ansi@7.0.0" + ] + }, + "color-convert@2.0.1": { + "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": [ + "color-name" + ] + }, + "color-name@1.1.4": { + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "compress-commons@6.0.2": { + "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", + "dependencies": [ + "crc-32", + "crc32-stream", + "is-stream", + "normalize-path", + "readable-stream@4.7.0" + ] + }, + "core-util-is@1.0.3": { + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "cpu-features@0.0.10": { + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "dependencies": [ + "buildcheck", + "nan" + ], + "scripts": true + }, + "crc-32@1.2.2": { + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": true + }, + "crc32-stream@6.0.0": { + "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", + "dependencies": [ + "crc-32", + "readable-stream@4.7.0" + ] + }, + "cross-spawn@7.0.6": { + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dependencies": [ + "path-key", + "shebang-command", + "which" + ] + }, + "debug@4.4.3": { + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dependencies": [ + "ms" + ] + }, + "docker-compose@1.3.1": { + "integrity": "sha512-rF0wH69G3CCcmkN9J1RVMQBaKe8o77LT/3XmqcLIltWWVxcWAzp2TnO7wS3n/umZHN3/EVrlT3exSBMal+Ou1w==", + "dependencies": [ + "yaml" + ] + }, + "docker-modem@5.0.6": { + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "dependencies": [ + "debug", 
+ "readable-stream@3.6.2", + "split-ca", + "ssh2" + ] + }, + "dockerode@4.0.9": { + "integrity": "sha512-iND4mcOWhPaCNh54WmK/KoSb35AFqPAUWFMffTQcp52uQt36b5uNwEJTSXntJZBbeGad72Crbi/hvDIv6us/6Q==", + "dependencies": [ + "@balena/dockerignore", + "@grpc/grpc-js", + "@grpc/proto-loader@0.7.15", + "docker-modem", + "protobufjs", + "tar-fs@2.1.4", + "uuid" + ] + }, + "eastasianwidth@0.2.0": { + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "emoji-regex@8.0.0": { + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "emoji-regex@9.2.2": { + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "end-of-stream@1.4.5": { + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "dependencies": [ + "once" + ] + }, + "escalade@3.2.0": { + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==" + }, + "event-target-shim@5.0.1": { + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + }, + "events-universal@1.0.1": { + "integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==", + "dependencies": [ + "bare-events" + ] + }, + "events@3.3.0": { + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" + }, + "eventsourcingdb@1.8.1": { + "integrity": "sha512-RKZKEU05qPoiWGWw2/qvIrpzB8FQrB5iCiJCSoVfTKiTrpZA5MD2vUN220d9WMMux2CUI4k4/OH7HgM3Lj6IDA==", + "dependencies": [ + "testcontainers" + ] + }, + "fast-fifo@1.3.2": { + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" + }, + "foreground-child@3.3.1": { + "integrity": 
"sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dependencies": [ + "cross-spawn", + "signal-exit@4.1.0" + ] + }, + "fs-constants@1.0.0": { + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" + }, + "get-caller-file@2.0.5": { + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + }, + "get-port@7.1.0": { + "integrity": "sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==" + }, + "glob@10.5.0": { + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "dependencies": [ + "foreground-child", + "jackspeak", + "minimatch@9.0.5", + "minipass", + "package-json-from-dist", + "path-scurry" + ], + "bin": true + }, + "graceful-fs@4.2.11": { + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, - "hono@4.11.4": { - "integrity": "sha512-U7tt8JsyrxSRKspfhtLET79pU8K+tInj5QZXs1jSugO1Vq5dFj3kmZsRldo29mTBfcjDRVRXrEZ6LS63Cog9ZA==" + "hono@4.11.7": { + "integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==" + }, + "ieee754@1.2.1": { + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" + }, + "inherits@2.0.4": { + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "is-fullwidth-code-point@3.0.0": { + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "is-stream@2.0.1": { + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" + }, + "isarray@1.0.0": { + "integrity": 
"sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "isexe@2.0.0": { + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "jackspeak@3.4.3": { + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dependencies": [ + "@isaacs/cliui" + ], + "optionalDependencies": [ + "@pkgjs/parseargs" + ] + }, + "lazystream@1.0.1": { + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "dependencies": [ + "readable-stream@2.3.8" + ] + }, + "lodash.camelcase@4.3.0": { + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + }, + "lodash@4.17.23": { + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==" + }, + "long@5.3.2": { + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==" + }, + "lru-cache@10.4.3": { + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==" }, "memory-pager@1.5.0": { "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, - "mongodb-connection-string-url@7.0.0": { - "integrity": "sha512-irhhjRVLE20hbkRl4zpAYLnDMM+zIZnp0IDB9akAFFUZp/3XdOfwwddc7y6cNvF2WCEtfTYRwYbIfYa2kVY0og==", + "minimatch@5.1.6": { + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": [ + "brace-expansion" + ] + }, + "minimatch@9.0.5": { + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dependencies": [ + "brace-expansion" + ] + }, + "minipass@7.1.2": { + "integrity": 
"sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" + }, + "mkdirp-classic@0.5.3": { + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + }, + "mkdirp@1.0.4": { + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": true + }, + "mongodb-connection-string-url@7.0.1": { + "integrity": "sha512-h0AZ9A7IDVwwHyMxmdMXKy+9oNlF0zFoahHiX3vQ8e3KFcSP3VmsmfvtRSuLPxmyv2vjIDxqty8smTgie/SNRQ==", "dependencies": [ "@types/whatwg-url", "whatwg-url" @@ -96,23 +579,312 @@ "mongodb-connection-string-url" ] }, + "ms@2.1.3": { + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "nan@2.25.0": { + "integrity": "sha512-0M90Ag7Xn5KMLLZ7zliPWP3rT90P6PN+IzVFS0VqmnPktBk3700xUVv8Ikm9EUaUE5SDWdp/BIxdENzVznpm1g==" + }, + "normalize-path@3.0.0": { + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + }, + "once@1.4.0": { + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": [ + "wrappy" + ] + }, + "package-json-from-dist@1.0.1": { + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==" + }, + "path-key@3.1.1": { + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "path-scurry@1.11.1": { + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dependencies": [ + "lru-cache", + "minipass" + ] + }, + "process-nextick-args@2.0.1": { + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "process@0.11.10": { + "integrity": 
"sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" + }, + "proper-lockfile@4.1.2": { + "integrity": "sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==", + "dependencies": [ + "graceful-fs", + "retry", + "signal-exit@3.0.7" + ] + }, + "properties-reader@2.3.0": { + "integrity": "sha512-z597WicA7nDZxK12kZqHr2TcvwNU1GCfA5UwfDY/HDp3hXPoPlb5rlEx9bwGTiJnc0OqbBTkU975jDToth8Gxw==", + "dependencies": [ + "mkdirp" + ] + }, + "protobufjs@7.5.4": { + "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==", + "dependencies": [ + "@protobufjs/aspromise", + "@protobufjs/base64", + "@protobufjs/codegen", + "@protobufjs/eventemitter", + "@protobufjs/fetch", + "@protobufjs/float", + "@protobufjs/inquire", + "@protobufjs/path", + "@protobufjs/pool", + "@protobufjs/utf8", + "@types/node", + "long" + ], + "scripts": true + }, + "pump@3.0.3": { + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "dependencies": [ + "end-of-stream", + "once" + ] + }, "punycode@2.3.1": { "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, + "readable-stream@2.3.8": { + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": [ + "core-util-is", + "inherits", + "isarray", + "process-nextick-args", + "safe-buffer@5.1.2", + "string_decoder@1.1.1", + "util-deprecate" + ] + }, + "readable-stream@3.6.2": { + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": [ + "inherits", + "string_decoder@1.3.0", + "util-deprecate" + ] + }, + "readable-stream@4.7.0": { + "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "dependencies": [ + 
"abort-controller", + "buffer@6.0.3", + "events", + "process", + "string_decoder@1.3.0" + ] + }, + "readdir-glob@1.1.3": { + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "dependencies": [ + "minimatch@5.1.6" + ] + }, + "require-directory@2.1.1": { + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" + }, + "retry@0.12.0": { + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==" + }, + "safe-buffer@5.1.2": { + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "safe-buffer@5.2.1": { + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "safer-buffer@2.1.2": { + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "shebang-command@2.0.0": { + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": [ + "shebang-regex" + ] + }, + "shebang-regex@3.0.0": { + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + }, + "signal-exit@3.0.7": { + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "signal-exit@4.1.0": { + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" + }, "sparse-bitfield@3.0.3": { "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", "dependencies": [ "memory-pager" ] }, + "split-ca@1.0.1": { + "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" + }, + "ssh-remote-port-forward@1.0.4": { + "integrity": 
"sha512-x0LV1eVDwjf1gmG7TTnfqIzf+3VPRz7vrNIjX6oYLbeCrf/PeVY6hkT68Mg+q02qXxQhrLjB0jfgvhevoCRmLQ==", + "dependencies": [ + "@types/ssh2", + "ssh2" + ] + }, + "ssh2@1.17.0": { + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", + "dependencies": [ + "asn1", + "bcrypt-pbkdf" + ], + "optionalDependencies": [ + "cpu-features", + "nan" + ], + "scripts": true + }, + "streamx@2.23.0": { + "integrity": "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg==", + "dependencies": [ + "events-universal", + "fast-fifo", + "text-decoder" + ] + }, + "string-width@4.2.3": { + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": [ + "emoji-regex@8.0.0", + "is-fullwidth-code-point", + "strip-ansi@6.0.1" + ] + }, + "string-width@5.1.2": { + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dependencies": [ + "eastasianwidth", + "emoji-regex@9.2.2", + "strip-ansi@7.1.2" + ] + }, + "string_decoder@1.1.1": { + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": [ + "safe-buffer@5.1.2" + ] + }, + "string_decoder@1.3.0": { + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": [ + "safe-buffer@5.2.1" + ] + }, + "strip-ansi@6.0.1": { + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": [ + "ansi-regex@5.0.1" + ] + }, + "strip-ansi@7.1.2": { + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dependencies": [ + "ansi-regex@6.2.2" + ] + }, + "tar-fs@2.1.4": { + "integrity": 
"sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", + "dependencies": [ + "chownr", + "mkdirp-classic", + "pump", + "tar-stream@2.2.0" + ] + }, + "tar-fs@3.1.1": { + "integrity": "sha512-LZA0oaPOc2fVo82Txf3gw+AkEd38szODlptMYejQUhndHMLQ9M059uXR+AfS7DNo0NpINvSqDsvyaCrBVkptWg==", + "dependencies": [ + "pump", + "tar-stream@3.1.7" + ], + "optionalDependencies": [ + "bare-fs", + "bare-path" + ] + }, + "tar-stream@2.2.0": { + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dependencies": [ + "bl", + "end-of-stream", + "fs-constants", + "inherits", + "readable-stream@3.6.2" + ] + }, + "tar-stream@3.1.7": { + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "dependencies": [ + "b4a", + "fast-fifo", + "streamx" + ] + }, + "testcontainers@11.10.0": { + "integrity": "sha512-8hwK2EnrOZfrHPpDC7CPe03q7H8Vv8j3aXdcmFFyNV8dzpBzgZYmqyDtduJ8YQ5kbzj+A+jUXMQ6zI8B5U3z+g==", + "dependencies": [ + "@balena/dockerignore", + "@types/dockerode", + "archiver", + "async-lock", + "byline", + "debug", + "docker-compose", + "dockerode", + "get-port", + "proper-lockfile", + "properties-reader", + "ssh-remote-port-forward", + "tar-fs@3.1.1", + "tmp", + "undici" + ] + }, + "text-decoder@1.2.3": { + "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "dependencies": [ + "b4a" + ] + }, + "tmp@0.2.5": { + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==" + }, "tr46@5.1.1": { "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", "dependencies": [ "punycode" ] }, - "undici-types@6.19.8": { - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + "tweetnacl@0.14.5": { + "integrity": 
"sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + }, + "undici-types@5.26.5": { + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, + "undici@7.19.2": { + "integrity": "sha512-4VQSpGEGsWzk0VYxyB/wVX/Q7qf9t5znLRgs0dzszr9w9Fej/8RVNQ+S20vdXSAyra/bJ7ZQfGv6ZMj7UEbzSg==" + }, + "util-deprecate@1.0.2": { + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "uuid@10.0.0": { + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "bin": true }, "webidl-conversions@7.0.0": { "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==" @@ -124,51 +896,78 @@ "webidl-conversions" ] }, - "zod@4.3.5": { - "integrity": "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==" + "which@2.0.2": { + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": [ + "isexe" + ], + "bin": true + }, + "wrap-ansi@7.0.0": { + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": [ + "ansi-styles@4.3.0", + "string-width@4.2.3", + "strip-ansi@6.0.1" + ] + }, + "wrap-ansi@8.1.0": { + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dependencies": [ + "ansi-styles@6.2.3", + "string-width@5.1.2", + "strip-ansi@7.1.2" + ] + }, + "wrappy@1.0.2": { + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "y18n@5.0.8": { + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" + }, + "yaml@2.8.2": { + "integrity": 
"sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", + "bin": true + }, + "yargs-parser@21.1.1": { + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" + }, + "yargs@17.7.2": { + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dependencies": [ + "cliui", + "escalade", + "get-caller-file", + "require-directory", + "string-width@4.2.3", + "y18n", + "yargs-parser" + ] + }, + "zip-stream@6.0.1": { + "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", + "dependencies": [ + "archiver-utils", + "compress-commons", + "readable-stream@4.7.0" + ] + }, + "zod@4.3.6": { + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==" } }, - "redirects": { - "https://deno.land/std/testing/asserts.ts": "https://deno.land/std@0.224.0/testing/asserts.ts" - }, - "remote": { - "https://deno.land/std@0.224.0/assert/_constants.ts": "a271e8ef5a573f1df8e822a6eb9d09df064ad66a4390f21b3e31f820a38e0975", - "https://deno.land/std@0.224.0/assert/assert.ts": "09d30564c09de846855b7b071e62b5974b001bb72a4b797958fe0660e7849834", - "https://deno.land/std@0.224.0/assert/assert_almost_equals.ts": "9e416114322012c9a21fa68e187637ce2d7df25bcbdbfd957cd639e65d3cf293", - "https://deno.land/std@0.224.0/assert/assert_array_includes.ts": "14c5094471bc8e4a7895fc6aa5a184300d8a1879606574cb1cd715ef36a4a3c7", - "https://deno.land/std@0.224.0/assert/assert_equals.ts": "3bbca947d85b9d374a108687b1a8ba3785a7850436b5a8930d81f34a32cb8c74", - "https://deno.land/std@0.224.0/assert/assert_exists.ts": "43420cf7f956748ae6ed1230646567b3593cb7a36c5a5327269279c870c5ddfd", - "https://deno.land/std@0.224.0/assert/assert_false.ts": "3e9be8e33275db00d952e9acb0cd29481a44fa0a4af6d37239ff58d79e8edeff", - 
"https://deno.land/std@0.224.0/assert/assert_greater.ts": "5e57b201fd51b64ced36c828e3dfd773412c1a6120c1a5a99066c9b261974e46", - "https://deno.land/std@0.224.0/assert/assert_greater_or_equal.ts": "9870030f997a08361b6f63400273c2fb1856f5db86c0c3852aab2a002e425c5b", - "https://deno.land/std@0.224.0/assert/assert_instance_of.ts": "e22343c1fdcacfaea8f37784ad782683ec1cf599ae9b1b618954e9c22f376f2c", - "https://deno.land/std@0.224.0/assert/assert_is_error.ts": "f856b3bc978a7aa6a601f3fec6603491ab6255118afa6baa84b04426dd3cc491", - "https://deno.land/std@0.224.0/assert/assert_less.ts": "60b61e13a1982865a72726a5fa86c24fad7eb27c3c08b13883fb68882b307f68", - "https://deno.land/std@0.224.0/assert/assert_less_or_equal.ts": "d2c84e17faba4afe085e6c9123a63395accf4f9e00150db899c46e67420e0ec3", - "https://deno.land/std@0.224.0/assert/assert_match.ts": "ace1710dd3b2811c391946954234b5da910c5665aed817943d086d4d4871a8b7", - "https://deno.land/std@0.224.0/assert/assert_not_equals.ts": "78d45dd46133d76ce624b2c6c09392f6110f0df9b73f911d20208a68dee2ef29", - "https://deno.land/std@0.224.0/assert/assert_not_instance_of.ts": "3434a669b4d20cdcc5359779301a0588f941ffdc2ad68803c31eabdb4890cf7a", - "https://deno.land/std@0.224.0/assert/assert_not_match.ts": "df30417240aa2d35b1ea44df7e541991348a063d9ee823430e0b58079a72242a", - "https://deno.land/std@0.224.0/assert/assert_not_strict_equals.ts": "37f73880bd672709373d6dc2c5f148691119bed161f3020fff3548a0496f71b8", - "https://deno.land/std@0.224.0/assert/assert_object_match.ts": "411450fd194fdaabc0089ae68f916b545a49d7b7e6d0026e84a54c9e7eed2693", - "https://deno.land/std@0.224.0/assert/assert_rejects.ts": "4bee1d6d565a5b623146a14668da8f9eb1f026a4f338bbf92b37e43e0aa53c31", - "https://deno.land/std@0.224.0/assert/assert_strict_equals.ts": "b4f45f0fd2e54d9029171876bd0b42dd9ed0efd8f853ab92a3f50127acfa54f5", - "https://deno.land/std@0.224.0/assert/assert_string_includes.ts": "496b9ecad84deab72c8718735373feb6cdaa071eb91a98206f6f3cb4285e71b8", - 
"https://deno.land/std@0.224.0/assert/assert_throws.ts": "c6508b2879d465898dab2798009299867e67c570d7d34c90a2d235e4553906eb", - "https://deno.land/std@0.224.0/assert/assertion_error.ts": "ba8752bd27ebc51f723702fac2f54d3e94447598f54264a6653d6413738a8917", - "https://deno.land/std@0.224.0/assert/equal.ts": "bddf07bb5fc718e10bb72d5dc2c36c1ce5a8bdd3b647069b6319e07af181ac47", - "https://deno.land/std@0.224.0/assert/fail.ts": "0eba674ffb47dff083f02ced76d5130460bff1a9a68c6514ebe0cdea4abadb68", - "https://deno.land/std@0.224.0/assert/mod.ts": "48b8cb8a619ea0b7958ad7ee9376500fe902284bb36f0e32c598c3dc34cbd6f3", - "https://deno.land/std@0.224.0/assert/unimplemented.ts": "8c55a5793e9147b4f1ef68cd66496b7d5ba7a9e7ca30c6da070c1a58da723d73", - "https://deno.land/std@0.224.0/assert/unreachable.ts": "5ae3dbf63ef988615b93eb08d395dda771c96546565f9e521ed86f6510c29e19", - "https://deno.land/std@0.224.0/fmt/colors.ts": "508563c0659dd7198ba4bbf87e97f654af3c34eb56ba790260f252ad8012e1c5", - "https://deno.land/std@0.224.0/internal/diff.ts": "6234a4b493ebe65dc67a18a0eb97ef683626a1166a1906232ce186ae9f65f4e6", - "https://deno.land/std@0.224.0/internal/format.ts": "0a98ee226fd3d43450245b1844b47003419d34d210fa989900861c79820d21c2", - "https://deno.land/std@0.224.0/internal/mod.ts": "534125398c8e7426183e12dc255bb635d94e06d0f93c60a297723abe69d3b22e", - "https://deno.land/std@0.224.0/testing/asserts.ts": "d0cdbabadc49cc4247a50732ee0df1403fdcd0f95360294ad448ae8c240f3f5c" - }, "workspace": { "members": { + "examples/eventsourcing-demo": { + "dependencies": [ + "jsr:@std/dotenv@~0.225.6", + "jsr:@std/ulid@1", + "npm:eventsourcingdb@^1.8.1", + "npm:hono@^4.11.4", + "npm:mongodb@7", + "npm:zod@^4.3.5" + ] + }, "examples/hono-demo": { "dependencies": [ "jsr:@std/dotenv@~0.225.6", @@ -187,6 +986,14 @@ "npm:zod@^4.3.5" ] }, + "packages/eventsourcingdb": { + "dependencies": [ + "jsr:@std/assert@1", + "jsr:@std/ulid@1", + "npm:@opentelemetry/api@^1.9.0", + "npm:eventsourcingdb@^1.8.1" + ] + }, "packages/hono": 
{ "dependencies": [ "jsr:@std/ulid@1", diff --git a/docs/.vitepress/config.mjs b/docs/.vitepress/config.mjs index 0a6fac2..a74bc28 100644 --- a/docs/.vitepress/config.mjs +++ b/docs/.vitepress/config.mjs @@ -90,6 +90,33 @@ export default defineConfig({ ], }, + { + text: "EventSourcingDB", + link: "/guide/eventsourcingdb", + items: [ + { + text: "Client Setup", + link: "/guide/eventsourcingdb/client-setup", + }, + { + text: "Write Events", + link: "/guide/eventsourcingdb/write-events", + }, + { + text: "Read Events", + link: "/guide/eventsourcingdb/read-events", + }, + { + text: "Event Observer", + link: "/guide/eventsourcingdb/event-observer", + }, + { + text: "Event Mapping", + link: "/guide/eventsourcingdb/event-mapping", + }, + ], + }, + { text: "MongoDB", link: "/guide/mongodb", diff --git a/docs/guide/core/commands.md b/docs/guide/core/commands.md index 25a47d1..b5e73ff 100644 --- a/docs/guide/core/commands.md +++ b/docs/guide/core/commands.md @@ -23,7 +23,7 @@ You can find the full example on GitHub: [hono-demo](https://github.com/overlap- ## Key Characteristics - **Write Operations**: Commands modify application state -- **Intent-Based**: Commands express what should happen (e.g., "AddUser", "DeleteUser") +- **Intent-Based**: Commands express what should happen (e.g., "InviteUser", "AcceptInvitation") - **Type-Safe**: Commands are fully typed and validated using Zod ## Command Structure @@ -45,18 +45,18 @@ type Command = { }; ``` -| Property | Description | -| ----------------- | ---------------------------------------------------------------------------------- | -| `specversion` | The CloudEvents specification version (always `'1.0'`) | -| `id` | A globally unique identifier for the command | -| `correlationid` | A unique identifier to correlate this command with related messages | -| `time` | ISO 8601 timestamp when the command was created | -| `source` | A URI reference identifying the system creating the command | -| `type` | The command type following 
CloudEvents naming (e.g., `at.overlap.nimbus.add-user`) | -| `subject` | Optional identifier for the entity the command targets | -| `data` | The command payload containing the business data | -| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | -| `dataschema` | Optional URL to the schema the data adheres to | +| Property | Description | +| ----------------- | ------------------------------------------------------------------------------------- | +| `specversion` | The CloudEvents specification version (always `'1.0'`) | +| `id` | A globally unique identifier for the command | +| `correlationid` | A unique identifier to correlate this command with related messages | +| `time` | ISO 8601 timestamp when the command was created | +| `source` | A URI reference identifying the system creating the command | +| `type` | The command type following CloudEvents naming (e.g., `at.overlap.nimbus.invite-user`) | +| `subject` | Optional identifier for the entity the command targets | +| `data` | The command payload containing the business data | +| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | +| `dataschema` | Optional URL to the schema the data adheres to | ## Command Schema @@ -67,8 +67,8 @@ import { commandSchema } from "@nimbus/core"; import { z } from "zod"; // Extend the base schema with your specific command type and data -const addUserCommandSchema = commandSchema.extend({ - type: z.literal("at.overlap.nimbus.add-user"), +const inviteUserCommandSchema = commandSchema.extend({ + type: z.literal("at.overlap.nimbus.invite-user"), data: z.object({ email: z.email(), firstName: z.string(), @@ -76,7 +76,7 @@ const addUserCommandSchema = commandSchema.extend({ }), }); -type AddUserCommand = z.infer; +type InviteUserCommand = z.infer; ``` ## Create Commands @@ -85,10 +85,10 @@ You can create commands using the `createCommand()` helper: ```typescript import { createCommand } from "@nimbus/core"; -import 
{ AddUserCommand } from "./addUser.command.ts"; +import { InviteUserCommand } from "./inviteUser.command.ts"; -const commandForJane = createCommand({ - type: "at.overlap.nimbus.add-user", +const commandForJane = createCommand({ + type: "at.overlap.nimbus.invite-user", source: "nimbus.overlap.at", data: { email: "jane@example.com", @@ -97,8 +97,8 @@ const commandForJane = createCommand({ }, }); -const commandForJohn = createCommand({ - type: "at.overlap.nimbus.add-user", +const commandForJohn = createCommand({ + type: "at.overlap.nimbus.invite-user", source: "nimbus.overlap.at", data: { email: "john@example.com", diff --git a/docs/guide/core/event-bus.md b/docs/guide/core/event-bus.md index 8010c14..eba800f 100644 --- a/docs/guide/core/event-bus.md +++ b/docs/guide/core/event-bus.md @@ -63,8 +63,8 @@ import { getEventBus } from "@nimbus/core"; const eventBus = getEventBus("MyEventBus"); eventBus.subscribeEvent({ - type: "at.overlap.nimbus.user-added", - handler: async (event: UserAddedEvent) => { + type: "at.overlap.nimbus.user-invited", + handler: async (event: UserInvitedEvent) => { // Process event and return result }, }); @@ -103,8 +103,8 @@ import { createEvent, getEventBus } from "@nimbus/core"; const eventBus = getEventBus("default"); -const event = createEvent({ - type: "at.overlap.nimbus.user-added", +const event = createEvent({ + type: "at.overlap.nimbus.user-invited", source: "nimbus.overlap.at", correlationid: command.correlationid, subject: `/users/${user.id}`, @@ -115,7 +115,7 @@ const event = createEvent({ }, }); -eventBus.putEvent(event); +eventBus.putEvent(event); ``` ## Retry Mechanism diff --git a/docs/guide/core/events.md b/docs/guide/core/events.md index c44b667..b4dca60 100644 --- a/docs/guide/core/events.md +++ b/docs/guide/core/events.md @@ -23,7 +23,7 @@ You can find the full example on GitHub: [hono-demo](https://github.com/overlap- ## Key Characteristics - **Immutable Facts**: Events represent things that already happened and cannot be 
changed -- **Past Tense**: Event names use past tense (e.g., "UserAdded", not "AddUser") +- **Past Tense**: Event names use past tense (e.g., "UserInvited", not "InviteUser") - **Observable**: Other parts of the system can subscribe and react to events - **Type-Safe**: Events are fully typed and validated using Zod @@ -53,7 +53,7 @@ type Event = { | `correlationid` | A unique identifier to correlate this event with related messages | | `time` | ISO 8601 timestamp when the event was created | | `source` | A URI reference identifying the system creating the event | -| `type` | The event type following CloudEvents naming (e.g., `at.overlap.nimbus.user-added`) | +| `type` | The event type following CloudEvents naming (e.g., `at.overlap.nimbus.user-invited`) | | `subject` | An identifier for the entity the event is about (e.g., `/users/123`) | | `data` | The event payload containing the business data | | `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | @@ -80,8 +80,8 @@ import { eventSchema } from "@nimbus/core"; import { z } from "zod"; // Extend the base schema with your specific event type and data -const userAddedEventSchema = eventSchema.extend({ - type: z.literal("at.overlap.nimbus.user-added"), +const userInvitedEventSchema = eventSchema.extend({ + type: z.literal("at.overlap.nimbus.user-invited"), data: z.object({ _id: z.string(), email: z.string(), @@ -90,7 +90,7 @@ const userAddedEventSchema = eventSchema.extend({ }), }); -type UserAddedEvent = z.infer; +type UserInvitedEvent = z.infer; ``` ## Create Events @@ -99,10 +99,10 @@ You can create events using the `createEvent()` helper: ```typescript import { createEvent } from "@nimbus/core"; -import { UserAddedEvent } from "./userAdded.event.ts"; +import { UserInvitedEvent } from "./userInvited.event.ts"; -const event = createEvent({ - type: "at.overlap.nimbus.user-added", +const event = createEvent({ + type: "at.overlap.nimbus.user-invited", source: "nimbus.overlap.at", 
correlationid: command.correlationid, subject: `/users/${userState._id}`, @@ -126,12 +126,12 @@ Event names should describe what happened, not what should happen: ```typescript // ✅ Good - Past tense -UserAddedEvent; +UserInvitedEvent; OrderShippedEvent; PaymentProcessedEvent; // ❌ Bad - Imperative -AddUserEvent; +InviteUserEvent; ShipOrderEvent; ProcessPaymentEvent; ``` @@ -141,8 +141,8 @@ ProcessPaymentEvent; Always pass correlation IDs from commands to events for tracing: ```typescript -const event = createEvent({ - type: USER_ADDED_EVENT_TYPE, +const event = createEvent({ + type: USER_INVITED_EVENT_TYPE, source: "nimbus.overlap.at", correlationid: command.correlationid, // Always propagate data: state, diff --git a/docs/guide/core/router.md b/docs/guide/core/router.md index 7b857cc..5421908 100644 --- a/docs/guide/core/router.md +++ b/docs/guide/core/router.md @@ -68,20 +68,20 @@ export const registerUserMessages = () => { // Register a command router.register( - "at.overlap.nimbus.add-user", - async (command: AddUserCommand) => { + "at.overlap.nimbus.invite-user", + async (command: InviteUserCommand) => { // Process command and return result }, - addUserCommandSchema + inviteUserCommandSchema ); // Register an event router.register( - "at.overlap.nimbus.user-added", - async (event: UserAddedEvent) => { + "at.overlap.nimbus.user-invited", + async (event: UserInvitedEvent) => { // Process event and return result }, - addUserCommandSchema + inviteUserCommandSchema ); // Register a query @@ -99,7 +99,7 @@ The `register()` method takes three arguments: | Argument | Description | | ------------- | ------------------------------------------------------------------ | -| `messageType` | The CloudEvents type string (e.g., `'at.overlap.nimbus.add-user'`) | +| `messageType` | The CloudEvents type string (e.g., `'at.overlap.nimbus.invite-user'`) | | `handler` | An async function that processes the message and returns a result | | `schema` | A Zod schema used to validate 
the incoming message | @@ -110,8 +110,8 @@ Route messages to their handlers using the `route()` method: ```typescript import { createCommand, getRouter } from "@nimbus/core"; -const command = createCommand({ - type: "at.overlap.nimbus.add-user", +const command = createCommand({ + type: "at.overlap.nimbus.invite-user", source: "nimbus.overlap.at", correlationid: httpRequestCorrelationId, data: httpRequestBody, diff --git a/docs/guide/eventsourcingdb/client-setup.md b/docs/guide/eventsourcingdb/client-setup.md new file mode 100644 index 0000000..37521fa --- /dev/null +++ b/docs/guide/eventsourcingdb/client-setup.md @@ -0,0 +1,112 @@ +--- +prev: + text: "Nimbus EventSourcingDB" + link: "/guide/eventsourcingdb" + +next: + text: "Write Events" + link: "/guide/eventsourcingdb/write-events" +--- + +# Client Setup + +The `setupEventSourcingDBClient` function initializes a singleton EventSourcingDB client that is used by all other functions in this package. It verifies connectivity and authenticates with the server before the application starts processing events. + +::: info Example Application +The examples on this page reference the eventsourcing-demo application. + +You can find the full example on GitHub: [eventsourcing-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/eventsourcing-demo) +::: + +## Basic Usage + +```typescript +import { setupEventSourcingDBClient } from "@nimbus/eventsourcingdb"; + +await setupEventSourcingDBClient({ + url: new URL(process.env.ESDB_URL ?? ""), + apiToken: process.env.ESDB_API_TOKEN ?? 
"", +}); +``` + +## Configuration Options + +| Option | Type | Description | +| ---------------- | ----------------- | --------------------------------------------------------------- | +| `url` | `URL` | The URL of the EventSourcingDB server | +| `apiToken` | `string` | The API token for authenticating with EventSourcingDB | +| `eventObservers` | `EventObserver[]` | Optional array of event observers to start after initialization | + +## Initialization Behavior + +When `setupEventSourcingDBClient` is called, it performs the following steps: + +1. Creates a new client instance with the provided URL and API token +2. Pings the EventSourcingDB server to verify connectivity +3. Validates the API token +4. Starts any provided event observers in the background + +If the connection or authentication fails, a `GenericException` is thrown and the error is logged. + +## Setup with Event Observers + +You can provide event observers that will automatically start observing events after the client is initialized: + +```typescript +import { setupEventSourcingDBClient } from "@nimbus/eventsourcingdb"; +import type { Event } from "eventsourcingdb"; + +await setupEventSourcingDBClient({ + url: new URL(process.env.ESDB_URL ?? ""), + apiToken: process.env.ESDB_API_TOKEN ?? "", + eventObservers: [ + { + subject: "/", + recursive: true, + eventHandler: (event: Event) => { + console.log("Received event:", event); + }, + }, + ], +}); +``` + +See the [Event Observer](/guide/eventsourcingdb/event-observer) documentation for details on configuring observers. + +## Getting the Client + +After initialization, use `getEventSourcingDBClient` to access the singleton client instance anywhere in your application: + +```typescript +import { getEventSourcingDBClient } from "@nimbus/eventsourcingdb"; + +const client = getEventSourcingDBClient(); +``` + +::: tip +You typically don't need to call `getEventSourcingDBClient` directly. 
The [`writeEvents`](/guide/eventsourcingdb/write-events) and [`readEvents`](/guide/eventsourcingdb/read-events) functions handle this internally. +::: + +## Error Handling + +The setup function throws a `GenericException` in two cases: + +| Error | Cause | +| ------------------ | -------------------------------------------- | +| Connection failure | The EventSourcingDB server is unreachable | +| Invalid API token | The provided API token could not be verified | + +```typescript +import { setupEventSourcingDBClient } from "@nimbus/eventsourcingdb"; + +try { + await setupEventSourcingDBClient({ + url: new URL(process.env.ESDB_URL ?? ""), + apiToken: process.env.ESDB_API_TOKEN ?? "", + }); +} catch (error) { + // GenericException: + // - "Could not connect to EventSourcingDB" + // - "Invalid API token. Please check your API token." +} +``` diff --git a/docs/guide/eventsourcingdb/event-mapping.md b/docs/guide/eventsourcingdb/event-mapping.md new file mode 100644 index 0000000..ef60a35 --- /dev/null +++ b/docs/guide/eventsourcingdb/event-mapping.md @@ -0,0 +1,134 @@ +--- +prev: + text: "Event Observer" + link: "/guide/eventsourcingdb/event-observer" + +next: + text: "Nimbus MongoDB" + link: "/guide/mongodb" +--- + +# Event Mapping + +The event mapping utilities convert between Nimbus events and EventSourcingDB events. When writing events, Nimbus metadata (correlation ID, data schema) is preserved alongside the payload. When reading events back, the original Nimbus event structure is restored. 
+
+## How Events Are Stored
+
+When a Nimbus event is written to EventSourcingDB, its `data` field is wrapped in a structure that preserves Nimbus-specific metadata:
+
+```typescript
+// Original Nimbus event data
+{
+  email: "john@example.com",
+  firstName: "John",
+  lastName: "Doe",
+}
+
+// Stored in EventSourcingDB as
+{
+  payload: {
+    email: "john@example.com",
+    firstName: "John",
+    lastName: "Doe",
+  },
+  nimbusMeta: {
+    correlationid: "01JKXYZ...",
+    dataschema: "https://example.com/schemas/user-invited",
+  },
+}
+```
+
+## Types
+
+### EventData
+
+The wrapper structure used to store Nimbus events in EventSourcingDB:
+
+```typescript
+type EventData = {
+  payload: Record<string, unknown>;
+  nimbusMeta: NimbusEventMetadata;
+};
+```
+
+### NimbusEventMetadata
+
+Metadata that Nimbus attaches to events stored in EventSourcingDB:
+
+```typescript
+type NimbusEventMetadata = {
+  correlationid: string;
+  dataschema?: string;
+};
+```
+
+## Converting Nimbus Events to EventSourcingDB
+
+The `nimbusEventToEventSourcingDBEventCandidate` function converts a Nimbus event into an EventSourcingDB event candidate:
+
+```typescript
+import { nimbusEventToEventSourcingDBEventCandidate } from "@nimbus/eventsourcingdb";
+
+const eventCandidate = nimbusEventToEventSourcingDBEventCandidate(nimbusEvent);
+```
+
+The conversion maps the following properties:
+
+| Nimbus Event    | EventSourcingDB Event Candidate |
+| --------------- | ------------------------------- |
+| `source`        | `source`                        |
+| `subject`       | `subject`                       |
+| `type`          | `type`                          |
+| `data`          | `data.payload`                  |
+| `correlationid` | `data.nimbusMeta.correlationid` |
+| `dataschema`    | `data.nimbusMeta.dataschema`    |
+
+::: tip
+You typically don't need to call this function directly. The [`writeEvents`](/guide/eventsourcingdb/write-events) function handles the conversion internally.
+::: + +## Converting EventSourcingDB Events to Nimbus + +The `eventSourcingDBEventToNimbusEvent` function converts an EventSourcingDB event back into a Nimbus event: + +```typescript +import { eventSourcingDBEventToNimbusEvent } from "@nimbus/eventsourcingdb"; +import type { Event } from "eventsourcingdb"; + +const handleEvent = (eventSourcingDBEvent: Event) => { + const nimbusEvent = eventSourcingDBEventToNimbusEvent(eventSourcingDBEvent); + + console.log(nimbusEvent.correlationid); // Restored from nimbusMeta + console.log(nimbusEvent.data); // Original payload +}; +``` + +The function supports generic typing for specific event types: + +```typescript +import { Event } from "@nimbus/core"; +import { eventSourcingDBEventToNimbusEvent } from "@nimbus/eventsourcingdb"; + +const event = eventSourcingDBEventToNimbusEvent(eventSourcingDBEvent); +``` + +### Handling Non-Nimbus Events + +If the EventSourcingDB event was not written by Nimbus (i.e., it does not contain the `nimbusMeta` wrapper), the function gracefully handles this by: + +- Treating the entire `data` field as the payload +- Generating a new correlation ID using ULID + +## Type Guard + +The `isEventData` type guard checks whether event data conforms to the `EventData` structure: + +```typescript +import { isEventData } from "@nimbus/eventsourcingdb"; + +if (isEventData(event.data)) { + // event.data is typed as EventData + console.log(event.data.payload); + console.log(event.data.nimbusMeta.correlationid); +} +``` diff --git a/docs/guide/eventsourcingdb/event-observer.md b/docs/guide/eventsourcingdb/event-observer.md new file mode 100644 index 0000000..a758a2c --- /dev/null +++ b/docs/guide/eventsourcingdb/event-observer.md @@ -0,0 +1,198 @@ +--- +prev: + text: "Read Events" + link: "/guide/eventsourcingdb/read-events" + +next: + text: "Event Mapping" + link: "/guide/eventsourcingdb/event-mapping" +--- + +# Event Observer + +The `initEventObserver` function starts a background event observation loop that 
continuously listens for new events from EventSourcingDB. Observers automatically reconnect with exponential backoff on failure, making them ideal for building read-side projections and reactive event handlers. + +For full details on observing events, including resuming after connection loss and observing from the last event of a given type, see the [Observing Events](https://docs.eventsourcingdb.io/getting-started/observing-events/) section in the EventSourcingDB documentation. + +::: info Example Application +The examples on this page reference the eventsourcing-demo application. + +You can find the full example on GitHub: [eventsourcing-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/eventsourcing-demo) +::: + +## Basic Usage + +Event observers are typically configured as part of the [client setup](/guide/eventsourcingdb/client-setup): + +```typescript +import { setupEventSourcingDBClient } from "@nimbus/eventsourcingdb"; +import type { Event } from "eventsourcingdb"; + +await setupEventSourcingDBClient({ + url: new URL(process.env.ESDB_URL ?? ""), + apiToken: process.env.ESDB_API_TOKEN ?? 
"", + eventObservers: [ + { + subject: "/", + recursive: true, + eventHandler: (event: Event) => { + console.log("Received event:", event); + }, + }, + ], +}); +``` + +You can also start an observer independently after the client has been initialized: + +```typescript +import { initEventObserver } from "@nimbus/eventsourcingdb"; +import type { Event } from "eventsourcingdb"; + +initEventObserver({ + subject: "/users", + recursive: true, + eventHandler: async (event: Event) => { + console.log("Received event:", event); + }, +}); +``` + +## EventObserver Options + +| Option | Type | Default | Description | +| ------------------ | -------------------------- | ----------- | ------------------------------------------------------------ | +| `subject` | `string` | _(required)_ | The subject to observe events for | +| `recursive` | `boolean` | `false` | Whether to observe events recursively for all sub-subjects | +| `lowerBound` | `Bound` | `undefined` | The starting position for observation | +| `fromLatestEvent` | `ObserveFromLatestEvent` | `undefined` | Start observation from a specific latest event | +| `eventHandler` | `(event: Event) => void` | _(required)_ | Handler function called for each observed event | +| `retryOptions` | `RetryOptions` | see below | Options for retry behavior on connection failure | + +### Bound + +The `lowerBound` option defines where observation starts: + +```typescript +{ + id: "last-processed-event-id", + type: "exclusive", // or "inclusive" +} +``` + +| Property | Type | Description | +| -------- | ----------------------------- | --------------------------------------- | +| `id` | `string` | The event ID to start from | +| `type` | `"inclusive"` \| `"exclusive"` | Whether to include or exclude this event | + +### ObserveFromLatestEvent + +The `fromLatestEvent` option starts observation from the latest event matching specific criteria: + +```typescript +{ + subject: "/users", + type: "at.overlap.nimbus.user-invited", + ifEventIsMissing: 
"read-everything", // or "wait-for-event" +} +``` + +| Property | Type | Description | +| ------------------ | ------------------------------------------ | ---------------------------------------------------- | +| `subject` | `string` | The subject to find the latest event for | +| `type` | `string` | The event type to match | +| `ifEventIsMissing` | `"read-everything"` \| `"wait-for-event"` | What to do if no matching event exists | + +## Retry Options + +| Option | Type | Default | Description | +| -------------------- | -------- | ------- | -------------------------------------------------------- | +| `maxRetries` | `number` | `3` | Maximum number of retry attempts before giving up | +| `initialRetryDelayMs`| `number` | `3000` | Initial delay in milliseconds before the first retry | + +The observer uses **exponential backoff with jitter** for retries: + +- Base delay doubles with each attempt: `initialDelayMs * 2^attempt` +- Random jitter of 0-30% is added to avoid thundering-herd effects +- After `maxRetries` consecutive failures, a critical error is logged and the observer stops + +## Building Projections + +A common use case for event observers is building read-side projections. 
The observer processes events and updates an in-memory or persistent view: + +```typescript +import { Event, getLogger } from "@nimbus/core"; +import { eventSourcingDBEventToNimbusEvent } from "@nimbus/eventsourcingdb"; +import { Event as EventSourcingDBEvent } from "eventsourcingdb"; + +const USER_INVITED_EVENT_TYPE = "at.overlap.nimbus.user-invited"; +const USER_INVITATION_ACCEPTED_EVENT_TYPE = + "at.overlap.nimbus.user-invitation-accepted"; + +const usersStore = new Map(); + +export const projectViews = ( + eventSourcingDBEvent: EventSourcingDBEvent, +) => { + const event = eventSourcingDBEventToNimbusEvent( + eventSourcingDBEvent, + ); + + switch (event.type) { + case USER_INVITED_EVENT_TYPE: { + usersStore.set(event.data.id, { + id: event.data.id, + revision: event.id, + email: event.data.email, + firstName: event.data.firstName, + lastName: event.data.lastName, + invitedAt: event.data.invitedAt, + acceptedAt: null, + }); + break; + } + case USER_INVITATION_ACCEPTED_EVENT_TYPE: { + const id = event.subject.split("/")[2]; + const currentRow = usersStore.get(id); + + usersStore.set(id, { + ...currentRow, + revision: event.id, + acceptedAt: event.data.acceptedAt, + }); + break; + } + default: { + getLogger().warn({ + category: "ProjectViews", + message: `Unknown event type ${event.type}`, + }); + break; + } + } +}; +``` + +Then register this projection handler as an event observer: + +```typescript +await setupEventSourcingDBClient({ + url: new URL(process.env.ESDB_URL ?? ""), + apiToken: process.env.ESDB_API_TOKEN ?? "", + eventObservers: [ + { + subject: "/", + recursive: true, + eventHandler: projectViews, + }, + ], +}); +``` + +## Automatic Position Tracking + +The observer automatically tracks its position in the event stream. After each successfully handled event, the `lowerBound` is updated so that reconnections resume from the last processed event rather than replaying the entire stream. 
+ +## OpenTelemetry Tracing + +Each observed event is processed within an OpenTelemetry span named `eventsourcingdb.observeEvent`. If the event carries a `traceparent` (injected by `writeEvents`), the span is linked to the original writer's trace, enabling end-to-end distributed tracing across the write and read sides. diff --git a/docs/guide/eventsourcingdb/index.md b/docs/guide/eventsourcingdb/index.md new file mode 100644 index 0000000..9a4a5ef --- /dev/null +++ b/docs/guide/eventsourcingdb/index.md @@ -0,0 +1,33 @@ +--- +prev: + text: "onError Handler" + link: "/guide/hono/on-error" + +next: + text: "Client Setup" + link: "/guide/eventsourcingdb/client-setup" +--- + +# Nimbus EventSourcingDB Package + +The EventSourcingDB package provides a seamless integration between Nimbus and [EventSourcingDB](https://www.eventsourcingdb.io/). It offers a managed client, event reading and writing with automatic Nimbus event mapping, event observers with retry logic, and built-in OpenTelemetry tracing. + +[https://jsr.io/@nimbus/eventsourcingdb](https://jsr.io/@nimbus/eventsourcingdb) + +### Deno + +```bash +deno add jsr:@nimbus/eventsourcingdb +``` + +### NPM + +```bash +npx jsr add @nimbus/eventsourcingdb +``` + +### Bun + +```bash +bunx jsr add @nimbus/eventsourcingdb +``` diff --git a/docs/guide/eventsourcingdb/read-events.md b/docs/guide/eventsourcingdb/read-events.md new file mode 100644 index 0000000..4fca38c --- /dev/null +++ b/docs/guide/eventsourcingdb/read-events.md @@ -0,0 +1,114 @@ +--- +prev: + text: "Write Events" + link: "/guide/eventsourcingdb/write-events" + +next: + text: "Event Observer" + link: "/guide/eventsourcingdb/event-observer" +--- + +# Read Events + +The `readEvents` function reads events from EventSourcingDB for a given subject. It returns an async generator that yields raw EventSourcingDB events, which can be converted to Nimbus events using the [event mapping](/guide/eventsourcingdb/event-mapping) utilities. 
+ +For full details on reading events, including reading from multiple subjects, reading in reverse order, reading specific ranges, and reading from the last event of a given type, see the [Reading Events](https://docs.eventsourcingdb.io/getting-started/reading-events/) section in the EventSourcingDB documentation. + +::: info Example Application +The examples on this page reference the eventsourcing-demo application. + +You can find the full example on GitHub: [eventsourcing-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/eventsourcing-demo) +::: + +## Basic Usage + +```typescript +import { + eventSourcingDBEventToNimbusEvent, + readEvents, +} from "@nimbus/eventsourcingdb"; + +for await (const eventSourcingDBEvent of readEvents("/users/123", { + recursive: false, +})) { + const event = eventSourcingDBEventToNimbusEvent(eventSourcingDBEvent); + console.log(event); +} +``` + +## Function Parameters + +| Parameter | Type | Description | +| --------- | ------------------- | ------------------------------------------------ | +| `subject` | `string` | The subject to read events for | +| `options` | `ReadEventsOptions` | Options to control which events are read | +| `signal` | `AbortSignal` | Optional abort signal to cancel the read | + +## Rebuilding Aggregate State + +A common pattern in event sourcing is to rebuild an aggregate's state by replaying all of its events. 
This is used in command handlers to load the current state before making decisions: + +```typescript +import { + eventSourcingDBEventToNimbusEvent, + readEvents, + writeEvents, +} from "@nimbus/eventsourcingdb"; +import { isSubjectOnEventId } from "eventsourcingdb"; + +const acceptUserInvitationCommandHandler = async (command) => { + let state: UserState = { id: command.data.id }; + + // Replay all events to rebuild the current state + for await ( + const eventSourcingDBEvent of readEvents( + `/users/${command.data.id}`, + { recursive: false }, + ) + ) { + const event = eventSourcingDBEventToNimbusEvent(eventSourcingDBEvent); + state = applyEventToUserState(state, event); + } + + // Use the rebuilt state to make decisions + const events = acceptUserInvitation(state, command); + + // Write new events with optimistic concurrency + await writeEvents(events, [ + isSubjectOnEventId( + events[0].subject, + command.data.expectedRevision, + ), + ]); +}; +``` + +## Cancellation + +Use an `AbortSignal` to cancel an in-progress read: + +```typescript +import { readEvents } from "@nimbus/eventsourcingdb"; + +const controller = new AbortController(); + +// Cancel after 5 seconds +setTimeout(() => controller.abort(), 5000); + +for await (const event of readEvents( + "/users", + { recursive: true }, + controller.signal, +)) { + console.log(event); +} +``` + +## OpenTelemetry Tracing + +Every call to `readEvents` is automatically wrapped in an OpenTelemetry span named `eventsourcingdb.readEvents`. 
The following metrics are recorded: + +| Metric | Type | Labels | Description | +| -------------------------------------------- | --------- | -------------------- | ----------------------------------------- | +| `eventsourcingdb_operation_total` | Counter | `operation`, `status` | Total number of read operations | +| `eventsourcingdb_operation_duration_seconds` | Histogram | `operation` | Duration of read operations in seconds | diff --git a/docs/guide/eventsourcingdb/write-events.md b/docs/guide/eventsourcingdb/write-events.md new file mode 100644 index 0000000..5336af7 --- /dev/null +++ b/docs/guide/eventsourcingdb/write-events.md @@ -0,0 +1,73 @@ +--- +prev: + text: "Client Setup" + link: "/guide/eventsourcingdb/client-setup" + +next: + text: "Read Events" + link: "/guide/eventsourcingdb/read-events" +--- + +# Write Events + +The `writeEvents` function persists one or more Nimbus events to EventSourcingDB. It automatically converts Nimbus events to EventSourcingDB event candidates, injects OpenTelemetry trace context, and supports preconditions for optimistic concurrency control. + +::: info Example Application +The examples on this page reference the eventsourcing-demo application. 
+ +You can find the full example on GitHub: [eventsourcing-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/eventsourcing-demo) +::: + +## Basic Usage + +```typescript +import { createEvent } from "@nimbus/core"; +import { writeEvents } from "@nimbus/eventsourcingdb"; +import { isSubjectPristine } from "eventsourcingdb"; + +const event = createEvent({ + type: "at.overlap.nimbus.user-invited", + source: "nimbus.overlap.at", + correlationid: command.correlationid, + subject: `/users/${id}`, + data: { + email: "john@example.com", + firstName: "John", + lastName: "Doe", + invitedAt: new Date().toISOString(), + }, +}); + +await writeEvents([event], [isSubjectPristine(event.subject)]); +``` + +## Function Parameters + +| Parameter | Type | Description | +| --------------- | ---------------- | ---------------------------------------------------------------- | +| `events` | `Event[]` | An array of Nimbus events to write | +| `preconditions` | `Precondition[]` | Optional preconditions that must be met for the write to succeed | + +## Preconditions + +EventSourcingDB supports the following preconditions. For full details, see the [Using Preconditions](https://docs.eventsourcingdb.io/getting-started/writing-events/#using-preconditions) section in the EventSourcingDB documentation. + +To use these preconditions, you can directly import them from the [`EventSourcingDB JavaScript SDK`](https://www.npmjs.com/package/eventsourcingdb). + +| Precondition | Description | +| -------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `isSubjectPristine` | Ensures that no events have been written to a subject yet. Use this when creating a new aggregate. | +| `isSubjectOnEventId` | Ensures the last event on a subject matches a specific event ID. Use this for optimistic locking. 
| +| `isSubjectPopulated` | Ensures the subject already has at least one event. The opposite of `isSubjectPristine`. | +| `isEventQlQueryTrue` | Ensures a custom [EventQL](https://docs.eventsourcingdb.io/reference/eventql/) query evaluates to `true`, allowing arbitrary conditions based on the current state of the event store. | + +## OpenTelemetry Tracing + +Every call to `writeEvents` is automatically wrapped in an OpenTelemetry span named `eventsourcingdb.writeEvents`. The current trace context (`traceparent` and `tracestate`) is injected into each event candidate, enabling end-to-end distributed tracing from the event writer to any [event observer](/guide/eventsourcingdb/event-observer) that processes the event. + +The following metrics are recorded: + +| Metric | Type | Labels | Description | +| -------------------------------------------- | --------- | --------------------- | --------------------------------------- | +| `eventsourcingdb_operation_total` | Counter | `operation`, `status` | Total number of write operations | +| `eventsourcingdb_operation_duration_seconds` | Histogram | `operation` | Duration of write operations in seconds | diff --git a/docs/guide/hono/on-error.md b/docs/guide/hono/on-error.md index 36fdc64..3495181 100644 --- a/docs/guide/hono/on-error.md +++ b/docs/guide/hono/on-error.md @@ -4,8 +4,8 @@ prev: link: "/guide/hono/logger" next: - text: "Nimbus MongoDB" - link: "/guide/mongodb" + text: "Nimbus EventSourcingDB" + link: "/guide/eventsourcingdb" --- # onError Handler diff --git a/docs/guide/mongodb/index.md b/docs/guide/mongodb/index.md index 7173de4..17cd8f6 100644 --- a/docs/guide/mongodb/index.md +++ b/docs/guide/mongodb/index.md @@ -1,7 +1,7 @@ --- prev: - text: "onError Handler" - link: "/guide/hono/on-error" + text: "Event Mapping" + link: "/guide/eventsourcingdb/event-mapping" next: text: "Connection Manager" diff --git a/docs/guide/observability.md b/docs/guide/observability.md index 3c2db89..0f510a6 100644 --- 
a/docs/guide/observability.md +++ b/docs/guide/observability.md @@ -162,16 +162,16 @@ The correlation ID is automatically: ```typescript // Correlation ID is passed from command to event -const command = createCommand({ - type: ADD_USER_COMMAND_TYPE, +const command = createCommand({ + type: INVITE_USER_COMMAND_TYPE, source: "nimbus.overlap.at", correlationid: getCorrelationId(c), // From HTTP request data: body, }); // In the handler, create event with same correlation ID -const event = createEvent({ - type: USER_ADDED_EVENT_TYPE, +const event = createEvent({ + type: USER_INVITED_EVENT_TYPE, source: "nimbus.overlap.at", correlationid: command.correlationid, // Propagate data: state, @@ -204,8 +204,8 @@ console.log("User created:", user.id); Always pass correlation IDs when creating events from commands: ```typescript -const event = createEvent({ - type: USER_ADDED_EVENT_TYPE, +const event = createEvent({ + type: USER_INVITED_EVENT_TYPE, source: "nimbus.overlap.at", correlationid: command.correlationid, // Always propagate data: state, diff --git a/docs/guide/what-is-nimbus.md b/docs/guide/what-is-nimbus.md index 26080d9..4b01de7 100644 --- a/docs/guide/what-is-nimbus.md +++ b/docs/guide/what-is-nimbus.md @@ -49,26 +49,26 @@ import { commandSchema, createCommand, getRouter } from "@nimbus/core"; import { z } from "zod"; // Define a type-safe command schema -const addUserCommandSchema = commandSchema.extend({ - type: z.literal("com.example.add-user"), +const inviteUserCommandSchema = commandSchema.extend({ + type: z.literal("com.example.invite-user"), data: z.object({ email: z.string().email(), name: z.string(), }), }); -type AddUserCommand = z.infer; +type InviteUserCommand = z.infer; // Register a handler with automatic validation and tracing const router = getRouter("MyRouter"); router.register( - "com.example.add-user", - async (command: AddUserCommand) => { + "com.example.invite-user", + async (command: InviteUserCommand) => { // Your business logic here return 
{ userId: "123", email: command.data.email }; }, - addUserCommandSchema + inviteUserCommandSchema ); ``` diff --git a/docs/package-lock.json b/docs/package-lock.json index 0a67793..05d70ef 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -1691,9 +1691,9 @@ "license": "MIT" }, "node_modules/mdast-util-to-hast": { - "version": "13.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", - "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", "license": "MIT", "dependencies": { "@types/hast": "^3.0.0", @@ -1882,9 +1882,9 @@ } }, "node_modules/preact": { - "version": "10.27.2", - "resolved": "https://registry.npmjs.org/preact/-/preact-10.27.2.tgz", - "integrity": "sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg==", + "version": "10.28.3", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.28.3.tgz", + "integrity": "sha512-tCmoRkPQLpBeWzpmbhryairGnhW9tKV6c6gr/w+RhoRoKEJwsjzipwp//1oCpGPOchvSLaAPlpcJi9MwMmoPyA==", "license": "MIT", "funding": { "type": "opencollective", @@ -2162,9 +2162,9 @@ } }, "node_modules/vite": { - "version": "5.4.20", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.20.tgz", - "integrity": "sha512-j3lYzGC3P+B5Yfy/pfKNgVEg4+UtcIJcVRt2cDjIOmhLourAqPqf8P7acgxeiSgUB7E3p2P8/3gNIgDLpwzs4g==", + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", "license": "MIT", "dependencies": { "esbuild": "^0.21.3", diff --git a/examples/eventsourcing-demo/deno.json b/examples/eventsourcing-demo/deno.json new 
file mode 100644 index 0000000..f56aead --- /dev/null +++ b/examples/eventsourcing-demo/deno.json @@ -0,0 +1,48 @@ +{ + "tasks": { + "dev": "deno run -A --watch src/main.ts", + "dev:otel": "bash start-with-otel.sh", + "test": "deno test -A", + "database:seed": "deno run -A src/seedCollections.ts" + }, + "lint": { + "include": [ + "src/" + ], + "exclude": [], + "rules": { + "tags": [ + "recommended" + ], + "include": [], + "exclude": [ + "no-explicit-any" + ] + } + }, + "fmt": { + "include": [ + "src/" + ], + "exclude": [], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "test": { + "include": [ + "src/" + ] + }, + "imports": { + "@std/dotenv": "jsr:@std/dotenv@^0.225.6", + "@std/ulid": "jsr:@std/ulid@^1.0.0", + "eventsourcingdb": "npm:eventsourcingdb@^1.8.1", + "hono": "npm:hono@^4.11.4", + "mongodb": "npm:mongodb@^7.0.0", + "zod": "npm:zod@^4.3.5" + } +} \ No newline at end of file diff --git a/examples/eventsourcing-demo/src/main.ts b/examples/eventsourcing-demo/src/main.ts new file mode 100644 index 0000000..795a261 --- /dev/null +++ b/examples/eventsourcing-demo/src/main.ts @@ -0,0 +1,105 @@ +import { + getLogger, + jsonLogFormatter, + parseLogLevel, + prettyLogFormatter, + setupLogger, + setupRouter, +} from '@nimbus/core'; +import { setupEventSourcingDBClient } from '@nimbus/eventsourcingdb'; +import '@std/dotenv/load'; +import process from 'node:process'; +import { projectViews } from './read/core/projectViews.ts'; +import { app } from './shared/shell/http.ts'; +import { initMessages } from './shared/shell/messages.ts'; + +setupLogger({ + logLevel: parseLogLevel(process.env.LOG_LEVEL), + formatter: process.env.LOG_FORMAT === 'pretty' + ? prettyLogFormatter + : jsonLogFormatter, + useConsoleColors: process.env.LOG_FORMAT === 'pretty', +}); + +await setupEventSourcingDBClient( + { + url: new URL(process.env.ESDB_URL ?? ''), + apiToken: process.env.ESDB_API_TOKEN ?? 
'', + eventObservers: [ + { + subject: '/', + recursive: true, + eventHandler: projectViews, + }, + ], + }, +); + +setupRouter('writeRouter', { + logInput: (input) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Received input', + data: { input }, + ...(input?.correlationid + ? { correlationId: input.correlationid } + : {}), + }); + }, + logOutput: (output) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Output', + data: { output }, + ...(output?.correlationid + ? { correlationId: output.correlationid } + : {}), + }); + }, +}); + +setupRouter('readRouter', { + logInput: (input) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Received input', + data: { input }, + ...(input?.correlationid + ? { correlationId: input.correlationid } + : {}), + }); + }, + logOutput: (output) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Output', + data: { output }, + ...(output?.correlationid + ? { correlationId: output.correlationid } + : {}), + }); + }, +}); + +initMessages(); + +if (process.env.PORT) { + const port = Number.parseInt(process.env.PORT); + + Deno.serve({ + hostname: '0.0.0.0', + port, + onListen: ({ port, hostname }) => { + getLogger().info({ + category: 'API', + message: `Started HTTP API on http://${hostname}:${port}`, + }); + }, + }, app.fetch); +} else { + getLogger().critical({ + category: 'API', + message: + `Could not start the HTTP API! 
Please define a valid port environment variable.`, + }); +} diff --git a/examples/eventsourcing-demo/src/read/core/projectViews.ts b/examples/eventsourcing-demo/src/read/core/projectViews.ts new file mode 100644 index 0000000..b2f3152 --- /dev/null +++ b/examples/eventsourcing-demo/src/read/core/projectViews.ts @@ -0,0 +1,76 @@ +import { getLogger } from '@nimbus/core'; +import { eventSourcingDBEventToNimbusEvent } from '@nimbus/eventsourcingdb'; +import { Event as EventSourcingDBEvent } from 'eventsourcingdb'; +import { + isUserInvitationAcceptedEvent, + UserInvitationAcceptedEvent, +} from '../../write/iam/users/core/events/userInvitationAccepted.event.ts'; +import { + isUserInvitedEvent, + UserInvitedEvent, +} from '../../write/iam/users/core/events/userInvited.event.ts'; +import { + setUsersMemoryStoreLastEventId, + usersMemoryStore, + UsersRow, +} from '../shell/memoryStore/usersMemoryStore.ts'; + +export const projectViews = (eventSourcingDBEvent: EventSourcingDBEvent) => { + const event = eventSourcingDBEventToNimbusEvent< + UserInvitedEvent | UserInvitationAcceptedEvent + >( + eventSourcingDBEvent, + ); + + if (isUserInvitedEvent(event)) { + const usersRow: UsersRow = { + id: event.data.id, + revision: event.id, + email: event.data.email, + firstName: event.data.firstName, + lastName: event.data.lastName, + invitedAt: event.data.invitedAt, + acceptedAt: null, + }; + + usersMemoryStore.set( + event.data.id, + usersRow, + ); + + setUsersMemoryStoreLastEventId(event.id); + return; + } + + if (isUserInvitationAcceptedEvent(event)) { + const id = event.subject.split('/')[2]; + const currentUsersRow = usersMemoryStore.get(id); + + if (!currentUsersRow) { + getLogger().warn({ + category: 'ProjectViews', + message: `User not found in memory store: ${id}`, + }); + return; + } + + const usersRow: UsersRow = { + ...currentUsersRow, + revision: event.id, + acceptedAt: event.data.acceptedAt, + }; + + usersMemoryStore.set( + id, + usersRow, + ); + + 
setUsersMemoryStoreLastEventId(event.id); + return; + } + + getLogger().warn({ + category: 'ProjectViews', + message: `Unknown event type ${(event as { type: string }).type}`, + }); +}; diff --git a/examples/eventsourcing-demo/src/read/core/queries/getUser.query.ts b/examples/eventsourcing-demo/src/read/core/queries/getUser.query.ts new file mode 100644 index 0000000..e576b0f --- /dev/null +++ b/examples/eventsourcing-demo/src/read/core/queries/getUser.query.ts @@ -0,0 +1,12 @@ +import { querySchema } from '@nimbus/core'; +import { z } from 'zod'; + +export const GET_USER_QUERY_TYPE = 'at.overlap.nimbus.get-user'; + +export const getUserQuerySchema = querySchema.extend({ + type: z.literal(GET_USER_QUERY_TYPE), + data: z.object({ + id: z.string(), + }), +}); +export type GetUserQuery = z.infer<typeof getUserQuerySchema>; diff --git a/examples/eventsourcing-demo/src/read/core/queries/listUsers.query.ts b/examples/eventsourcing-demo/src/read/core/queries/listUsers.query.ts new file mode 100644 index 0000000..4a09139 --- /dev/null +++ b/examples/eventsourcing-demo/src/read/core/queries/listUsers.query.ts @@ -0,0 +1,10 @@ +import { querySchema } from '@nimbus/core'; +import { z } from 'zod'; + +export const LIST_USERS_QUERY_TYPE = 'at.overlap.nimbus.list-users'; + +export const listUsersQuerySchema = querySchema.extend({ + type: z.literal(LIST_USERS_QUERY_TYPE), + data: z.object({}), +}); +export type ListUsersQuery = z.infer<typeof listUsersQuerySchema>; diff --git a/examples/eventsourcing-demo/src/read/shell/http/router.ts b/examples/eventsourcing-demo/src/read/shell/http/router.ts new file mode 100644 index 0000000..64ab1d5 --- /dev/null +++ b/examples/eventsourcing-demo/src/read/shell/http/router.ts @@ -0,0 +1,54 @@ +import { createQuery, getRouter } from '@nimbus/core'; +import { getCorrelationId } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { + GET_USER_QUERY_TYPE, + GetUserQuery, +} from '../../core/queries/getUser.query.ts'; +import { + LIST_USERS_QUERY_TYPE, + ListUsersQuery, +} from
'../../core/queries/listUsers.query.ts'; + +const readRouter = new Hono(); + +readRouter.get( + '/list-users', + async (c) => { + const correlationId = getCorrelationId(c); + + const query = createQuery({ + type: LIST_USERS_QUERY_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: {}, + }); + + const result = await getRouter('readRouter').route(query); + + return c.json(result); + }, +); + +readRouter.get( + '/get-user-by-id/:id', + async (c) => { + const id = c.req.param('id'); + const correlationId = getCorrelationId(c); + + const query = createQuery({ + type: GET_USER_QUERY_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: { + id: id, + }, + }); + + const result = await getRouter('readRouter').route(query); + + return c.json(result); + }, +); + +export default readRouter; diff --git a/examples/eventsourcing-demo/src/read/shell/memoryStore/usersMemoryStore.ts b/examples/eventsourcing-demo/src/read/shell/memoryStore/usersMemoryStore.ts new file mode 100644 index 0000000..5c1e9c2 --- /dev/null +++ b/examples/eventsourcing-demo/src/read/shell/memoryStore/usersMemoryStore.ts @@ -0,0 +1,21 @@ +export type UsersRow = { + id: string; + revision: string; + email: string; + firstName: string; + lastName: string; + invitedAt: string; + acceptedAt: string | null; +}; + +export const usersMemoryStore = new Map(); + +let usersMemoryStoreLastEventId: string | null = null; + +export const setUsersMemoryStoreLastEventId = (lastEventId: string) => { + usersMemoryStoreLastEventId = lastEventId; +}; + +export const getUsersMemoryStoreLastEventId = () => { + return usersMemoryStoreLastEventId; +}; diff --git a/examples/eventsourcing-demo/src/read/shell/queries/getUser.query.ts b/examples/eventsourcing-demo/src/read/shell/queries/getUser.query.ts new file mode 100644 index 0000000..0df4f93 --- /dev/null +++ b/examples/eventsourcing-demo/src/read/shell/queries/getUser.query.ts @@ -0,0 +1,16 @@ +import { NotFoundException } from 
'@nimbus/core'; +import { GetUserQuery } from '../../core/queries/getUser.query.ts'; +import { usersMemoryStore } from '../memoryStore/usersMemoryStore.ts'; + +export const getUserQueryHandler = (query: GetUserQuery) => { + const user = usersMemoryStore.get(query.data.id); + + if (!user) { + throw new NotFoundException('User not found', { + errorCode: 'USER_NOT_FOUND', + userId: query.data.id, + }); + } + + return user; +}; diff --git a/examples/eventsourcing-demo/src/read/shell/queries/listUsers.query.ts b/examples/eventsourcing-demo/src/read/shell/queries/listUsers.query.ts new file mode 100644 index 0000000..c875ce8 --- /dev/null +++ b/examples/eventsourcing-demo/src/read/shell/queries/listUsers.query.ts @@ -0,0 +1,8 @@ +import { ListUsersQuery } from '../../core/queries/listUsers.query.ts'; +import { usersMemoryStore } from '../memoryStore/usersMemoryStore.ts'; + +export const listUsersQueryHandler = (_query: ListUsersQuery) => { + const users = [...usersMemoryStore.values()]; + + return users; +}; diff --git a/examples/eventsourcing-demo/src/read/shell/registerViews.ts b/examples/eventsourcing-demo/src/read/shell/registerViews.ts new file mode 100644 index 0000000..4ec6bc9 --- /dev/null +++ b/examples/eventsourcing-demo/src/read/shell/registerViews.ts @@ -0,0 +1,27 @@ +import { getRouter } from '@nimbus/core'; +import { + GET_USER_QUERY_TYPE, + getUserQuerySchema, +} from '../core/queries/getUser.query.ts'; +import { + LIST_USERS_QUERY_TYPE, + listUsersQuerySchema, +} from '../core/queries/listUsers.query.ts'; +import { getUserQueryHandler } from './queries/getUser.query.ts'; +import { listUsersQueryHandler } from './queries/listUsers.query.ts'; + +export const registerViews = () => { + const router = getRouter('readRouter'); + + router.register( + GET_USER_QUERY_TYPE, + getUserQueryHandler, + getUserQuerySchema, + ); + + router.register( + LIST_USERS_QUERY_TYPE, + listUsersQueryHandler, + listUsersQuerySchema, + ); +}; diff --git 
a/examples/eventsourcing-demo/src/shared/shell/http.ts b/examples/eventsourcing-demo/src/shared/shell/http.ts new file mode 100644 index 0000000..8e53a1c --- /dev/null +++ b/examples/eventsourcing-demo/src/shared/shell/http.ts @@ -0,0 +1,31 @@ +import { correlationId, handleError, logger } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { compress } from 'hono/compress'; +import { cors } from 'hono/cors'; +import { secureHeaders } from 'hono/secure-headers'; +import readRouter from '../../read/shell/http/router.ts'; +import usersRouter from '../../write/iam/users/shell/http/router.ts'; + +export const app = new Hono(); + +app.use(correlationId()); + +app.use(logger({ + enableTracing: true, + tracerName: 'api', +})); + +app.use(cors()); + +app.use(secureHeaders()); + +app.use(compress()); + +app.get('/health', (c) => { + return c.json({ status: 'ok' }); +}); + +app.route('/iam/users', usersRouter); +app.route('/iam/users', readRouter); + +app.onError(handleError); diff --git a/examples/eventsourcing-demo/src/shared/shell/messages.ts b/examples/eventsourcing-demo/src/shared/shell/messages.ts new file mode 100644 index 0000000..6b75f78 --- /dev/null +++ b/examples/eventsourcing-demo/src/shared/shell/messages.ts @@ -0,0 +1,7 @@ +import { registerViews } from '../../read/shell/registerViews.ts'; +import { registerUserMessages } from '../../write/iam/users/shell/registerUserMessages.ts'; + +export const initMessages = () => { + registerViews(); + registerUserMessages(); +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/core/commands/acceptUserInvitation.command.ts b/examples/eventsourcing-demo/src/write/iam/users/core/commands/acceptUserInvitation.command.ts new file mode 100644 index 0000000..75e40e4 --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/core/commands/acceptUserInvitation.command.ts @@ -0,0 +1,73 @@ +import { + commandSchema, + createEvent, + InvalidInputException, +} from '@nimbus/core'; +import { z } from 'zod'; 
+import { hasPendingInvitation, UserState } from '../domain/user.state.ts'; +import { + USER_INVITATION_ACCEPTED_EVENT_TYPE, + UserInvitationAcceptedEvent, +} from '../events/userInvitationAccepted.event.ts'; + +export const ACCEPT_USER_INVITATION_COMMAND_TYPE = + 'at.overlap.nimbus.accept-user-invitation'; + +export const acceptUserInvitationInputSchema = z.object({ + id: z.string().min(1), + expectedRevision: z.string().min(1), +}); + +export const acceptUserInvitationCommandSchema = commandSchema.extend({ + type: z.literal(ACCEPT_USER_INVITATION_COMMAND_TYPE), + data: acceptUserInvitationInputSchema, +}); +export type AcceptUserInvitationCommand = z.infer< + typeof acceptUserInvitationCommandSchema +>; + +export const acceptUserInvitation = ( + state: UserState, + command: AcceptUserInvitationCommand, +): [UserInvitationAcceptedEvent] => { + if (!hasPendingInvitation(state)) { + throw new InvalidInputException( + 'The user does not have a pending invitation', + { + errorCode: 'USER_HAS_NO_PENDING_INVITATION', + details: { + userId: state.id, + }, + }, + ); + } + + const inviteExpiredAfterHours = 24; + + if ( + state.invitedAt && + new Date(state.invitedAt).getTime() + + inviteExpiredAfterHours * 60 * 60 * 1000 < Date.now() + ) { + throw new InvalidInputException('The invitation has expired', { + errorCode: 'INVITATION_EXPIRED', + details: { + userId: state.id, + }, + }); + } + + const userInvitationAcceptedEvent = createEvent< + UserInvitationAcceptedEvent + >({ + type: USER_INVITATION_ACCEPTED_EVENT_TYPE, + source: command.source, + correlationid: command.correlationid, + subject: `/users/${state.id}`, + data: { + acceptedAt: new Date().toISOString(), + }, + }); + + return [userInvitationAcceptedEvent]; +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/core/commands/inviteUser.command.ts b/examples/eventsourcing-demo/src/write/iam/users/core/commands/inviteUser.command.ts new file mode 100644 index 0000000..63aede7 --- /dev/null +++ 
b/examples/eventsourcing-demo/src/write/iam/users/core/commands/inviteUser.command.ts @@ -0,0 +1,45 @@ +import { commandSchema, createEvent } from '@nimbus/core'; +import { z } from 'zod'; +import { UserState } from '../domain/user.state.ts'; +import { + USER_INVITED_EVENT_TYPE, + UserInvitedEvent, +} from '../events/userInvited.event.ts'; + +export const INVITE_USER_COMMAND_TYPE = 'at.overlap.nimbus.invite-user'; + +export const inviteUserInputSchema = z.object({ + email: z.email(), + firstName: z.string().min(1), + lastName: z.string().min(1), +}); + +export const inviteUserCommandSchema = commandSchema.extend({ + type: z.literal(INVITE_USER_COMMAND_TYPE), + data: inviteUserInputSchema, +}); +export type InviteUserCommand = z.infer<typeof inviteUserCommandSchema>; + +export const inviteUser = ( + state: UserState, + command: InviteUserCommand, +): [UserInvitedEvent] => { + // Always make sure to cast all user emails to lowercase + const email = command.data.email.toLowerCase(); + + const userInvitedEvent = createEvent<UserInvitedEvent>({ + type: USER_INVITED_EVENT_TYPE, + source: command.source, + correlationid: command.correlationid, + subject: `/users/${state.id}`, + data: { + id: state.id, + email: email, + firstName: command.data.firstName, + lastName: command.data.lastName, + invitedAt: new Date().toISOString(), + }, + }); + + return [userInvitedEvent]; +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/core/domain/user.state.ts b/examples/eventsourcing-demo/src/write/iam/users/core/domain/user.state.ts new file mode 100644 index 0000000..1fb517f --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/core/domain/user.state.ts @@ -0,0 +1,25 @@ +import { Event } from '@nimbus/core'; +import { isUserInvitedEvent } from '../events/userInvited.event.ts'; + +export type UserState = { + id: string; + invitedAt?: string; +}; + +export const hasPendingInvitation = (state: UserState): boolean => { + return state.invitedAt !== undefined; +}; + +export const applyEventToUserState = ( + state:
UserState, + event: Event, +): UserState => { + if (isUserInvitedEvent(event)) { + return { + ...state, + invitedAt: event.data.invitedAt, + }; + } + + return state; +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/core/events/userInvitationAccepted.event.ts b/examples/eventsourcing-demo/src/write/iam/users/core/events/userInvitationAccepted.event.ts new file mode 100644 index 0000000..e47bf04 --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/core/events/userInvitationAccepted.event.ts @@ -0,0 +1,29 @@ +import { eventSchema } from '@nimbus/core'; +import z from 'zod'; + +export const USER_INVITATION_ACCEPTED_EVENT_TYPE = + 'at.overlap.nimbus.user-invitation-accepted'; + +export const userInvitationAcceptedEventDataSchema = z.object({ + acceptedAt: z.iso.datetime(), +}); + +export const userInvitationAcceptedEventSchema = eventSchema.extend({ + type: z.literal(USER_INVITATION_ACCEPTED_EVENT_TYPE), + data: userInvitationAcceptedEventDataSchema, +}); +export type UserInvitationAcceptedEvent = z.infer< + typeof userInvitationAcceptedEventSchema +>; + +/** + * Type guard that checks whether the given event is a {@link UserInvitationAcceptedEvent}. + * + * @param event - The event to check. + * @returns `true` if the event is a {@link UserInvitationAcceptedEvent}, `false` otherwise. 
+ */ +export const isUserInvitationAcceptedEvent = ( + event: { type: string }, +): event is UserInvitationAcceptedEvent => { + return event.type === USER_INVITATION_ACCEPTED_EVENT_TYPE; +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/core/events/userInvited.event.ts b/examples/eventsourcing-demo/src/write/iam/users/core/events/userInvited.event.ts new file mode 100644 index 0000000..4b5708f --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/core/events/userInvited.event.ts @@ -0,0 +1,30 @@ +import { eventSchema } from '@nimbus/core'; +import z from 'zod'; + +export const USER_INVITED_EVENT_TYPE = 'at.overlap.nimbus.user-invited'; + +export const userInvitedEventDataSchema = z.object({ + id: z.string(), + email: z.email(), + firstName: z.string().min(1), + lastName: z.string().min(1), + invitedAt: z.iso.datetime(), +}); + +export const userInvitedEventSchema = eventSchema.extend({ + type: z.literal(USER_INVITED_EVENT_TYPE), + data: userInvitedEventDataSchema, +}); +export type UserInvitedEvent = z.infer; + +/** + * Type guard that checks whether the given event is a {@link UserInvitedEvent}. + * + * @param event - The event to check. + * @returns `true` if the event is a {@link UserInvitedEvent}, `false` otherwise. 
+ */ +export const isUserInvitedEvent = ( + event: { type: string }, +): event is UserInvitedEvent => { + return event.type === USER_INVITED_EVENT_TYPE; +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/shell/commands/acceptUserInvitation.command.ts b/examples/eventsourcing-demo/src/write/iam/users/shell/commands/acceptUserInvitation.command.ts new file mode 100644 index 0000000..84842f7 --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/shell/commands/acceptUserInvitation.command.ts @@ -0,0 +1,46 @@ +import { + eventSourcingDBEventToNimbusEvent, + readEvents, + writeEvents, +} from '@nimbus/eventsourcingdb'; +import { isSubjectOnEventId } from 'eventsourcingdb'; +import { + acceptUserInvitation, + AcceptUserInvitationCommand, +} from '../../core/commands/acceptUserInvitation.command.ts'; +import { + applyEventToUserState, + UserState, +} from '../../core/domain/user.state.ts'; + +export const acceptUserInvitationCommandHandler = async ( + command: AcceptUserInvitationCommand, +) => { + let state: UserState = { id: command.data.id }; + + for await ( + const eventSourcingDBEvent of readEvents( + `/users/${command.data.id}`, + { + recursive: false, + }, + ) + ) { + const event = eventSourcingDBEventToNimbusEvent( + eventSourcingDBEvent, + ); + + state = applyEventToUserState(state, event); + } + + const events = acceptUserInvitation(state, command); + + await writeEvents(events, [ + isSubjectOnEventId( + events[0].subject, + command.data.expectedRevision, + ), + ]); + + return {}; +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/shell/commands/inviteUser.command.ts b/examples/eventsourcing-demo/src/write/iam/users/shell/commands/inviteUser.command.ts new file mode 100644 index 0000000..78c1015 --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/shell/commands/inviteUser.command.ts @@ -0,0 +1,24 @@ +import { writeEvents } from '@nimbus/eventsourcingdb'; +import { ulid } from '@std/ulid'; +import { 
isSubjectPristine } from 'eventsourcingdb'; +import { + inviteUser, + InviteUserCommand, +} from '../../core/commands/inviteUser.command.ts'; +import { UserState } from '../../core/domain/user.state.ts'; + +export const inviteUserCommandHandler = async (command: InviteUserCommand) => { + const id = ulid(); + + const state: UserState = { id }; + + const events = inviteUser(state, command); + + await writeEvents(events, [ + isSubjectPristine(events[0].subject), + ]); + + return { + userId: id, + }; +}; diff --git a/examples/eventsourcing-demo/src/write/iam/users/shell/http/router.ts b/examples/eventsourcing-demo/src/write/iam/users/shell/http/router.ts new file mode 100644 index 0000000..dcee185 --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/shell/http/router.ts @@ -0,0 +1,53 @@ +import { createCommand, getRouter } from '@nimbus/core'; +import { getCorrelationId } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { + ACCEPT_USER_INVITATION_COMMAND_TYPE, + AcceptUserInvitationCommand, +} from '../../core/commands/acceptUserInvitation.command.ts'; +import { + INVITE_USER_COMMAND_TYPE, + InviteUserCommand, +} from '../../core/commands/inviteUser.command.ts'; + +const usersRouter = new Hono(); + +usersRouter.post( + '/invite-user', + async (c) => { + const body = await c.req.json(); + const correlationId = getCorrelationId(c); + + const command = createCommand({ + type: INVITE_USER_COMMAND_TYPE, + source: 'https://nimbus.overlap.at', + correlationid: correlationId, + data: body, + }); + + const result = await getRouter('writeRouter').route(command); + + return c.json(result); + }, +); + +usersRouter.post( + '/accept-user-invitation', + async (c) => { + const body = await c.req.json(); + const correlationId = getCorrelationId(c); + + const command = createCommand({ + type: ACCEPT_USER_INVITATION_COMMAND_TYPE, + source: 'https://nimbus.overlap.at', + correlationid: correlationId, + data: body, + }); + + const result = await 
getRouter('writeRouter').route(command); + + return c.json(result); + }, +); + +export default usersRouter; diff --git a/examples/eventsourcing-demo/src/write/iam/users/shell/registerUserMessages.ts b/examples/eventsourcing-demo/src/write/iam/users/shell/registerUserMessages.ts new file mode 100644 index 0000000..4c82545 --- /dev/null +++ b/examples/eventsourcing-demo/src/write/iam/users/shell/registerUserMessages.ts @@ -0,0 +1,27 @@ +import { getRouter } from '@nimbus/core'; +import { + ACCEPT_USER_INVITATION_COMMAND_TYPE, + acceptUserInvitationCommandSchema, +} from '../core/commands/acceptUserInvitation.command.ts'; +import { + INVITE_USER_COMMAND_TYPE, + inviteUserCommandSchema, +} from '../core/commands/inviteUser.command.ts'; +import { acceptUserInvitationCommandHandler } from './commands/acceptUserInvitation.command.ts'; +import { inviteUserCommandHandler } from './commands/inviteUser.command.ts'; + +export const registerUserMessages = () => { + const router = getRouter('writeRouter'); + + router.register( + INVITE_USER_COMMAND_TYPE, + inviteUserCommandHandler, + inviteUserCommandSchema, + ); + + router.register( + ACCEPT_USER_INVITATION_COMMAND_TYPE, + acceptUserInvitationCommandHandler, + acceptUserInvitationCommandSchema, + ); +}; diff --git a/examples/eventsourcing-demo/start-with-otel.sh b/examples/eventsourcing-demo/start-with-otel.sh new file mode 100644 index 0000000..11cfb4b --- /dev/null +++ b/examples/eventsourcing-demo/start-with-otel.sh @@ -0,0 +1,16 @@ +export OTEL_DENO=true +export OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" +export OTEL_EXPORTER_OTLP_ENDPOINT="https://otlp-gateway-prod-eu-west-2.grafana.net/otlp" + +# Read OTLP headers from secret file +if [[ -f "./.otel_token" ]]; then + export OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic $(cat ./.otel_token)" +else + echo "Error: .otel_token file not found." 
>&2 + exit 1 +fi + +export OTEL_SERVICE_NAME=nimbus-eventsourcing-demo +export OTEL_RESOURCE_ATTRIBUTES=deployment.environment=development + +deno run -A src/main.ts diff --git a/examples/hono-demo/deno.json b/examples/hono-demo/deno.json index fa54ead..3802bb1 100644 --- a/examples/hono-demo/deno.json +++ b/examples/hono-demo/deno.json @@ -1,7 +1,7 @@ { "tasks": { "dev": "deno run -A --watch src/main.ts", - "dev:otel": "sh start-with-otel.sh", + "dev:otel": "bash start-with-otel.sh", "test": "deno test -A", "database:seed": "deno run -A src/seedCollections.ts" }, diff --git a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/core/commands/inviteUser.command.ts similarity index 68% rename from examples/hono-demo/src/iam/users/core/commands/addUser.command.ts rename to examples/hono-demo/src/iam/users/core/commands/inviteUser.command.ts index 4c01f24..fd386bb 100644 --- a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts +++ b/examples/hono-demo/src/iam/users/core/commands/inviteUser.command.ts @@ -3,24 +3,24 @@ import { ObjectId } from 'mongodb'; import { z } from 'zod'; import { UserState } from '../domain/user.ts'; -export const ADD_USER_COMMAND_TYPE = 'at.overlap.nimbus.add-user'; +export const INVITE_USER_COMMAND_TYPE = 'at.overlap.nimbus.invite-user'; -export const addUserInputSchema = z.object({ +export const inviteUserInputSchema = z.object({ email: z.email(), firstName: z.string(), lastName: z.string(), group: z.string(), }); -export const addUserCommandSchema = commandSchema.extend({ - type: z.literal(ADD_USER_COMMAND_TYPE), - data: addUserInputSchema, +export const inviteUserCommandSchema = commandSchema.extend({ + type: z.literal(INVITE_USER_COMMAND_TYPE), + data: inviteUserInputSchema, }); -export type AddUserCommand = z.infer; +export type InviteUserCommand = z.infer; -export const addUser = ( +export const inviteUser = ( state: UserState, - command: AddUserCommand, + command: 
InviteUserCommand, ): UserState => { // Always make sure to cast all user emails to lowercase const email = command.data.email.toLowerCase(); diff --git a/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts b/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts deleted file mode 100644 index 99050d1..0000000 --- a/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Event } from '@nimbus/core'; -import { UserState } from '../domain/user.ts'; - -export const USER_ADDED_EVENT_TYPE = 'at.overlap.nimbus.user-added'; - -export type UserAddedEvent = Event & { - type: typeof USER_ADDED_EVENT_TYPE; -}; diff --git a/examples/hono-demo/src/iam/users/core/events/userInvited.event.ts b/examples/hono-demo/src/iam/users/core/events/userInvited.event.ts new file mode 100644 index 0000000..8a74cba --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/events/userInvited.event.ts @@ -0,0 +1,8 @@ +import { Event } from '@nimbus/core'; +import { UserState } from '../domain/user.ts'; + +export const USER_INVITED_EVENT_TYPE = 'at.overlap.nimbus.user-invited'; + +export type UserInvitedEvent = Event & { + type: typeof USER_INVITED_EVENT_TYPE; +}; diff --git a/examples/hono-demo/src/iam/users/shell/http/router.ts b/examples/hono-demo/src/iam/users/shell/http/router.ts index 874561c..838be3e 100644 --- a/examples/hono-demo/src/iam/users/shell/http/router.ts +++ b/examples/hono-demo/src/iam/users/shell/http/router.ts @@ -2,9 +2,9 @@ import { createCommand, createQuery, getRouter } from '@nimbus/core'; import { getCorrelationId } from '@nimbus/hono'; import { Hono } from 'hono'; import { - ADD_USER_COMMAND_TYPE, - AddUserCommand, -} from '../../core/commands/addUser.command.ts'; + INVITE_USER_COMMAND_TYPE, + InviteUserCommand, +} from '../../core/commands/inviteUser.command.ts'; import { GET_USER_QUERY_TYPE, GetUserQuery, @@ -17,13 +17,13 @@ import { const usersRouter = new Hono(); usersRouter.post( - '/add-user', 
+ '/invite-user', async (c) => { const body = await c.req.json(); const correlationId = getCorrelationId(c); - const command = createCommand({ - type: ADD_USER_COMMAND_TYPE, + const command = createCommand({ + type: INVITE_USER_COMMAND_TYPE, source: 'nimbus.overlap.at', correlationid: correlationId, data: body, diff --git a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/commands/inviteUser.command.ts similarity index 65% rename from examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts rename to examples/hono-demo/src/iam/users/shell/messages/commands/inviteUser.command.ts index 8b5d0b9..55490b9 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/commands/inviteUser.command.ts @@ -1,16 +1,18 @@ import { createEvent, getEventBus, NotFoundException } from '@nimbus/core'; import { - addUser, - AddUserCommand, -} from '../../../core/commands/addUser.command.ts'; + inviteUser, + InviteUserCommand, +} from '../../../core/commands/inviteUser.command.ts'; import { UserState } from '../../../core/domain/user.ts'; import { - USER_ADDED_EVENT_TYPE, - UserAddedEvent, -} from '../../../core/events/userAdded.event.ts'; + USER_INVITED_EVENT_TYPE, + UserInvitedEvent, +} from '../../../core/events/userInvited.event.ts'; import { userRepository } from '../../mongodb/user.repository.ts'; -export const addUserCommandHandler = async (command: AddUserCommand) => { +export const inviteUserCommandHandler = async ( + command: InviteUserCommand, +) => { const eventBus = getEventBus('default'); let state: UserState = null; @@ -26,22 +28,22 @@ export const addUserCommandHandler = async (command: AddUserCommand) => { } } - state = addUser(state, command); + state = inviteUser(state, command); if (state !== null) { state = await userRepository.insertOne({ item: state, }); - const event = createEvent({ - 
type: USER_ADDED_EVENT_TYPE, + const event = createEvent({ + type: USER_INVITED_EVENT_TYPE, source: 'nimbus.overlap.at', correlationid: command.correlationid, subject: `/users/${state._id}`, data: state, }); - eventBus.putEvent(event); + eventBus.putEvent(event); } return state; diff --git a/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts b/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts deleted file mode 100644 index 0961729..0000000 --- a/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { getLogger } from '@nimbus/core'; -import { UserAddedEvent } from '../../../core/events/userAdded.event.ts'; - -export const userAddedEventHandler = async (event: UserAddedEvent) => { - await Promise.resolve(); - - getLogger().info({ - message: 'User added', - data: event.data ?? {}, - }); -}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/events/userInvited.event.ts b/examples/hono-demo/src/iam/users/shell/messages/events/userInvited.event.ts new file mode 100644 index 0000000..cccac6e --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/events/userInvited.event.ts @@ -0,0 +1,11 @@ +import { getLogger } from '@nimbus/core'; +import { UserInvitedEvent } from '../../../core/events/userInvited.event.ts'; + +export const userInvitedEventHandler = async (event: UserInvitedEvent) => { + await Promise.resolve(); + + getLogger().info({ + message: 'User invited', + data: event.data ?? 
{}, + }); +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts index a5fdeea..8216b90 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts @@ -1,9 +1,9 @@ import { getEventBus, getRouter } from '@nimbus/core'; import { - ADD_USER_COMMAND_TYPE, - addUserCommandSchema, -} from '../../core/commands/addUser.command.ts'; -import { USER_ADDED_EVENT_TYPE } from '../../core/events/userAdded.event.ts'; + INVITE_USER_COMMAND_TYPE, + inviteUserCommandSchema, +} from '../../core/commands/inviteUser.command.ts'; +import { USER_INVITED_EVENT_TYPE } from '../../core/events/userInvited.event.ts'; import { GET_USER_QUERY_TYPE, getUserQuerySchema, @@ -12,8 +12,8 @@ import { GET_USER_GROUPS_QUERY_TYPE, getUserGroupsQuerySchema, } from '../../core/queries/getUserGroups.ts'; -import { addUserCommandHandler } from './commands/addUser.command.ts'; -import { userAddedEventHandler } from './events/userAdded.event.ts'; +import { inviteUserCommandHandler } from './commands/inviteUser.command.ts'; +import { userInvitedEventHandler } from './events/userInvited.event.ts'; import { getUserQueryHandler } from './queries/getUser.query.ts'; import { getUserGroupsQueryHandler } from './queries/getUserGroups.query.ts'; @@ -22,14 +22,14 @@ export const registerUserMessages = () => { const router = getRouter('default'); eventBus.subscribeEvent({ - type: USER_ADDED_EVENT_TYPE, - handler: userAddedEventHandler, + type: USER_INVITED_EVENT_TYPE, + handler: userInvitedEventHandler, }); router.register( - ADD_USER_COMMAND_TYPE, - addUserCommandHandler, - addUserCommandSchema, + INVITE_USER_COMMAND_TYPE, + inviteUserCommandHandler, + inviteUserCommandSchema, ); router.register( diff --git a/packages/core/src/lib/message/router.ts b/packages/core/src/lib/message/router.ts index e5ad800..d3b15fe 
100644 --- a/packages/core/src/lib/message/router.ts +++ b/packages/core/src/lib/message/router.ts @@ -34,7 +34,7 @@ export type MessageHandler< TOutput = unknown, > = ( input: TInput, -) => Promise; +) => Promise | TOutput; /** * Options for creating a MessageRouter. diff --git a/packages/eventsourcingdb/README.md b/packages/eventsourcingdb/README.md new file mode 100644 index 0000000..c1296cb --- /dev/null +++ b/packages/eventsourcingdb/README.md @@ -0,0 +1,14 @@ +Nimbus + +# Nimbus EventSourcingDB + +Use the [EventSourcingDB](https://eventsourcingdb.io) with Nimbus. + +Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. + +# License + +The MIT License (MIT) diff --git a/packages/eventsourcingdb/deno.json b/packages/eventsourcingdb/deno.json new file mode 100644 index 0000000..2181110 --- /dev/null +++ b/packages/eventsourcingdb/deno.json @@ -0,0 +1,42 @@ +{ + "name": "@nimbus/eventsourcingdb", + "version": "1.1.0-beta.1", + "license": "MIT", + "author": "Daniel Gördes (https://overlap.at)", + "repository": { + "type": "git", + "url": "https://github.com/overlap-dev/Nimbus" + }, + "bugs": { + "url": "https://github.com/overlap-dev/Nimbus/issues" + }, + "homepage": "https://nimbus.overlap.at", + "exports": "./src/index.ts", + "fmt": { + "include": [ + "src/" + ], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "lint": { + "include": [ + "src/" + ] + }, + "test": { + "include": [ + "src/" + ] + }, + "imports": { + "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0", + "@std/assert": "jsr:@std/assert@^1.0.0", + "@std/ulid": "jsr:@std/ulid@^1.0.0", + "eventsourcingdb": "npm:eventsourcingdb@^1.8.1" + } +} \ No newline at end of file diff --git a/packages/eventsourcingdb/src/index.ts b/packages/eventsourcingdb/src/index.ts new file mode 100644 index 
0000000..e231fc5 --- /dev/null +++ b/packages/eventsourcingdb/src/index.ts @@ -0,0 +1,5 @@ +export * from './lib/client.ts'; +export * from './lib/eventMapping.ts'; +export * from './lib/eventObserver.ts'; +export * from './lib/readEvents.ts'; +export * from './lib/writeEvents.ts'; diff --git a/packages/eventsourcingdb/src/lib/client.test.ts b/packages/eventsourcingdb/src/lib/client.test.ts new file mode 100644 index 0000000..b87177b --- /dev/null +++ b/packages/eventsourcingdb/src/lib/client.test.ts @@ -0,0 +1,11 @@ +import { assertInstanceOf, assertThrows } from '@std/assert'; +import { GenericException } from '@nimbus/core'; +import { getEventSourcingDBClient } from './client.ts'; + +Deno.test('getEventSourcingDBClient throws GenericException before init', () => { + const error = assertThrows(() => { + getEventSourcingDBClient(); + }); + + assertInstanceOf(error, GenericException); +}); diff --git a/packages/eventsourcingdb/src/lib/client.ts b/packages/eventsourcingdb/src/lib/client.ts new file mode 100644 index 0000000..0169da2 --- /dev/null +++ b/packages/eventsourcingdb/src/lib/client.ts @@ -0,0 +1,147 @@ +import { GenericException, getLogger } from '@nimbus/core'; +import { Client } from 'eventsourcingdb'; +import { type EventObserver, initEventObserver } from './eventObserver.ts'; + +let eventSourcingDBClient: Client | null = null; + +/** + * Configuration options for setting up the EventSourcingDB client. + */ +export type SetupEventSourcingDBClientInput = { + /** + * The URL of the EventSourcingDB server. + */ + url: URL; + /** + * The API token for authenticating with EventSourcingDB. + */ + apiToken: string; + /** + * An optional array of event observers to observe events. + */ + eventObservers?: EventObserver[]; +}; + +/** + * Initialize and configure the EventSourcingDB client. + * + * This function creates a singleton client instance, verifies connectivity by pinging + * the server, and validates the provided API token. 
It should be called once at + * application startup before using {@link getEventSourcingDBClient}. + * + * Optionally, you can provide event observers that will start observing events + * in the background after the client is initialized. + * + * @param {SetupEventSourcingDBClientInput} options - The configuration options + * @param {URL} options.url - The URL of the EventSourcingDB server + * @param {string} options.apiToken - The API token for authentication + * @param {EventObserver[]} [options.eventObservers] - Optional array of event observers + * + * @throws {GenericException} If the connection to EventSourcingDB fails + * @throws {GenericException} If the API token is invalid + * + * @example + * ```ts + * import { setupEventSourcingDBClient } from '@nimbus/eventsourcingdb'; + * import type { Event } from 'eventsourcingdb'; + * + * await setupEventSourcingDBClient({ + * url: new URL(process.env.ESDB_URL ?? ''), + * apiToken: process.env.ESDB_API_TOKEN ?? '', + * eventObservers: [ + * { + * subject: '/users', + * recursive: true, + * lowerBound: { + * id: 'last-processed-event-id', + * type: 'exclusive', + * }, + * fromLatestEvent: { + * subject: '/users', + * type: 'io.nimbus.users.invited', + * ifEventIsMissing: 'read-everything', + * }, + * eventHandler: async (event: Event) => { + * console.log('Received event:', event); + * }, + * retryOptions: { + * maxRetries: 3, + * initialRetryDelayMs: 3000, + * }, + * }, + * ], + * }); + * ``` + */ +export const setupEventSourcingDBClient = async ( + { url, apiToken, eventObservers }: SetupEventSourcingDBClientInput, +): Promise => { + eventSourcingDBClient = new Client( + url, + apiToken, + ); + + try { + await eventSourcingDBClient.ping(); + } catch (error) { + getLogger().error({ + category: 'Nimbus', + message: 'Could not connect to EventSourcingDB', + error: error as Error, + }); + throw new GenericException( + 'Could not connect to EventSourcingDB', + ); + } + + try { + await 
eventSourcingDBClient.verifyApiToken(); + } catch (error) { + getLogger().error({ + category: 'Nimbus', + message: 'Invalid API token. Please check your API token.', + error: error as Error, + }); + throw new GenericException( + 'Invalid API token. Please check your API token.', + ); + } + + getLogger().info({ + category: 'Nimbus', + message: 'EventSourcingDB client initialized successfully', + }); + + if (eventObservers?.length) { + for (const eventObserver of eventObservers) { + initEventObserver(eventObserver); + } + } +}; + +/** + * Get the EventSourcingDB client instance. + * + * Returns the singleton client instance that was created by {@link setupEventSourcingDBClient}. + * This function must be called after the client has been initialized. + * + * @returns {Client} The EventSourcingDB client instance + * + * @throws {GenericException} If the client has not been initialized via setupEventSourcingDBClient + * + * @example + * ```ts + * import { getEventSourcingDBClient } from '@nimbus/eventsourcingdb'; + * + * const client = getEventSourcingDBClient(); + * ``` + */ +export const getEventSourcingDBClient = (): Client => { + if (!eventSourcingDBClient) { + throw new GenericException( + 'EventSourcingDB client not yet initialized. 
Please call setupEventSourcingDBClient() first.', + ); + } + + return eventSourcingDBClient; +}; diff --git a/packages/eventsourcingdb/src/lib/eventMapping.test.ts b/packages/eventsourcingdb/src/lib/eventMapping.test.ts new file mode 100644 index 0000000..05dfb87 --- /dev/null +++ b/packages/eventsourcingdb/src/lib/eventMapping.test.ts @@ -0,0 +1,310 @@ +import type { Event } from '@nimbus/core'; +import { createEvent } from '@nimbus/core'; +import { assertEquals, assertNotEquals } from '@std/assert'; +import type { Event as EventSourcingDBEvent } from 'eventsourcingdb'; +import { + type EventData, + eventSourcingDBEventToNimbusEvent, + isEventData, + nimbusEventToEventSourcingDBEventCandidate, +} from './eventMapping.ts'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +const createTestNimbusEvent = ( + overrides: Partial = {}, +): Event => { + return createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.test-event', + subject: '/tests/1', + data: { message: 'hello' }, + correlationid: 'corr-123', + ...overrides, + }); +}; + +const createTestEventSourcingDBEvent = ( + overrides: Partial<{ + id: string; + time: Date; + source: string; + subject: string; + type: string; + data: Record; + }> = {}, +): EventSourcingDBEvent => { + const defaults = { + id: '1', + time: new Date('2025-06-01T12:00:00.000Z'), + source: 'https://nimbus.test', + subject: '/tests/1', + type: 'at.test.nimbus.test-event', + data: { + payload: { message: 'hello' }, + nimbusMeta: { + correlationid: 'corr-123', + }, + } as Record, + }; + + // The eventsourcingdb Event class has private fields and cannot be + // constructed outside its own module. Since eventMapping.ts only + // accesses plain public properties we use a plain object cast to + // satisfy the runtime while keeping type-safety in the test. 
+ return { ...defaults, ...overrides } as unknown as EventSourcingDBEvent; +}; + +// --------------------------------------------------------------------------- +// isEventData +// --------------------------------------------------------------------------- + +Deno.test('isEventData returns true for valid EventData', () => { + const data: EventData = { + payload: { key: 'value' }, + nimbusMeta: { correlationid: 'corr-1' }, + }; + + assertEquals(isEventData(data), true); +}); + +Deno.test('isEventData returns true when nimbusMeta includes dataschema', () => { + const data: EventData = { + payload: { key: 'value' }, + nimbusMeta: { + correlationid: 'corr-1', + dataschema: 'https://schema.example.com/v1', + }, + }; + + assertEquals(isEventData(data), true); +}); + +Deno.test('isEventData returns false for null', () => { + assertEquals(isEventData(null), false); +}); + +Deno.test('isEventData returns false for undefined', () => { + assertEquals(isEventData(undefined), false); +}); + +Deno.test('isEventData returns false for a string', () => { + assertEquals(isEventData('not an object'), false); +}); + +Deno.test('isEventData returns false for a number', () => { + assertEquals(isEventData(42), false); +}); + +Deno.test('isEventData returns false for an object missing nimbusMeta', () => { + assertEquals(isEventData({ payload: { key: 'value' } }), false); +}); + +Deno.test('isEventData returns false for an object missing payload', () => { + assertEquals( + isEventData({ nimbusMeta: { correlationid: 'corr-1' } }), + false, + ); +}); + +Deno.test('isEventData returns false for an empty object', () => { + assertEquals(isEventData({}), false); +}); + +// --------------------------------------------------------------------------- +// nimbusEventToEventSourcingDBEventCandidate +// --------------------------------------------------------------------------- + +Deno.test('nimbusEventToEventSourcingDBEventCandidate maps basic properties', () => { + const event = 
createTestNimbusEvent();

  const candidate = nimbusEventToEventSourcingDBEventCandidate(event);

  assertEquals(candidate.source, event.source);
  assertEquals(candidate.subject, event.subject);
  assertEquals(candidate.type, event.type);
});

Deno.test('nimbusEventToEventSourcingDBEventCandidate wraps data with nimbusMeta', () => {
  const nimbusEvent = createTestNimbusEvent();

  const stored = nimbusEventToEventSourcingDBEventCandidate(nimbusEvent)
    .data as EventData;

  // The original payload and correlation id must survive the wrapping.
  assertEquals(stored.payload, nimbusEvent.data);
  assertEquals(stored.nimbusMeta.correlationid, nimbusEvent.correlationid);
});

Deno.test('nimbusEventToEventSourcingDBEventCandidate includes dataschema in nimbusMeta when present', () => {
  const schemaUrl = 'https://schema.example.com/v1';
  const nimbusEvent = createTestNimbusEvent({ dataschema: schemaUrl });

  const stored = nimbusEventToEventSourcingDBEventCandidate(nimbusEvent)
    .data as EventData;

  assertEquals(stored.nimbusMeta.dataschema, schemaUrl);
});

Deno.test('nimbusEventToEventSourcingDBEventCandidate omits dataschema from nimbusMeta when absent', () => {
  const stored = nimbusEventToEventSourcingDBEventCandidate(
    createTestNimbusEvent(),
  ).data as EventData;

  assertEquals(stored.nimbusMeta.dataschema, undefined);
});

Deno.test('nimbusEventToEventSourcingDBEventCandidate includes traceparent when provided', () => {
  const candidate = nimbusEventToEventSourcingDBEventCandidate(
    createTestNimbusEvent(),
    '00-abc-def-01',
  );

  assertEquals(candidate.traceparent, '00-abc-def-01');
});

Deno.test('nimbusEventToEventSourcingDBEventCandidate includes tracestate when provided', () => {
  const candidate = nimbusEventToEventSourcingDBEventCandidate(
    createTestNimbusEvent(),
    '00-abc-def-01',
    'vendor=value',
  );

  assertEquals(candidate.traceparent, '00-abc-def-01');
  assertEquals(candidate.tracestate, 'vendor=value');
});

Deno.test('nimbusEventToEventSourcingDBEventCandidate omits traceparent when not provided', () => {
  const candidate = nimbusEventToEventSourcingDBEventCandidate(
    createTestNimbusEvent(),
  );

  assertEquals(candidate.traceparent, undefined);
});

Deno.test('nimbusEventToEventSourcingDBEventCandidate omits tracestate when not provided', () => {
  const candidate = nimbusEventToEventSourcingDBEventCandidate(
    createTestNimbusEvent(),
    '00-abc-def-01',
  );

  assertEquals(candidate.tracestate, undefined);
});

// ---------------------------------------------------------------------------
// eventSourcingDBEventToNimbusEvent
// ---------------------------------------------------------------------------

Deno.test('eventSourcingDBEventToNimbusEvent maps event with nimbusMeta correctly', () => {
  const mapped = eventSourcingDBEventToNimbusEvent(
    createTestEventSourcingDBEvent(),
  );

  // Expected values mirror the defaults of createTestEventSourcingDBEvent.
  assertEquals(mapped.specversion, '1.0');
  assertEquals(mapped.id, '1');
  assertEquals(mapped.time, '2025-06-01T12:00:00.000Z');
  assertEquals(mapped.source, 'https://nimbus.test');
  assertEquals(mapped.subject, '/tests/1');
  assertEquals(mapped.type, 'at.test.nimbus.test-event');
  assertEquals(mapped.data, { message: 'hello' });
  assertEquals(mapped.correlationid, 'corr-123');
});

Deno.test('eventSourcingDBEventToNimbusEvent extracts dataschema from nimbusMeta', () => {
  const schemaUrl = 'https://schema.example.com/v1';
  const esdbEvent = createTestEventSourcingDBEvent({
    data: {
      payload: { message: 'hello' },
      nimbusMeta: {
        correlationid: 'corr-123',
        dataschema: schemaUrl,
      },
    },
  });

  const mapped = eventSourcingDBEventToNimbusEvent(esdbEvent);

  assertEquals(mapped.dataschema, schemaUrl);
});

Deno.test('eventSourcingDBEventToNimbusEvent omits dataschema when nimbusMeta has none', () => {
  const mapped = eventSourcingDBEventToNimbusEvent(
    createTestEventSourcingDBEvent(),
  );

  assertEquals(mapped.dataschema, undefined);
});

Deno.test('eventSourcingDBEventToNimbusEvent handles event without nimbusMeta', () => {
  const mapped = eventSourcingDBEventToNimbusEvent(
    createTestEventSourcingDBEvent({ data: { rawKey: 'rawValue' } }),
  );

  // Without nimbusMeta the whole data object becomes the payload.
  assertEquals(mapped.data, { rawKey: 'rawValue' });
  assertEquals(mapped.dataschema, undefined);
  // A new correlationid should be generated (ULID format)
  assertNotEquals(mapped.correlationid, undefined);
  assertNotEquals(mapped.correlationid, '');
});

Deno.test('eventSourcingDBEventToNimbusEvent generates different correlationid for non-nimbus events', () => {
  const esdbEvent = createTestEventSourcingDBEvent({
    data: { rawKey: 'rawValue' },
  });

  // Each call should produce a unique correlation id
  assertNotEquals(
    eventSourcingDBEventToNimbusEvent(esdbEvent).correlationid,
    eventSourcingDBEventToNimbusEvent(esdbEvent).correlationid,
  );
});

// ---------------------------------------------------------------------------
// Round-trip: Nimbus → EventSourcingDB → Nimbus
// ---------------------------------------------------------------------------

Deno.test('round-trip preserves event data through mapping', () => {
  const original = createTestNimbusEvent({
    id: 'round-trip-id',
    correlationid: 'round-trip-corr',
    dataschema: 'https://schema.example.com/v1',
  });

  const candidate = nimbusEventToEventSourcingDBEventCandidate(original);

  // Simulate what EventSourcingDB would store and return
  const storedEvent = createTestEventSourcingDBEvent({
    id: original.id,
    time: new Date(original.time),
    source: candidate.source,
    subject: candidate.subject,
    type: candidate.type,
    data:
candidate.data,
  });

  const restored = eventSourcingDBEventToNimbusEvent(storedEvent);

  assertEquals(restored.id, original.id);
  assertEquals(restored.source, original.source);
  assertEquals(restored.subject, original.subject);
  assertEquals(restored.type, original.type);
  assertEquals(restored.data, original.data);
  assertEquals(restored.correlationid, original.correlationid);
  assertEquals(restored.dataschema, original.dataschema);
});
diff --git a/packages/eventsourcingdb/src/lib/eventMapping.ts b/packages/eventsourcingdb/src/lib/eventMapping.ts
new file mode 100644
index 0000000..0068bb2
--- /dev/null
+++ b/packages/eventsourcingdb/src/lib/eventMapping.ts
@@ -0,0 +1,111 @@
import { createEvent, type Event } from '@nimbus/core';
import { ulid } from '@std/ulid';
import type {
  Event as EventSourcingDBEvent,
  EventCandidate,
} from 'eventsourcingdb';

/**
 * Metadata that Nimbus attaches to events stored in EventSourcingDB
 * to preserve correlation and schema information.
 *
 * @property correlationid - A globally unique identifier that indicates a correlation to previous and subsequent messages.
 * @property dataschema - An absolute URL to the schema that the data adheres to (optional).
 */
export type NimbusEventMetadata = {
  correlationid: string;
  dataschema?: string;
};

/**
 * The data structure used to store Nimbus events in EventSourcingDB.
 * It wraps the original event payload together with Nimbus-specific metadata.
 */
export type EventData = {
  /** The actual business data of the event. */
  payload: Record<string, unknown>;
  /** Nimbus-specific metadata such as correlation id and data schema. */
  nimbusMeta: NimbusEventMetadata;
};

/**
 * Type guard that checks whether the given value conforms to the {@link EventData} structure
 * by verifying the presence of both `payload` and `nimbusMeta` properties.
 *
 * Note: this is a shallow structural check — it does not validate the
 * inner shape of `nimbusMeta`.
 *
 * @param data - The value to check.
 * @returns `true` if the value is an {@link EventData}, `false` otherwise.
 */
export const isEventData = (data: unknown): data is EventData => {
  return (
    typeof data === 'object' &&
    data !== null &&
    'payload' in data &&
    'nimbusMeta' in data
  );
};

/**
 * Converts a Nimbus {@link Event} into an EventSourcingDB {@link EventCandidate}
 * by mapping the event properties and wrapping the data with Nimbus metadata.
 *
 * @param event - The Nimbus event to convert.
 * @param traceparent - Optional W3C `traceparent` value attached for distributed tracing.
 * @param tracestate - Optional W3C `tracestate` value; only meaningful together with `traceparent`.
 * @returns An EventSourcingDB event candidate ready to be written.
 */
export const nimbusEventToEventSourcingDBEventCandidate = (
  event: Event,
  traceparent?: string,
  tracestate?: string,
): EventCandidate => {
  return {
    source: event.source,
    subject: event.subject,
    type: event.type,
    data: {
      payload: event.data,
      nimbusMeta: {
        correlationid: event.correlationid,
        // Only store dataschema when the source event actually carries one.
        ...(event.dataschema && { dataschema: event.dataschema }),
      },
    },
    ...(traceparent && { traceparent }),
    ...(tracestate && { tracestate }),
  };
};

/**
 * Converts an EventSourcingDB event back into a Nimbus {@link Event}.
 * If the event data contains Nimbus metadata, it extracts the original payload
 * and correlation information. Otherwise, it treats the entire data as the payload
 * and generates a new correlation id.
 *
 * @param eventSourcingDBEvent - The EventSourcingDB event to convert.
 * @returns A Nimbus event.
 */
export const eventSourcingDBEventToNimbusEvent = <
  TEvent extends Event = Event,
>(
  eventSourcingDBEvent: EventSourcingDBEvent,
): TEvent => {
  let data: Record<string, unknown>;
  let correlationid: string;
  let dataschema: string | undefined;

  if (isEventData(eventSourcingDBEvent.data)) {
    data = eventSourcingDBEvent.data.payload;
    correlationid = eventSourcingDBEvent.data.nimbusMeta.correlationid;
    dataschema = eventSourcingDBEvent.data.nimbusMeta.dataschema;
  } else {
    // Event was not written through Nimbus: treat the whole data object
    // as the payload and mint a fresh correlation id.
    data = eventSourcingDBEvent.data;
    correlationid = ulid();
  }

  return createEvent({
    id: eventSourcingDBEvent.id,
    time: eventSourcingDBEvent.time.toISOString(),
    source: eventSourcingDBEvent.source,
    subject: eventSourcingDBEvent.subject,
    type: eventSourcingDBEvent.type,
    correlationid,
    data,
    ...(dataschema && { dataschema }),
    // createEvent returns the base Event shape; TEvent lets callers narrow.
  }) as TEvent;
};
diff --git a/packages/eventsourcingdb/src/lib/eventObserver.test.ts b/packages/eventsourcingdb/src/lib/eventObserver.test.ts
new file mode 100644
index 0000000..4069565
--- /dev/null
+++ b/packages/eventsourcingdb/src/lib/eventObserver.test.ts
@@ -0,0 +1,60 @@
import { assert, assertEquals } from '@std/assert';
import { calculateBackoffDelay } from './eventObserver.ts';

// ---------------------------------------------------------------------------
// calculateBackoffDelay
// ---------------------------------------------------------------------------

Deno.test('calculateBackoffDelay returns a value in the expected range for attempt 0', () => {
  const initialDelayMs = 1000;
  const attempt = 0;

  // baseDelay = 1000 * 2^0 = 1000
  // jitter is between 0 and 30% of baseDelay (0..300)
  // result should be in [1000, 1300]
  for (let i = 0; i < 50; i++) {
    const result = calculateBackoffDelay(initialDelayMs, attempt);
    assert(
      result >= 1000 && result <= 1300,
      `Expected result in [1000, 1300], got ${result}`,
    );
  }
});

Deno.test('calculateBackoffDelay doubles base delay with each attempt', () => {
  const initialDelayMs =
1000;

  // Run multiple samples to account for jitter
  for (let attempt = 0; attempt < 5; attempt++) {
    const baseDelay = initialDelayMs * Math.pow(2, attempt);
    const maxWithJitter = Math.floor(baseDelay * 1.3);

    for (let i = 0; i < 20; i++) {
      const result = calculateBackoffDelay(
        initialDelayMs,
        attempt,
      );
      assert(
        result >= baseDelay && result <= maxWithJitter,
        `Attempt ${attempt}: expected [${baseDelay}, ${maxWithJitter}], got ${result}`,
      );
    }
  }
});

Deno.test('calculateBackoffDelay returns an integer', () => {
  for (let i = 0; i < 20; i++) {
    const result = calculateBackoffDelay(1000, i % 5);
    assertEquals(result, Math.floor(result));
  }
});

Deno.test('calculateBackoffDelay handles small initial delay', () => {
  const result = calculateBackoffDelay(1, 0);

  // baseDelay = 1, max jitter = 0.3, floor makes it 1
  assert(
    result >= 1 && result <= 1,
    `Expected 1, got ${result}`,
  );
});
diff --git a/packages/eventsourcingdb/src/lib/eventObserver.ts b/packages/eventsourcingdb/src/lib/eventObserver.ts
new file mode 100644
index 0000000..d145221
--- /dev/null
+++ b/packages/eventsourcingdb/src/lib/eventObserver.ts
@@ -0,0 +1,283 @@
import { getLogger } from '@nimbus/core';
import type { Event as EventSourcingDBEvent } from 'eventsourcingdb';
import { getEventSourcingDBClient } from './client.ts';
import { type TraceContext, withSpan } from './tracing.ts';

type Bound = {
  id: string;
  type: 'inclusive' | 'exclusive';
};

type ObserveFromLatestEvent = {
  subject: string;
  type: string;
  ifEventIsMissing: 'read-everything' | 'wait-for-event';
};

export type RetryOptions = {
  /**
   * The maximum number of retry attempts before giving up.
   * Defaults to 3.
   */
  maxRetries: number;
  /**
   * The initial delay in milliseconds before the first retry.
   * Subsequent retries will use exponential backoff with jitter.
   * Defaults to 3000ms.
   */
  initialRetryDelayMs: number;
};

/**
 * An event observer defines a handler function which will be applied to each event
 * and the options to observe the events according to the EventSourcingDB API.
 *
 * See https://docs.eventsourcingdb.io/getting-started/observing-events for more information.
 */
export type EventObserver = {
  /**
   * The subject of the events to observe.
   */
  subject: string;
  /**
   * Whether to observe events recursively.
   * Defaults to false.
   */
  recursive?: boolean;
  /**
   * The lower bound of the events to observe.
   * Defaults to undefined.
   */
  lowerBound?: Bound;
  /**
   * The from latest event to observe.
   * Defaults to undefined.
   */
  fromLatestEvent?: ObserveFromLatestEvent;
  /**
   * The event handler which will be called when an event is observed.
   *
   * @param event - The EventSourcingDB event that was observed.
   * @returns A promise that resolves when the event has been handled.
   */
  eventHandler: (event: EventSourcingDBEvent) => Promise<void> | void;
  /**
   * Options for retry behavior when the connection fails.
   * Uses exponential backoff with jitter between retries.
   * Defaults to { maxRetries: 3, initialRetryDelayMs: 3000 }.
   */
  retryOptions?: RetryOptions;
};

/**
 * Returns a promise that resolves after the given number of milliseconds.
 */
const delay = (ms: number): Promise<void> =>
  new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Calculates an exponential backoff delay with jitter for a given
 * retry attempt. The jitter adds up to 30% of the base delay to
 * avoid thundering-herd effects.
 *
 * @param initialDelayMs - The base delay in milliseconds before
 * exponential scaling.
 * @param attempt - The zero-based retry attempt number.
 * @returns The backoff delay in milliseconds.
 */
export const calculateBackoffDelay = (
  initialDelayMs: number,
  attempt: number,
): number => {
  const baseDelay = initialDelayMs * Math.pow(2, attempt);

  // Add jitter: random value between 0 and 30% of the base delay
  const jitter = Math.random() * baseDelay * 0.3;

  return Math.floor(baseDelay + jitter);
};

/**
 * Logs an informational message when an event observer connects or
 * reconnects to EventSourcingDB. When `retryCount` is greater
 * than zero the message indicates a successful reconnection.
 *
 * @param subject - The observed subject.
 * @param retryCount - The number of retries that preceded this
 * connection (0 for the initial connection).
 * @param data - Additional context logged alongside the message.
 */
const logObserverConnection = (
  subject: string,
  retryCount: number,
  data: Record<string, unknown>,
): void => {
  const retryLabel = retryCount === 1 ? 'retry' : 'retries';
  const message = retryCount > 0
    ? `Reconnected event observer for subject "${subject}" after ${retryCount} ${retryLabel}`
    : `Observing events for subject "${subject}"`;

  getLogger().info({ category: 'Nimbus', message, data });
};

/**
 * Handles an observer error by logging it and waiting with exponential
 * backoff before the next retry attempt. When the maximum number of
 * retries is exceeded a critical log entry is emitted and no further
 * retries are attempted.
 *
 * @param error - The error that caused the observer to disconnect.
 * @param subject - The observed subject.
 * @param retryCount - The current (1-based) retry attempt number.
 * @param maxRetries - The maximum number of allowed retries.
 * @param initialRetryDelayMs - The base delay used for exponential
 * backoff calculation.
 * @returns `true` if the observer should retry, `false` if retries
 * are exhausted.
 */
const handleObserverError = async (
  error: unknown,
  subject: string,
  retryCount: number,
  maxRetries: number,
  initialRetryDelayMs: number,
): Promise<boolean> => {
  if (retryCount > maxRetries) {
    getLogger().critical({
      category: 'Nimbus',
      message:
        `Failed to observe events for subject "${subject}" after ${maxRetries} ${
          maxRetries === 1 ? 'retry' : 'retries'
        }.`,
    });
    return false;
  }

  const backoffDelay = calculateBackoffDelay(
    initialRetryDelayMs,
    retryCount - 1,
  );

  getLogger().error({
    category: 'Nimbus',
    message:
      `Error observing events for subject "${subject}" (retry ${retryCount}/${maxRetries}), retrying in ${backoffDelay}ms`,
    error: error as Error,
  });

  await delay(backoffDelay);
  return true;
};

/**
 * Starts observing events for the given {@link EventObserver} with
 * automatic reconnection on failure.
 *
 * On each connection attempt the EventSourcingDB server is pinged
 * first. Events are then consumed from the stream and each one is
 * passed to the observer's event handler inside an OpenTelemetry
 * span. If the event carries a `traceparent`, the span is linked to
 * the original writer's trace for end-to-end distributed tracing.
 *
 * After every successfully handled event the lower bound is advanced
 * so that a reconnection resumes from the last processed position.
 *
 * When the connection drops, exponential backoff with jitter is
 * applied up to the configured maximum number of retries.
 *
 * @param eventObserver - The event observer configuration.
 */
const observeWithRetry = async (
  eventObserver: EventObserver,
): Promise<void> => {
  const eventSourcingDBClient = getEventSourcingDBClient();

  const maxRetries = eventObserver.retryOptions?.maxRetries ?? 3;
  const initialRetryDelayMs =
    eventObserver.retryOptions?.initialRetryDelayMs ?? 3000;

  let retryCount = 0;
  let lastProcessedEventId: string | undefined;

  while (true) {
    try {
      // Once we have a concrete position, use it as lower bound and
      // drop fromLatestEvent; otherwise fall back to the original options.
      const lowerBound: Bound | undefined = lastProcessedEventId
        ? { id: lastProcessedEventId, type: 'exclusive' }
        : eventObserver.lowerBound;
      const fromLatestEvent: ObserveFromLatestEvent | undefined =
        lastProcessedEventId
          ? undefined
          : eventObserver.fromLatestEvent;

      // Verify connection
      await eventSourcingDBClient.ping();

      logObserverConnection(eventObserver.subject, retryCount, {
        recursive: eventObserver.recursive ?? false,
        lowerBound,
        fromLatestEvent,
      });
      retryCount = 0;

      for await (
        const event of eventSourcingDBClient.observeEvents(
          eventObserver.subject,
          {
            recursive: eventObserver.recursive ?? false,
            ...(lowerBound ? { lowerBound } : {}),
            ...(fromLatestEvent ? { fromLatestEvent } : {}),
          },
        )
      ) {
        const traceContext: TraceContext | undefined = event.traceparent
          ? {
            traceparent: event.traceparent,
            tracestate: event.tracestate,
          }
          : undefined;

        await withSpan(
          'observeEvent',
          async () => {
            await eventObserver.eventHandler(event);
          },
          traceContext,
        );

        // Track last processed position so retries resume from here
        lastProcessedEventId = event.id;
      }

      // If the loop completes normally (stream ended), we're done
      return;
    } catch (error) {
      retryCount++;

      const shouldRetry = await handleObserverError(
        error,
        eventObserver.subject,
        retryCount,
        maxRetries,
        initialRetryDelayMs,
      );

      if (!shouldRetry) {
        return;
      }
    }
  }
};

/**
 * Initializes an event observer by starting the observation loop in
 * the background (non-blocking). The observer will keep running and
 * reconnecting according to its retry options until the stream ends
 * or retries are exhausted.
 *
 * @param eventObserver - The event observer configuration.
 */
export const initEventObserver = (eventObserver: EventObserver): void => {
  // Fire-and-forget by design: observeWithRetry catches stream errors
  // internally (retry loop + logging). `void` marks the intentionally
  // floating promise instead of leaving it unhandled implicitly.
  void observeWithRetry(eventObserver);
};
diff --git a/packages/eventsourcingdb/src/lib/integration.test.ts b/packages/eventsourcingdb/src/lib/integration.test.ts
new file mode 100644
index 0000000..ad15081
--- /dev/null
+++ b/packages/eventsourcingdb/src/lib/integration.test.ts
@@ -0,0 +1,241 @@
import { createEvent, type Event } from '@nimbus/core';
import { assertEquals } from '@std/assert';
import { Container, type Event as EventSourcingDBEvent } from 'eventsourcingdb';
import { setupEventSourcingDBClient } from './client.ts';
import { eventSourcingDBEventToNimbusEvent } from './eventMapping.ts';
import { initEventObserver } from './eventObserver.ts';
import { readEvents } from './readEvents.ts';
import { writeEvents } from './writeEvents.ts';

// ---------------------------------------------------------------------------
// Shared container lifecycle
// ---------------------------------------------------------------------------

const container = new Container();

/**
 * Start the container once before all tests and stop it after.
 * We use a wrapper test with steps so the container is shared
 * across all assertions but properly cleaned up.
 */
Deno.test({
  name: 'integration: eventsourcingdb',
  // The eventsourcingdb npm Client keeps TCP connections and timers
  // alive internally; disable Deno's resource/op sanitizers for
  // this integration test.
+ sanitizeResources: false, + sanitizeOps: false, + fn: async (t) => { + await container.start(); + + try { + await setupEventSourcingDBClient({ + url: container.getBaseUrl(), + apiToken: container.getApiToken(), + }); + + // ----------------------------------------------------------------- + // setupEventSourcingDBClient + // ----------------------------------------------------------------- + + await t.step( + 'setupEventSourcingDBClient connects successfully', + () => { + // If we reach here, setupEventSourcingDBClient did not + // throw, meaning ping() and verifyApiToken() both passed. + }, + ); + + // ----------------------------------------------------------------- + // writeEvents + readEvents round-trip + // ----------------------------------------------------------------- + + await t.step( + 'writeEvents persists events that readEvents can retrieve', + async () => { + const event = createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.integration', + subject: '/integration/1', + data: { key: 'value' }, + correlationid: 'corr-integration', + }); + + await writeEvents([event]); + + const readBack: EventSourcingDBEvent[] = []; + + for await ( + const e of readEvents('/integration/1', { + recursive: false, + }) + ) { + readBack.push(e); + } + + assertEquals(readBack.length, 1); + assertEquals(readBack[0].source, event.source); + assertEquals(readBack[0].type, event.type); + assertEquals(readBack[0].subject, event.subject); + }, + ); + + await t.step( + 'written events can be mapped back to Nimbus events', + async () => { + const original = createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.roundtrip', + subject: '/roundtrip/1', + data: { message: 'round-trip' }, + correlationid: 'corr-roundtrip', + dataschema: 'https://schema.example.com/v1', + }); + + await writeEvents([original]); + + const readBack: Event[] = []; + + for await ( + const e of readEvents('/roundtrip/1', { + recursive: false, + }) + ) { + 
readBack.push(eventSourcingDBEventToNimbusEvent(e)); + } + + assertEquals(readBack.length, 1); + + const restored = readBack[0]; + assertEquals(restored.source, original.source); + assertEquals(restored.type, original.type); + assertEquals(restored.subject, original.subject); + assertEquals(restored.data, original.data); + assertEquals( + restored.correlationid, + original.correlationid, + ); + assertEquals(restored.dataschema, original.dataschema); + }, + ); + + await t.step( + 'writeEvents persists multiple events at once', + async () => { + const events = [ + createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.batch', + subject: '/batch/1', + data: { index: 0 }, + }), + createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.batch', + subject: '/batch/1', + data: { index: 1 }, + }), + createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.batch', + subject: '/batch/1', + data: { index: 2 }, + }), + ]; + + await writeEvents(events); + + const readBack: EventSourcingDBEvent[] = []; + + for await ( + const e of readEvents('/batch/1', { + recursive: false, + }) + ) { + readBack.push(e); + } + + assertEquals(readBack.length, 3); + }, + ); + + // ----------------------------------------------------------------- + // initEventObserver + // ----------------------------------------------------------------- + + await t.step( + 'initEventObserver receives events written to the observed subject', + async () => { + // Write events first so they are already stored + await writeEvents([ + createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.observer', + subject: '/observer/1', + data: { index: 0 }, + }), + createEvent({ + source: 'https://nimbus.test', + type: 'at.test.nimbus.observer', + subject: '/observer/1', + data: { index: 1 }, + }), + ]); + + // Track events delivered to the handler + const received: EventSourcingDBEvent[] = []; + let resolveReceived: () => void; + const allReceived = new 
Promise((resolve) => { + resolveReceived = resolve; + }); + + // Start observing — the observer runs in the + // background and will pick up the existing events. + initEventObserver({ + subject: '/observer/1', + recursive: false, + eventHandler: (event) => { + received.push(event); + if (received.length >= 2) { + resolveReceived(); + } + }, + retryOptions: { + maxRetries: 1, + initialRetryDelayMs: 100, + }, + }); + + // Wait for the handler to collect both events + // (with a safety timeout) + await Promise.race([ + allReceived, + new Promise((_, reject) => + setTimeout( + () => + reject( + new Error( + 'Observer did not receive events within 5 s', + ), + ), + 5000, + ) + ), + ]); + + assertEquals(received.length, 2); + assertEquals( + received[0].type, + 'at.test.nimbus.observer', + ); + assertEquals( + received[1].type, + 'at.test.nimbus.observer', + ); + }, + ); + } finally { + await container.stop(); + } + }, +}); diff --git a/packages/eventsourcingdb/src/lib/readEvents.ts b/packages/eventsourcingdb/src/lib/readEvents.ts new file mode 100644 index 0000000..4d05414 --- /dev/null +++ b/packages/eventsourcingdb/src/lib/readEvents.ts @@ -0,0 +1,32 @@ +import type { Event, ReadEventsOptions } from 'eventsourcingdb'; +import { getEventSourcingDBClient } from './client.ts'; +import { withAsyncGeneratorSpan } from './tracing.ts'; + +/** + * Reads events from EventSourcingDB for a given subject. + * + * Returns an async generator that yields raw EventSourcingDB + * {@link Event} instances (not Nimbus events). Use + * {@link eventSourcingDBEventToNimbusEvent} to convert them + * into Nimbus events if needed. + * + * @param subject - The subject to read events for. + * @param options - Options to control which events are read. + * @param signal - An optional abort signal to cancel the read. + * @returns An async generator yielding EventSourcingDB events. 
+ */ +export const readEvents = ( + subject: string, + options: ReadEventsOptions, + signal?: AbortSignal, +): AsyncGenerator => { + return withAsyncGeneratorSpan('readEvents', () => { + const eventSourcingDBClient = getEventSourcingDBClient(); + + return eventSourcingDBClient.readEvents( + subject, + options, + signal, + ); + }); +}; diff --git a/packages/eventsourcingdb/src/lib/tracing.test.ts b/packages/eventsourcingdb/src/lib/tracing.test.ts new file mode 100644 index 0000000..b89d036 --- /dev/null +++ b/packages/eventsourcingdb/src/lib/tracing.test.ts @@ -0,0 +1,84 @@ +import { assertEquals, assertRejects } from '@std/assert'; +import { withAsyncGeneratorSpan, withSpan } from './tracing.ts'; + +// --------------------------------------------------------------------------- +// withSpan +// --------------------------------------------------------------------------- + +Deno.test('withSpan returns the result of the wrapped function', async () => { + const result = await withSpan( + 'testOp', + () => Promise.resolve(42), + ); + + assertEquals(result, 42); +}); + +Deno.test('withSpan re-throws errors from the wrapped function', async () => { + await assertRejects( + () => withSpan('testOp', () => Promise.reject(new Error('boom'))), + Error, + 'boom', + ); +}); + +Deno.test('withSpan accepts an optional traceContext without error', async () => { + const result = await withSpan( + 'testOp', + () => Promise.resolve('traced'), + { + traceparent: + '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01', + tracestate: 'vendor=value', + }, + ); + + assertEquals(result, 'traced'); +}); + +// --------------------------------------------------------------------------- +// withAsyncGeneratorSpan +// --------------------------------------------------------------------------- + +async function* threeValues() { + yield 1; + yield 2; + yield 3; +} + +async function* failingAfterFirst(): AsyncGenerator { + yield 1; + throw new Error('generator failed'); +} + 
Deno.test('withAsyncGeneratorSpan yields all values from the inner generator', async () => {
  const values: number[] = [];

  for await (const value of withAsyncGeneratorSpan('testOp', threeValues)) {
    values.push(value);
  }

  assertEquals(values, [1, 2, 3]);
});

Deno.test('withAsyncGeneratorSpan re-throws errors from the inner generator', async () => {
  const values: number[] = [];

  await assertRejects(
    async () => {
      for await (
        const value of withAsyncGeneratorSpan(
          'testOp',
          failingAfterFirst,
        )
      ) {
        values.push(value);
      }
    },
    Error,
    'generator failed',
  );

  // The value yielded before the error should still have been received
  assertEquals(values, [1]);
});
diff --git a/packages/eventsourcingdb/src/lib/tracing.ts b/packages/eventsourcingdb/src/lib/tracing.ts
new file mode 100644
index 0000000..d8292f0
--- /dev/null
+++ b/packages/eventsourcingdb/src/lib/tracing.ts
@@ -0,0 +1,176 @@
import {
  context as otelContext,
  metrics,
  propagation,
  SpanKind,
  SpanStatusCode,
  trace,
} from '@opentelemetry/api';

export const tracer = trace.getTracer('nimbus');

export const DB_SYSTEM = 'eventsourcingdb';

const meter = metrics.getMeter('nimbus');

const operationCounter = meter.createCounter(
  'eventsourcingdb_operation_total',
  {
    description: 'Total number of EventSourcingDB operations',
  },
);

const operationDuration = meter.createHistogram(
  'eventsourcingdb_operation_duration_seconds',
  {
    description: 'Duration of EventSourcingDB operations in seconds',
    unit: 's',
  },
);

/**
 * Trace context extracted from an EventSourcingDB event, used to link
 * the processing span to the span that originally wrote the event.
 */
export type TraceContext = {
  traceparent: string;
  tracestate?: string;
};

/**
 * Wraps an async function with OpenTelemetry tracing and metrics.
 *
 * Records:
 * - `eventsourcingdb_operation_total` counter with operation and status labels
 * - `eventsourcingdb_operation_duration_seconds` histogram with operation label
 *
 * @param operation - The EventSourcingDB operation name (e.g., 'readEvents', 'writeEvents')
 * @param fn - The async function to execute within the span
 * @param traceContext - Optional trace context from an EventSourcingDB event to
 * continue a distributed trace from the event writer.
 * @returns The result of the async function
 */
export const withSpan = <T>(
  operation: string,
  fn: () => Promise<T>,
  traceContext?: TraceContext,
): Promise<T> => {
  const startTime = performance.now();
  const metricLabels = {
    operation,
  };

  // When the event carries W3C trace headers, extract them so this span
  // becomes a child of the writer's span; otherwise use the active context.
  const parentContext = traceContext
    ? propagation.extract(otelContext.active(), traceContext)
    : otelContext.active();

  return tracer.startActiveSpan(
    `eventsourcingdb.${operation}`,
    {
      kind: SpanKind.CLIENT,
      attributes: {
        'db.system': DB_SYSTEM,
        'db.operation': operation,
      },
    },
    parentContext,
    async (span) => {
      try {
        const result = await fn();

        // Record success metrics
        operationCounter.add(1, {
          ...metricLabels,
          status: 'success',
        });
        operationDuration.record(
          (performance.now() - startTime) / 1000,
          metricLabels,
        );

        return result;
      } catch (error) {
        // Record error metrics
        operationCounter.add(1, {
          ...metricLabels,
          status: 'error',
        });
        operationDuration.record(
          (performance.now() - startTime) / 1000,
          metricLabels,
        );

        span.setStatus({
          code: SpanStatusCode.ERROR,
          message: error instanceof Error
            ? error.message
            : 'Unknown error',
        });
        span.recordException(
          error instanceof Error ? error : new Error('Unknown error'),
        );
        throw error;
      } finally {
        span.end();
      }
    },
  );
};

/**
 * Wraps an async generator with OpenTelemetry tracing and metrics.
 *
 * Records:
 * - `eventsourcingdb_operation_total` counter with operation and status labels
 * - `eventsourcingdb_operation_duration_seconds` histogram with operation label
 *
 * NOTE(review): the span is created manually (not via startActiveSpan), so
 * code running between yields is not executed in the span's context —
 * TODO confirm this is intentional.
 *
 * @param operation - The EventSourcingDB operation name (e.g., 'readEvents')
 * @param fn - The function returning an async generator to execute within the span
 * @returns An async generator that yields the same values as the inner generator
 */
export async function* withAsyncGeneratorSpan<T>(
  operation: string,
  fn: () => AsyncGenerator<T>,
): AsyncGenerator<T> {
  const startTime = performance.now();
  const metricLabels = {
    operation,
  };

  const span = tracer.startSpan(`eventsourcingdb.${operation}`, {
    kind: SpanKind.CLIENT,
    attributes: {
      'db.system': DB_SYSTEM,
      'db.operation': operation,
    },
  });

  try {
    yield* fn();

    // Record success metrics
    operationCounter.add(1, { ...metricLabels, status: 'success' });
    operationDuration.record(
      (performance.now() - startTime) / 1000,
      metricLabels,
    );
  } catch (error) {
    // Record error metrics
    operationCounter.add(1, { ...metricLabels, status: 'error' });
    operationDuration.record(
      (performance.now() - startTime) / 1000,
      metricLabels,
    );

    span.setStatus({
      code: SpanStatusCode.ERROR,
      message: error instanceof Error ? error.message : 'Unknown error',
    });
    span.recordException(
      error instanceof Error ? error : new Error('Unknown error'),
    );
    throw error;
  } finally {
    span.end();
  }
}
diff --git a/packages/eventsourcingdb/src/lib/writeEvents.ts b/packages/eventsourcingdb/src/lib/writeEvents.ts
new file mode 100644
index 0000000..fbb7a90
--- /dev/null
+++ b/packages/eventsourcingdb/src/lib/writeEvents.ts
@@ -0,0 +1,38 @@
import type { Event } from '@nimbus/core';
import { context, propagation } from '@opentelemetry/api';
import type { EventCandidate, Precondition } from 'eventsourcingdb';
import { getEventSourcingDBClient } from './client.ts';
import { nimbusEventToEventSourcingDBEventCandidate } from './eventMapping.ts';
import { withSpan } from './tracing.ts';

/**
 * Writes one or more Nimbus events to EventSourcingDB. Each event is
 * converted into an EventSourcingDB event candidate before being persisted.
 *
 * @param events - The Nimbus events to write.
 * @param preconditions - Optional preconditions that must be met for the write to succeed.
 */
export const writeEvents = (
  events: Event[],
  preconditions?: Precondition[],
): Promise<void> => {
  return withSpan('writeEvents', async () => {
    const eventSourcingDBClient = getEventSourcingDBClient();

    // Capture the active trace context so observers/readers can link
    // their spans back to this write.
    const carrier: Record<string, string> = {};
    propagation.inject(context.active(), carrier);

    const eventCandidates: EventCandidate[] = events.map((event) =>
      nimbusEventToEventSourcingDBEventCandidate(
        event,
        carrier['traceparent'],
        carrier['tracestate'],
      )
    );

    await eventSourcingDBClient.writeEvents(
      eventCandidates,
      preconditions,
    );
  });
};