From 41ce8a1fc25593648e8ffd61284dff99b46ba8f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Tue, 26 Aug 2025 10:51:58 +0200 Subject: [PATCH 01/37] adds app example with hexagonal architecture --- deno.json | 1 + deno.lock | 39 +++++++-- examples/gustav/deno.json | 36 ++++++++ .../infrastructure/http/auth.middleware.ts | 53 ++++++++++++ .../gustav/src/contexts/meal-planner/.gitkeep | 0 .../gustav/src/contexts/notification/.gitkeep | 0 .../recipe/core/commands/addRecipe.ts | 66 +++++++++++++++ .../contexts/recipe/core/commands/command.md | 6 ++ .../contexts/recipe/core/domain/ingredient.ts | 9 ++ .../src/contexts/recipe/core/domain/model.md | 3 + .../src/contexts/recipe/core/domain/recipe.ts | 10 +++ .../src/contexts/recipe/core/errors/error.md | 3 + .../src/contexts/recipe/core/events/event.md | 3 + .../recipe/core/events/recipeAdded.ts | 11 +++ .../src/contexts/recipe/core/ports/port.md | 21 +++++ .../core/ports/recipeEventOutputPort.ts | 5 ++ .../recipe/core/ports/recipeRepository.ts | 27 ++++++ .../contexts/recipe/core/queries/getRecipe.ts | 20 +++++ .../src/contexts/recipe/core/queries/query.md | 6 ++ .../infrastructure/http/addRecipe.handler.ts | 17 ++++ .../infrastructure/http/getRecipe.handler.ts | 16 ++++ .../infrastructure/http/recipeRouter.ts | 21 +++++ .../repository/recipeMemoryRepository.ts | 84 +++++++++++++++++++ examples/gustav/src/eventBus.ts | 19 +++++ examples/gustav/src/main.ts | 61 ++++++++++++++ examples/gustav/src/mongodb.ts | 33 ++++++++ examples/gustav/src/router.ts | 45 ++++++++++ examples/gustav/src/shared/types/unit.ts | 33 ++++++++ 28 files changed, 639 insertions(+), 9 deletions(-) create mode 100644 examples/gustav/deno.json create mode 100644 examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts create mode 100644 examples/gustav/src/contexts/meal-planner/.gitkeep create mode 100644 examples/gustav/src/contexts/notification/.gitkeep create mode 100644 examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts create mode 100644 examples/gustav/src/contexts/recipe/core/commands/command.md create mode 100644 examples/gustav/src/contexts/recipe/core/domain/ingredient.ts create mode 100644 examples/gustav/src/contexts/recipe/core/domain/model.md create mode 100644 examples/gustav/src/contexts/recipe/core/domain/recipe.ts create mode 100644 examples/gustav/src/contexts/recipe/core/errors/error.md create mode 100644 examples/gustav/src/contexts/recipe/core/events/event.md create mode 100644 examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts create mode 100644 examples/gustav/src/contexts/recipe/core/ports/port.md create mode 100644 examples/gustav/src/contexts/recipe/core/ports/recipeEventOutputPort.ts create mode 100644 examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts create mode 100644 examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts create mode 100644 examples/gustav/src/contexts/recipe/core/queries/query.md create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/getRecipe.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts create mode 100644 examples/gustav/src/eventBus.ts create mode 100644 examples/gustav/src/main.ts create mode 100644 examples/gustav/src/mongodb.ts create mode 100644 
examples/gustav/src/router.ts create mode 100644 examples/gustav/src/shared/types/unit.ts diff --git a/deno.json b/deno.json index 695a9fe..cdfa951 100644 --- a/deno.json +++ b/deno.json @@ -7,6 +7,7 @@ "./packages/mongodb", "./packages/oak", "./packages/utils", + "./examples/gustav", "./examples/the-expense" ], "nodeModulesDir": "none" diff --git a/deno.lock b/deno.lock index f74a6e7..4d97c66 100644 --- a/deno.lock +++ b/deno.lock @@ -22,6 +22,7 @@ "jsr:@std/ulid@1": "1.0.0", "jsr:@tajpouria/cors@^1.2.1": "1.2.1", "npm:@types/node@*": "22.5.4", + "npm:effect@^3.17.9": "3.17.9", "npm:mongodb@^6.12.0": "6.12.0", "npm:path-to-regexp@6.2.1": "6.2.1", "npm:path-to-regexp@^6.3.0": "6.3.0", @@ -127,6 +128,9 @@ "sparse-bitfield" ] }, + "@standard-schema/spec@1.0.0": { + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==" + }, "@types/node@22.5.4": { "integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", "dependencies": [ @@ -146,6 +150,19 @@ "integrity": "sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==", "deprecated": true }, + "effect@3.17.9": { + "integrity": "sha512-Nkkn9n1zhy30Dq0MpQatDCH7nfYnOIiebkOHNxmmvoVnEDKCto+2ZwDDWFGzcN/ojwfqjRXWGC9Lo91K5kwZCg==", + "dependencies": [ + "@standard-schema/spec", + "fast-check" + ] + }, + "fast-check@3.23.2": { + "integrity": "sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==", + "dependencies": [ + "pure-rand" + ] + }, "memory-pager@1.5.0": { "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, @@ -162,15 +179,6 @@ "@mongodb-js/saslprep", "bson", "mongodb-connection-string-url" - ], - "optionalPeers": [ - "@aws-sdk/credential-providers@^3.188.0", - "@mongodb-js/zstd@^1.1.0 || ^2.0.0", - "gcp-metadata@^5.2.0", - "kerberos@^2.0.1", - "mongodb-client-encryption@>=6.0.0 <7", - "snappy@^7.2.2", - "socks@^2.7.1" ] }, "path-to-regexp@6.2.1": { @@ -182,6 +190,9 @@ "punycode@2.3.1": { "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, + "pure-rand@6.1.0": { + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==" + }, "sparse-bitfield@3.0.3": { "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", "dependencies": [ @@ -252,6 +263,16 @@ }, "workspace": { "members": { + "examples/gustav": { + "dependencies": [ + "jsr:@oak/oak@^17.1.4", + "jsr:@std/ulid@1", + "jsr:@tajpouria/cors@^1.2.1", + "npm:effect@^3.17.9", + "npm:mongodb@^6.12.0", + "npm:zod@^3.24.1" + ] + }, "examples/the-expense": { "dependencies": [ "jsr:@oak/oak@^17.1.4", diff --git a/examples/gustav/deno.json b/examples/gustav/deno.json new file mode 100644 index 0000000..781f981 --- /dev/null +++ b/examples/gustav/deno.json @@ -0,0 +1,36 @@ +{ + "tasks": { + "dev": "deno run -A --watch src/main.ts", + "test": "deno test -A" + }, + "lint": { + "include": ["src/"], + "exclude": [], + "rules": { + "tags": ["recommended"], + "include": [], + "exclude": ["no-explicit-any"] + } + }, + "fmt": { + "include": ["src/"], + "exclude": [], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "test": { + "include": ["src/"] + }, + "imports": { + "@oak/oak": "jsr:@oak/oak@^17.1.4", + "@std/ulid": 
"jsr:@std/ulid@^1.0.0", + "@tajpouria/cors": "jsr:@tajpouria/cors@^1.2.1", + "mongodb": "npm:mongodb@^6.12.0", + "zod": "npm:zod@^3.24.1", + "effect": "npm:effect@^3.17.9" + } +} diff --git a/examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts b/examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts new file mode 100644 index 0000000..fa8bcb8 --- /dev/null +++ b/examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts @@ -0,0 +1,53 @@ +import { AuthContext, getLogger } from '@nimbus/core'; +import type { Context } from '@oak/oak/context'; +import type { Next } from '@oak/oak/middleware'; + +/** + * ! NOT FOR PRODUCTION USE + * + * This is just a simple example of how to implement a middleware for authentication. + */ +export const exampleAuthMiddleware = async ( + ctx: Context, + next: Next, +) => { + const authorization = ctx.request.headers.get('authorization'); + + if (!authorization) { + const anonymousAuthContext: AuthContext = { + sub: 'anonymous', + groups: [], + }; + + ctx.state.authContext = anonymousAuthContext; + + await next(); + } else { + try { + const token = authorization?.replace('Bearer ', ''); + + if (token === 'very-special-secret') { + const adminAuthContext: AuthContext = { + sub: '02e50464-b051-70fa-25ef-63038890d80c', + groups: ['admin'], + }; + + ctx.state.authContext = adminAuthContext; + } else { + throw new Error('Invalid token'); + } + + await next(); + } catch (error: any) { + getLogger().error({ + message: 'Failed to authenticate user', + error, + }); + + ctx.response.status = 401; + ctx.response.body = { + message: 'Unauthorized', + }; + } + } +}; diff --git a/examples/gustav/src/contexts/meal-planner/.gitkeep b/examples/gustav/src/contexts/meal-planner/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/examples/gustav/src/contexts/notification/.gitkeep b/examples/gustav/src/contexts/notification/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts new file mode 100644 index 0000000..b0e25d5 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts @@ -0,0 +1,66 @@ +import { AuthContext, Command, InvalidInputException } from '@nimbus/core'; +import { ulid } from '@std/ulid'; +import { z } from 'zod'; +import { Unit } from '../../../../shared/types/unit.ts'; +import type { Recipe } from '../domain/recipe.ts'; +import { RecipeAddedEvent } from '../events/recipeAdded.ts'; +import { RecipeEventOutputPort } from '../ports/recipeEventOutputPort.ts'; +import { RecipeRepository } from '../ports/recipeRepository.ts'; + +export const AddRecipeCommand = Command( + z.literal('recipe.add'), + z.object({ + name: z.string(), + instructions: z.array(z.string()), + ingredients: z.array(z.object({ + name: z.string(), + quantity: z.number(), + unit: Unit, + })), + }), + AuthContext, +); +export type AddRecipeCommand = z.infer; + +export const addRecipe = async ( + command: AddRecipeCommand, + repository: RecipeRepository, + eventBus: RecipeEventOutputPort, +): Promise => { + const count = await repository.count({ + filter: { + name: command.data.payload.name, + }, + }); + + if (count > 0) { + throw new InvalidInputException('Recipe already exists', { + errorCode: 'DUPLICATE_RECIPE', + reason: + 'A recipe with this name already exists. 
The name for each recipe must be unique, please choose a different name.', + }); + } + + const recipe = await repository.insert({ + id: repository.generateId(), + name: command.data.payload.name, + instructions: command.data.payload.instructions, + ingredients: command.data.payload.ingredients, + }); + + eventBus.putEvent({ + specversion: '1.0', + id: ulid(), + source: command.source, + type: 'recipe.added', + data: { + correlationId: command.data.correlationId, + payload: { + id: recipe.id, + name: recipe.name, + }, + }, + }); + + return recipe; +}; diff --git a/examples/gustav/src/contexts/recipe/core/commands/command.md b/examples/gustav/src/contexts/recipe/core/commands/command.md new file mode 100644 index 0000000..9d03304 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/commands/command.md @@ -0,0 +1,6 @@ +./src/contexts//core/commands/.ts + +We define the commands and command processors here. + +In other libraries or frameworks this place might be referred to as use-cases. +But as we want to emphasize the CQRS pattern we intentionally split up the write use-cases which occur through commands and the read use-cases which occur through queries. diff --git a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts new file mode 100644 index 0000000..d1f9741 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts @@ -0,0 +1,9 @@ +import { z } from 'zod'; +import { Unit } from '../../../../shared/types/unit.ts'; + +export const Ingredient = z.object({ + name: z.string(), + quantity: z.number(), + unit: Unit, +}); +export type Ingredient = z.infer; diff --git a/examples/gustav/src/contexts/recipe/core/domain/model.md b/examples/gustav/src/contexts/recipe/core/domain/model.md new file mode 100644 index 0000000..4302618 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/domain/model.md @@ -0,0 +1,3 @@ +./src/contexts//core/domain/.ts + +We define the domain specific models and types here. diff --git a/examples/gustav/src/contexts/recipe/core/domain/recipe.ts b/examples/gustav/src/contexts/recipe/core/domain/recipe.ts new file mode 100644 index 0000000..adc1483 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/domain/recipe.ts @@ -0,0 +1,10 @@ +import { z } from 'zod'; +import { Ingredient } from './ingredient.ts'; + +export const Recipe = z.object({ + id: z.string(), + name: z.string(), + instructions: z.array(z.string()), + ingredients: z.array(Ingredient), +}); +export type Recipe = z.infer; diff --git a/examples/gustav/src/contexts/recipe/core/errors/error.md b/examples/gustav/src/contexts/recipe/core/errors/error.md new file mode 100644 index 0000000..e43d0d3 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/errors/error.md @@ -0,0 +1,3 @@ +./src/contexts//core/errors/.ts + +We define domain specific errors that can occur during runtime here. diff --git a/examples/gustav/src/contexts/recipe/core/events/event.md b/examples/gustav/src/contexts/recipe/core/events/event.md new file mode 100644 index 0000000..7778c05 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/events/event.md @@ -0,0 +1,3 @@ +./src/contexts//core/events/.ts + +We define the events here. 
diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts new file mode 100644 index 0000000..8f13831 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts @@ -0,0 +1,11 @@ +import { Event } from '@nimbus/core'; +import { z } from 'zod'; + +export const RecipeAddedEvent = Event( + z.literal('recipe.added'), + z.object({ + id: z.string(), + name: z.string(), + }), +); +export type RecipeAddedEvent = z.infer<typeof RecipeAddedEvent>; diff --git a/examples/gustav/src/contexts/recipe/core/ports/port.md b/examples/gustav/src/contexts/recipe/core/ports/port.md new file mode 100644 index 0000000..ac95467 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/ports/port.md @@ -0,0 +1,21 @@ +./src/contexts//core/ports/.ts + +We define the specific ports here, which are the interfaces for external systems implemented in the infrastructure layer of the software. The core defines the contract that the command and query handlers need in order to work. + +For example, a repository (a thing to store data) might be needed for commands and queries to change and read data. +So here in the core we will define a port for it. + +```typescript +export interface RecipeRepository { + insert: (recipe: Recipe) => Recipe; + update: (recipe: Recipe) => Recipe; + delete: (id: string) => void; + getById: (id: string) => Recipe; + listByOwner: ( + ownerId: string, + options?: { readonly offset?: number; readonly limit?: number } + ) => Recipe[]; +} +``` + +Now we can use the repository in the command and query handlers and know there is an insert method available to us. But we do not care about any implementation details, such as whether it is PostgreSQL, MongoDB, or an in-memory store. diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeEventOutputPort.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeEventOutputPort.ts new file mode 100644 index 0000000..e2d70b1 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/ports/recipeEventOutputPort.ts @@ -0,0 +1,5 @@ +import { CloudEvent } from '@nimbus/core'; + +export interface RecipeEventOutputPort { + putEvent: >(event: TEvent) => void; +} diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts new file mode 100644 index 0000000..a3ccffa --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts @@ -0,0 +1,27 @@ +import type { Recipe } from '../domain/recipe.ts'; + +export interface RecipeRepository { + generateId: () => string; + + insert: (recipe: Recipe) => Promise<Recipe>; + + update: (recipe: Recipe) => Promise<Recipe>; + + delete: (id: string) => Promise<void>; + + getById: (id: string) => Promise<Recipe>; + + list: ( + options?: { + limit?: number; + offset?: number; + filter?: Partial<Recipe>; + }, + ) => Promise<Recipe[]>; + + count: ( + options?: { + filter?: Partial<Recipe>; + }, + ) => Promise<number>; +} diff --git a/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts b/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts new file mode 100644 index 0000000..c0828ca --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts @@ -0,0 +1,20 @@ +import { AuthContext, Query } from '@nimbus/core'; +import { z } from 'zod'; +import type { Recipe } from '../domain/recipe.ts'; +import { RecipeRepository } from '../ports/recipeRepository.ts'; + +export const GetRecipeQuery = Query( + z.literal('recipe.get'), + z.object({ + id: z.string(), + }), +
AuthContext, +); +export type GetRecipeQuery = z.infer; + +export const getRecipe = async ( + query: GetRecipeQuery, + repository: RecipeRepository, +): Promise => { + return await repository.getById(query.data.payload.id); +}; diff --git a/examples/gustav/src/contexts/recipe/core/queries/query.md b/examples/gustav/src/contexts/recipe/core/queries/query.md new file mode 100644 index 0000000..f7cc217 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/queries/query.md @@ -0,0 +1,6 @@ +./src/contexts//core/queries/.ts + +We define the queries and query processors here. + +In other libraries or frameworks this place might be referred to as use-cases. +But as we want to emphasize the CQRS pattern we intentionally split up the write use-cases which occur through commands and the read use-cases which occur through queries. diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts new file mode 100644 index 0000000..2b1bfa5 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts @@ -0,0 +1,17 @@ +import { RouteHandler } from '@nimbus/core'; +import { eventBus } from '../../../../eventBus.ts'; +import { addRecipe, AddRecipeCommand } from '../../core/commands/addRecipe.ts'; +import { Recipe } from '../../core/domain/recipe.ts'; +import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; + +export const addRecipeHandler: RouteHandler< + AddRecipeCommand, + Recipe +> = async (command) => { + const result = await addRecipe(command, recipeMemoryRepository, eventBus); + + return { + statusCode: 200, + data: result, + }; +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/getRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/getRecipe.handler.ts new file mode 100644 index 0000000..fa97e7f --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/getRecipe.handler.ts @@ -0,0 +1,16 @@ +import { RouteHandler } from '@nimbus/core'; +import { Recipe } from '../../core/domain/recipe.ts'; +import { getRecipe, GetRecipeQuery } from '../../core/queries/getRecipe.ts'; +import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; + +export const getRecipeHandler: RouteHandler< + GetRecipeQuery, + Recipe +> = async (query) => { + const recipe = await getRecipe(query, recipeMemoryRepository); + + return { + statusCode: 200, + data: recipe, + }; +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts new file mode 100644 index 0000000..378e922 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts @@ -0,0 +1,21 @@ +import { NimbusOakRouter } from '@nimbus/oak'; +import { AddRecipeCommand } from '../../core/commands/addRecipe.ts'; +import { GetRecipeQuery } from '../../core/queries/getRecipe.ts'; +import { addRecipeHandler } from './addRecipe.handler.ts'; +import { getRecipeHandler } from './getRecipe.handler.ts'; + +export const recipeRouter = new NimbusOakRouter(); + +recipeRouter.command( + '/add-recipe', + 'recipe.add', + AddRecipeCommand, + addRecipeHandler, +); + +recipeRouter.query( + '/:id', + 'recipe.get', + GetRecipeQuery, + getRecipeHandler, +); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts 
b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts new file mode 100644 index 0000000..0fb70c5 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts @@ -0,0 +1,84 @@ +import { NotFoundException } from '@nimbus/core'; +import { ulid } from '@std/ulid'; +import type { Recipe } from '../../core/domain/recipe.ts'; +import { RecipeRepository } from '../../core/ports/recipeRepository.ts'; + +const makeMemoryRepository = (): RecipeRepository => { + const store = new Map(); + + return { + generateId: () => { + return ulid(); + }, + + insert: async (recipe) => { + store.set(recipe.id, recipe); + return recipe; + }, + + update: async (recipe) => { + const existingRecipe = store.get(recipe.id); + + if (!existingRecipe) { + throw new NotFoundException('Recipe not found', { + errorCode: 'RECIPE_NOT_FOUND', + }); + } + + store.set(recipe.id, recipe); + return recipe; + }, + + delete: async (id) => { + const recipe = store.get(id); + + if (!recipe) { + throw new NotFoundException('Recipe not found', { + errorCode: 'RECIPE_NOT_FOUND', + }); + } + + store.delete(id); + }, + + getById: async (id) => { + const recipe = store.get(id); + + if (!recipe) { + throw new NotFoundException('Recipe not found', { + errorCode: 'RECIPE_NOT_FOUND', + }); + } + + return recipe; + }, + + list: async (options) => { + const limit = options?.limit ?? 10; + const offset = options?.offset ?? 0; + const filter = options?.filter ?? {}; + + return Array.from(store.values()) + .filter((recipe) => { + return Object.entries(filter).every(([key, value]) => { + return recipe[key as keyof Recipe] === value; + }); + }) + .slice(offset, offset + limit); + }, + + count: async (options) => { + const filter = options?.filter ?? 
{}; + + return Array.from(store.values()) + .filter((recipe) => { + return Object.entries(filter).every(([key, value]) => { + return recipe[key as keyof Recipe] === value; + }); + }) + .length; + }, + }; +}; + +export const recipeMemoryRepository = makeMemoryRepository(); diff --git a/examples/gustav/src/eventBus.ts b/examples/gustav/src/eventBus.ts new file mode 100644 index 0000000..122f9f6 --- /dev/null +++ b/examples/gustav/src/eventBus.ts @@ -0,0 +1,19 @@ +import { NimbusEventBus, RouteHandlerMap } from '@nimbus/core'; + +export const eventBus = new NimbusEventBus({ + maxRetries: 3, +}); + +export const initEventBusSubscriptions = () => { + const subscriptions: Record = {}; + + for (const [, handlerMap] of Object.entries(subscriptions)) { + for (const eventName of Object.keys(handlerMap)) { + eventBus.subscribeEvent( + eventName, + handlerMap[eventName].inputType, + handlerMap[eventName].handler, + ); + } + } +}; diff --git a/examples/gustav/src/main.ts b/examples/gustav/src/main.ts new file mode 100644 index 0000000..b6589f4 --- /dev/null +++ b/examples/gustav/src/main.ts @@ -0,0 +1,61 @@ +import { + jsonLogFormatter, + parseLogLevel, + prettyLogFormatter, + setupLogger, +} from '@nimbus/core'; +import { requestCorrelationId } from '@nimbus/oak'; +import { Application } from '@oak/oak/application'; +import { oakCors } from '@tajpouria/cors'; +import 'jsr:@std/dotenv/load'; +import process from 'node:process'; +import { exampleAuthMiddleware } from './contexts/iam/infrastructure/http/auth.middleware.ts'; +import { initEventBusSubscriptions } from './eventBus.ts'; +import { initMongoConnectionManager } from './mongodb.ts'; +import { router } from './router.ts'; + +// +// Setup logging with basic options provided by Nimbus +// +// See https://nimbus.overlap.at/guide/logging.html for more information about logging of Nimbus. +// +setupLogger({ + logLevel: parseLogLevel(process.env.LOG_LEVEL), + formatter: process.env.LOG_FORMAT === 'pretty' + ? prettyLogFormatter + : jsonLogFormatter, + useConsoleColors: process.env.LOG_FORMAT === 'pretty', +}); + +// Initialize MongoDB Manager +initMongoConnectionManager(); + +// Initialize Event Bus Subscriptions +initEventBusSubscriptions(); + +// Oak HTTP Server APP +const app = new Application(); + +app.addEventListener('listen', ({ hostname, port, secure }) => { + console.log( + `Listening on: ${secure ? 'https://' : 'http://'}${ + hostname ?? 'localhost' + }:${port}`, + ); +}); + +// CORS Middleware +app.use(oakCors()); + +// Correlation ID Middleware +app.use(requestCorrelationId); + +// Auth Middleware +app.use(exampleAuthMiddleware); + +// API Routes +app.use(router.routes()); +app.use(router.allowedMethods()); + +// Get the server started +app.listen({ port: 3100 }); diff --git a/examples/gustav/src/mongodb.ts b/examples/gustav/src/mongodb.ts new file mode 100644 index 0000000..eed76b8 --- /dev/null +++ b/examples/gustav/src/mongodb.ts @@ -0,0 +1,33 @@ +import { MongoConnectionManager } from '@nimbus/mongodb'; +import { ServerApiVersion } from 'mongodb'; +import process from 'node:process'; + +export let mongoManager: MongoConnectionManager; + +export const initMongoConnectionManager = () => { + mongoManager = MongoConnectionManager.getInstance( + process.env['MONGO_URL'] ?? 
'', + { + connectionTimeout: 1000 * 60 * 5, + mongoClientOptions: { + appName: 'gustav', + serverApi: { + version: ServerApiVersion.v1, + strict: false, + deprecationErrors: true, + }, + maxPoolSize: 10, + minPoolSize: 0, + maxIdleTimeMS: 1000 * 60 * 1, // 1 minutes idle timeout + connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout + socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout + }, + }, + ); + + // Check to see if the MongoDB connection can be cleaned up + // This is to prevent the MongoDB connection from being left open for too long + setInterval(() => { + mongoManager.cleanup().catch(console.error); + }, 1000 * 60); // Check every minute +}; diff --git a/examples/gustav/src/router.ts b/examples/gustav/src/router.ts new file mode 100644 index 0000000..b3121a6 --- /dev/null +++ b/examples/gustav/src/router.ts @@ -0,0 +1,45 @@ +import { getLogger } from '@nimbus/core'; +import { NimbusOakRouter } from '@nimbus/oak'; +import { recipeRouter } from './contexts/recipe/infrastructure/http/recipeRouter.ts'; +import { mongoManager } from './mongodb.ts'; + +export const router = new NimbusOakRouter(); + +router.get('/health', async (ctx) => { + const logger = getLogger(); + const now = new Date().toISOString(); + + const mongoHealth = await mongoManager.healthCheck(); + + logger.info({ + message: 'Health check', + data: { + time: now, + database: { ...mongoHealth }, + ...(ctx.state.correlationId + ? { correlationId: ctx.state.correlationId } + : {}), + ...(ctx.state.authContext + ? { authContext: ctx.state.authContext } + : {}), + }, + }); + + ctx.response.body = { + status: mongoHealth.status === 'healthy' ? 'OK' : 'ERROR', + http: { + status: 'healthy', + }, + database: { ...mongoHealth }, + ...(ctx.state.correlationId + ? { correlationId: ctx.state.correlationId } + : {}), + time: now, + }; +}); + +router.use( + '/recipes', + recipeRouter.routes(), + recipeRouter.allowedMethods(), +); diff --git a/examples/gustav/src/shared/types/unit.ts b/examples/gustav/src/shared/types/unit.ts new file mode 100644 index 0000000..81a03b6 --- /dev/null +++ b/examples/gustav/src/shared/types/unit.ts @@ -0,0 +1,33 @@ +import { z } from 'zod'; + +export const Piece = z.object({ + name: z.literal('Piece'), + short: z.literal('piece'), +}); +export type Piece = z.infer; + +export const TableSpoon = z.object({ + name: z.literal('Table Spoon'), + short: z.literal('EL'), +}); +export type TableSpoon = z.infer; + +export const Gram = z.object({ + name: z.literal('Gram'), + short: z.literal('g'), +}); +export type Gram = z.infer; + +export const Liter = z.object({ + name: z.literal('Liter'), + short: z.literal('l'), +}); +export type Liter = z.infer; + +export const Unit = z.discriminatedUnion('name', [ + Piece, + TableSpoon, + Gram, + Liter, +]); +export type Unit = z.infer; From b6d1f423ce4da77fb14d50d7a152bfff3cd4c1f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Tue, 26 Aug 2025 16:07:11 +0200 Subject: [PATCH 02/37] work in progress to implement event-sourcing and CQRS patterns --- deno.lock | 1 + .../recipe/core/commands/addRecipe.ts | 86 +++++------ .../src/contexts/recipe/core/domain/recipe.ts | 10 +- .../recipe/core/events/recipeAdded.ts | 14 +- ...peEventOutputPort.ts => recipeEventBus.ts} | 2 +- .../recipe/core/ports/recipeEventStore.ts | 10 ++ .../recipe/core/ports/recipeRepository.ts | 2 +- .../infrastructure/http/addRecipe.handler.ts | 8 +- .../messaging/recipeAdded.handler.ts | 17 +++ .../messaging/recipeEventSourcingDBStore.ts | 43 ++++++ 
.../messaging/recipeEventSubscriptions.ts | 10 ++ .../repository/recipeMemoryRepository.ts | 16 +- examples/gustav/src/eventBus.ts | 5 +- .../adapters/eventSourcingDBEventStore.ts | 140 ++++++++++++++++++ .../gustav/src/shared/ports/eventStore.ts | 40 +++++ packages/core/deno.json | 1 + packages/core/src/lib/event/event.ts | 41 ++++- 17 files changed, 386 insertions(+), 60 deletions(-) rename examples/gustav/src/contexts/recipe/core/ports/{recipeEventOutputPort.ts => recipeEventBus.ts} (75%) create mode 100644 examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts create mode 100644 examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts create mode 100644 examples/gustav/src/shared/ports/eventStore.ts diff --git a/deno.lock b/deno.lock index 4d97c66..08606d3 100644 --- a/deno.lock +++ b/deno.lock @@ -286,6 +286,7 @@ "dependencies": [ "jsr:@std/assert@^1.0.10", "jsr:@std/fmt@^1.0.5", + "jsr:@std/ulid@1", "npm:zod@^3.24.1" ] }, diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts index b0e25d5..47b5bfa 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts @@ -1,66 +1,66 @@ -import { AuthContext, Command, InvalidInputException } from '@nimbus/core'; -import { ulid } from '@std/ulid'; +import { + AuthContext, + Command, + createEvent, + InvalidInputException, +} from '@nimbus/core'; +import { getEnv } from '@nimbus/utils'; import { z } from 'zod'; -import { Unit } from '../../../../shared/types/unit.ts'; -import type { Recipe } from '../domain/recipe.ts'; +import { EventStore } from '../../../../shared/ports/eventStore.ts'; +import { Recipe } from '../domain/recipe.ts'; import { RecipeAddedEvent } from '../events/recipeAdded.ts'; -import { RecipeEventOutputPort } from '../ports/recipeEventOutputPort.ts'; -import { RecipeRepository } from '../ports/recipeRepository.ts'; +import { RecipeEventBus } from '../ports/recipeEventBus.ts'; export const AddRecipeCommand = Command( z.literal('recipe.add'), - z.object({ - name: z.string(), - instructions: z.array(z.string()), - ingredients: z.array(z.object({ - name: z.string(), - quantity: z.number(), - unit: Unit, - })), - }), + Recipe, AuthContext, ); export type AddRecipeCommand = z.infer; export const addRecipe = async ( command: AddRecipeCommand, - repository: RecipeRepository, - eventBus: RecipeEventOutputPort, + eventStore: EventStore, + eventBus: RecipeEventBus, ): Promise => { - const count = await repository.count({ - filter: { - name: command.data.payload.name, - }, + const { EVENT_SOURCE, EVENT_TYPE_PREFIX } = getEnv({ + variables: ['EVENT_SOURCE', 'EVENT_TYPE_PREFIX'], }); - if (count > 0) { + const recipeAddedEvent = createEvent({ + source: EVENT_SOURCE, + subject: `/recipes/${command.data.payload.slug}`, + type: `${EVENT_TYPE_PREFIX}.recipe-added`, + data: command.data.payload, + datacontenttype: 'application/json', + }); + + const replayedEvents = await eventStore.readEvents( + recipeAddedEvent.subject, + ); + + if (replayedEvents.length > 0) { throw new InvalidInputException('Recipe already exists', { errorCode: 
'DUPLICATE_RECIPE', reason: - 'A recipe with this name already exists. The name for each recipe must be unique, please choose a different name.', + 'A recipe with this slug already exists. The slug for each recipe must be unique, please choose a different slug.', }); } - const recipe = await repository.insert({ - id: repository.generateId(), - name: command.data.payload.name, - instructions: command.data.payload.instructions, - ingredients: command.data.payload.ingredients, - }); - - eventBus.putEvent({ - specversion: '1.0', - id: ulid(), - source: command.source, - type: 'recipe.added', - data: { - correlationId: command.data.correlationId, - payload: { - id: recipe.id, - name: recipe.name, - }, + const writtenEvents = await eventStore.writeEvents([ + { + source: recipeAddedEvent.source, + subject: recipeAddedEvent.subject, + type: recipeAddedEvent.type, + data: recipeAddedEvent.data, }, - }); + ]); + + console.log('writtenEvents', writtenEvents); + + eventBus.putEvent(recipeAddedEvent); + // TODO: Next work on the readModels and the projectors which update the readModels based on the events. + // On application startup we need to replay all events to rebuild the readModels. - return recipe; + return command.data.payload; }; diff --git a/examples/gustav/src/contexts/recipe/core/domain/recipe.ts b/examples/gustav/src/contexts/recipe/core/domain/recipe.ts index adc1483..333997a 100644 --- a/examples/gustav/src/contexts/recipe/core/domain/recipe.ts +++ b/examples/gustav/src/contexts/recipe/core/domain/recipe.ts @@ -1,8 +1,16 @@ import { z } from 'zod'; import { Ingredient } from './ingredient.ts'; +export const RecipeSlug = z.string() + .min(1) + .regex(/^[a-z0-9-]+$/, { + message: + 'Slug must contain only lowercase letters, numbers, and hyphens', + }); +export type RecipeSlug = z.infer; + export const Recipe = z.object({ - id: z.string(), + slug: RecipeSlug, name: z.string(), instructions: z.array(z.string()), ingredients: z.array(Ingredient), diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts index 8f13831..eaf1b13 100644 --- a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts +++ b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts @@ -1,11 +1,21 @@ -import { Event } from '@nimbus/core'; +import { Event, getLogger } from '@nimbus/core'; import { z } from 'zod'; +import { RecipeSlug } from '../domain/recipe.ts'; export const RecipeAddedEvent = Event( z.literal('recipe.added'), z.object({ - id: z.string(), + slug: RecipeSlug, name: z.string(), }), ); export type RecipeAddedEvent = z.infer; + +export const recipeAdded = ( + event: RecipeAddedEvent, +) => { + getLogger().info({ + message: 'recipeAdded Handler', + data: event, + }); +}; diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeEventOutputPort.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeEventBus.ts similarity index 75% rename from examples/gustav/src/contexts/recipe/core/ports/recipeEventOutputPort.ts rename to examples/gustav/src/contexts/recipe/core/ports/recipeEventBus.ts index e2d70b1..6a1a062 100644 --- a/examples/gustav/src/contexts/recipe/core/ports/recipeEventOutputPort.ts +++ b/examples/gustav/src/contexts/recipe/core/ports/recipeEventBus.ts @@ -1,5 +1,5 @@ import { CloudEvent } from '@nimbus/core'; -export interface RecipeEventOutputPort { +export interface RecipeEventBus { putEvent: >(event: TEvent) => void; } diff --git 
a/examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts new file mode 100644 index 0000000..6082c8d --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts @@ -0,0 +1,10 @@ +type EventSourcingDBEvent = { + source: string; + subject: string; + type: string; + data: any; +}; + +export interface RecipeEventStore { + writeEvents: (events: EventSourcingDBEvent[]) => Promise<string>; +} diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts index a3ccffa..85f85de 100644 --- a/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts +++ b/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts @@ -9,7 +9,7 @@ export interface RecipeRepository { delete: (id: string) => Promise<void>; - getById: (id: string) => Promise<Recipe>; + getBySlug: (id: string) => Promise<Recipe>; list: ( options?: { diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts index 2b1bfa5..8d3dfa4 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts @@ -1,14 +1,18 @@ import { RouteHandler } from '@nimbus/core'; import { eventBus } from '../../../../eventBus.ts'; +import { eventSourcingDBEventStore } from '../../../../shared/adapters/eventSourcingDBEventStore.ts'; import { addRecipe, AddRecipeCommand } from '../../core/commands/addRecipe.ts'; import { Recipe } from '../../core/domain/recipe.ts'; -import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; export const addRecipeHandler: RouteHandler< AddRecipeCommand, Recipe > = async (command) => { - const result = await addRecipe(command, recipeMemoryRepository, eventBus); + const result = await addRecipe( + command, + eventSourcingDBEventStore, + eventBus, + ); return { statusCode: 200, diff --git a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts new file mode 100644 index 0000000..6eb0ae7 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts @@ -0,0 +1,17 @@ +import { RouteHandler } from '@nimbus/core'; +import { + recipeAdded, + RecipeAddedEvent, +} from '../../core/events/recipeAdded.ts'; + +export const recipeAddedHandler: RouteHandler< + RecipeAddedEvent, + RecipeAddedEvent +> = async (event) => { + recipeAdded(event); + + return { + statusCode: 200, + data: event, + }; +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts new file mode 100644 index 0000000..7883540 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts @@ -0,0 +1,43 @@ +import { GenericException } from '@nimbus/core'; +import process from 'node:process'; +import { RecipeEventStore } from '../../core/ports/recipeEventStore.ts'; + +const makeEventSourcingDBStore = (): RecipeEventStore => { + return { + writeEvents: async (events) => { + const payload = JSON.stringify({ + events: events, + }); + + console.log('writeEvents', payload); + + const response = await fetch( +
`${process.env.EVENTSOURCINGDB_API}/write-events`, + { + method: 'POST', + headers: { + 'authorization': + `Bearer ${process.env.EVENTSOURCINGDB_SECRET}`, + 'content-type': 'application/json', + }, + body: payload, + }, + ); + + const body = await response.text(); + + if (!response.ok) { + throw new GenericException('Failed to write events', { + status: response.status, + statusText: response.statusText, + url: response.url, + body, + }); + } + + return body; + }, + }; +}; + +export const recipeEventSourcingDBStore = makeEventSourcingDBStore(); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts new file mode 100644 index 0000000..0df0306 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts @@ -0,0 +1,10 @@ +import { RouteHandlerMap } from '@nimbus/core'; +import { RecipeAddedEvent } from '../../core/events/recipeAdded.ts'; +import { recipeAddedHandler } from './recipeAdded.handler.ts'; + +export const recipeEventSubscriptions: RouteHandlerMap = { + 'recipe.added': { + handler: recipeAddedHandler, + inputType: RecipeAddedEvent, + }, +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts index 0fb70c5..50e0ca7 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts @@ -12,12 +12,12 @@ const makeMemoryRepository = (): RecipeRepository => { }, insert: async (recipe) => { - store.set(recipe.id, recipe); + store.set(recipe.slug, recipe); return recipe; }, update: async (recipe) => { - const existingRecipe = store.get(recipe.id); + const existingRecipe = store.get(recipe.slug); if (!existingRecipe) { throw new NotFoundException('Recipe not found', { @@ -25,12 +25,12 @@ const makeMemoryRepository = (): RecipeRepository => { }); } - store.set(recipe.id, recipe); + store.set(recipe.slug, recipe); return recipe; }, - delete: async (id) => { - const recipe = store.get(id); + delete: async (slug) => { + const recipe = store.get(slug); if (!recipe) { throw new NotFoundException('Recipe not found', { @@ -38,11 +38,11 @@ const makeMemoryRepository = (): RecipeRepository => { }); } - store.delete(id); + store.delete(slug); }, - getById: async (id) => { - const recipe = store.get(id); + getBySlug: async (slug) => { + const recipe = store.get(slug); if (!recipe) { throw new NotFoundException('Recipe not found', { diff --git a/examples/gustav/src/eventBus.ts b/examples/gustav/src/eventBus.ts index 122f9f6..d3c39df 100644 --- a/examples/gustav/src/eventBus.ts +++ b/examples/gustav/src/eventBus.ts @@ -1,11 +1,14 @@ import { NimbusEventBus, RouteHandlerMap } from '@nimbus/core'; +import { recipeEventSubscriptions } from './contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts'; export const eventBus = new NimbusEventBus({ maxRetries: 3, }); export const initEventBusSubscriptions = () => { - const subscriptions: Record = {}; + const subscriptions: Record = { + recipe: recipeEventSubscriptions, + }; for (const [, handlerMap] of Object.entries(subscriptions)) { for (const eventName of Object.keys(handlerMap)) { diff --git a/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts 
b/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts new file mode 100644 index 0000000..274773a --- /dev/null +++ b/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts @@ -0,0 +1,140 @@ +import { CloudEvent, GenericException } from '@nimbus/core'; +import { getEnv } from '@nimbus/utils'; +import { z } from 'zod'; +import { EventStore, EventStoreReadOptions } from '../ports/eventStore.ts'; + +// TODO: this implementation should be moved to @nimbus/eventsourcingdb + +const defaultReadOptions: EventStoreReadOptions = { + recursive: false, +}; + +const makeEventSourcingDBEventStore = (): EventStore => { + const { EVENTSOURCINGDB_API, EVENTSOURCINGDB_SECRET } = getEnv({ + variables: ['EVENTSOURCINGDB_API', 'EVENTSOURCINGDB_SECRET'], + }); + + return { + writeEvents: async (events) => { + const payload = JSON.stringify({ + events: events, + }); + + const response = await fetch( + `${EVENTSOURCINGDB_API}/write-events`, + { + method: 'POST', + headers: { + 'authorization': `Bearer ${EVENTSOURCINGDB_SECRET}`, + 'content-type': 'application/json', + }, + body: payload, + }, + ); + + const body = await response.text(); + + if (!response.ok) { + throw new GenericException('Failed to write events', { + status: response.status, + statusText: response.statusText, + url: response.url, + body, + }); + } + + if (body.startsWith('[')) { + let items: any[]; + + try { + items = JSON.parse(body); + } catch (error: any) { + throw new GenericException('Failed to parse events', { + reason: error.message, + }); + } + + const writtenEvents = items.map((item: any) => + CloudEvent(z.string(), z.any()).parse(item) + ); + + return writtenEvents; + } else { + throw new GenericException('Failed to parse events', { + reason: 'Response was not an array of events', + }); + } + }, + + readEvents: async (subject, options = defaultReadOptions) => { + console.log('readEvents', subject, options); + + const response = await fetch( + `${EVENTSOURCINGDB_API}/read-events`, + { + method: 'POST', + headers: { + 'authorization': `Bearer ${EVENTSOURCINGDB_SECRET}`, + 'content-type': 'application/json', + }, + body: JSON.stringify({ + subject, + options: { + recursive: options.recursive, + ...(options.order && { order: options.order }), + ...(options.lowerBound && { + lowerBound: options.lowerBound, + }), + ...(options.upperBound && { + upperBound: options.upperBound, + }), + ...(options.fromLatestEvent && { + fromLatestEvent: options.fromLatestEvent, + }), + }, + }), + }, + ); + + const body = await response.text(); + + if (!response.ok) { + throw new GenericException('Failed to read events', { + status: response.status, + statusText: response.statusText, + url: response.url, + body, + }); + } + + let items: any[] = []; + + try { + // We return an empty array if there are no events at all. + if (body.length === 0) { + return []; + } + + // Otherwise we turn the NDJSON response into an array of items. + // https://docs.eventsourcingdb.io/getting-started/reading-events + items = body + .split('\n') + .filter((item) => item.startsWith('{')) + .map((item) => JSON.parse(item)); + } catch (error: any) { + throw new GenericException('Failed to parse events', { + reason: error.message, + }); + } + + // Parse all items to Nimbus objects and ensure type safety. 
+ const events = items.map((item) => + CloudEvent(z.string(), z.any()).parse(item.payload) + ); + + return events; + }, + }; +}; + +export const eventSourcingDBEventStore = makeEventSourcingDBEventStore(); diff --git a/examples/gustav/src/shared/ports/eventStore.ts b/examples/gustav/src/shared/ports/eventStore.ts new file mode 100644 index 0000000..3663f6d --- /dev/null +++ b/examples/gustav/src/shared/ports/eventStore.ts @@ -0,0 +1,40 @@ +import { CloudEvent } from '@nimbus/core'; + +// TODO: this interface should be moved to @nimbus/core + +export type EventStoreWriteEvent = { + source: string; + subject: string; + type: string; + data: any; +}; + +export type EventStoreReadOptions = { + recursive?: boolean; + order?: 'chronological' | 'antichronological'; + lowerBound?: EventStoreBound; + upperBound?: EventStoreBound; + fromLatestEvent?: EventStoreMarker; +}; + +export type EventStoreBound = { + id: number; + type: 'inclusive' | 'exclusive'; +}; + +export type EventStoreMarker = { + subject: string; + type: string; + ifEventIsMissing: 'read-everything' | 'read-nothing'; +}; + +export interface EventStore { + writeEvents: ( + events: EventStoreWriteEvent[], + ) => Promise[]>; + + readEvents: ( + subject: string, + options?: EventStoreReadOptions, + ) => Promise[]>; +} diff --git a/packages/core/deno.json b/packages/core/deno.json index 28ad3dc..139bbe2 100644 --- a/packages/core/deno.json +++ b/packages/core/deno.json @@ -33,6 +33,7 @@ "imports": { "@std/assert": "jsr:@std/assert@^1.0.10", "@std/fmt": "jsr:@std/fmt@^1.0.5", + "@std/ulid": "jsr:@std/ulid@^1.0.0", "zod": "npm:zod@^3.24.1" } } diff --git a/packages/core/src/lib/event/event.ts b/packages/core/src/lib/event/event.ts index 21b5752..ed9f49d 100644 --- a/packages/core/src/lib/event/event.ts +++ b/packages/core/src/lib/event/event.ts @@ -1,3 +1,4 @@ +import { ulid } from '@std/ulid'; import { z, type ZodType } from 'zod'; import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; import { MessageEnvelope } from '../messageEnvelope.ts'; @@ -17,7 +18,9 @@ export const Event = < return CloudEvent( typeType, MessageEnvelope(dataType, z.never()), - ); + ).extend({ + subject: z.string().min(1), + }); }; /** @@ -34,3 +37,39 @@ type EventType< export type Event = z.infer< EventType, ZodType> >; + +/** + * Input type for the createEvent function. + */ +export type CreateEventInput = { + source: string; + type: string; + subject: string; + data: any; + datacontenttype?: string; + dataschema?: string; +}; + +/** + * Create a new event. 
+ */ +export const createEvent = >({ + source, + type, + subject, + data, + datacontenttype, + dataschema, +}: CreateEventInput): TEvent => { + return { + specversion: '1.0', + id: ulid(), + source: source, + type: type, + data: data, + subject: subject, + time: new Date().toISOString(), + ...(datacontenttype && { datacontenttype }), + ...(dataschema && { dataschema }), + } as TEvent; +}; From 235b9c40a4104ced52627acbbf44a3c5168511a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 4 Sep 2025 18:33:51 +0200 Subject: [PATCH 03/37] reworks router and messages with better validation design --- packages/core/deno.json | 3 +- packages/core/src/index.ts | 25 +- packages/core/src/lib/command/command.ts | 39 --- packages/core/src/lib/event/event.ts | 75 ----- packages/core/src/lib/exception/index.ts | 6 - packages/core/src/lib/log/index.ts | 4 - packages/core/src/lib/message/command.ts | 123 +++++++++ packages/core/src/lib/message/event.ts | 127 +++++++++ packages/core/src/lib/message/message.ts | 21 ++ packages/core/src/lib/message/query.ts | 93 +++++++ packages/core/src/lib/message/validation.ts | 45 +++ packages/core/src/lib/query/query.ts | 39 --- packages/core/src/lib/router/index.ts | 1 - packages/core/src/lib/router/router.test.ts | 276 ++++++++++++------- packages/core/src/lib/router/router.ts | 183 ++++++++---- packages/core/src/lib/router/testCommand.ts | 50 ++-- packages/core/src/lib/router/testEvent.ts | 104 +++++-- packages/core/src/lib/router/testQuery.ts | 35 ++- packages/core/src/lib/validator/validator.ts | 228 +++++++++++++++ 19 files changed, 1082 insertions(+), 395 deletions(-) delete mode 100644 packages/core/src/lib/command/command.ts delete mode 100644 packages/core/src/lib/event/event.ts delete mode 100644 packages/core/src/lib/exception/index.ts delete mode 100644 packages/core/src/lib/log/index.ts create mode 100644 packages/core/src/lib/message/command.ts create mode 100644 packages/core/src/lib/message/event.ts create mode 100644 packages/core/src/lib/message/message.ts create mode 100644 packages/core/src/lib/message/query.ts create mode 100644 packages/core/src/lib/message/validation.ts delete mode 100644 packages/core/src/lib/query/query.ts delete mode 100644 packages/core/src/lib/router/index.ts create mode 100644 packages/core/src/lib/validator/validator.ts diff --git a/packages/core/deno.json b/packages/core/deno.json index 139bbe2..eaaca74 100644 --- a/packages/core/deno.json +++ b/packages/core/deno.json @@ -34,6 +34,7 @@ "@std/assert": "jsr:@std/assert@^1.0.10", "@std/fmt": "jsr:@std/fmt@^1.0.5", "@std/ulid": "jsr:@std/ulid@^1.0.0", - "zod": "npm:zod@^3.24.1" + "ajv": "npm:ajv@^8.17.1", + "ajv-formats": "npm:ajv-formats@^3.0.1" } } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index cdf9ef0..989ed74 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,10 +1,17 @@ -export * from './lib/authContext.ts'; -export * from './lib/cloudEvent/cloudEvent.ts'; -export * from './lib/command/command.ts'; -export * from './lib/event/event.ts'; export * from './lib/event/eventBus.ts'; -export * from './lib/exception/index.ts'; -export * from './lib/log/index.ts'; -export * from './lib/messageEnvelope.ts'; -export * from './lib/query/query.ts'; -export * from './lib/router/index.ts'; +export * from './lib/exception/exception.ts'; +export * from './lib/exception/forbiddenException.ts'; +export * from './lib/exception/genericException.ts'; +export * from './lib/exception/invalidInputException.ts'; 
+export * from './lib/exception/notFoundException.ts'; +export * from './lib/exception/unauthorizedException.ts'; +export * from './lib/log/logFormatter.ts'; +export * from './lib/log/logger.ts'; +export * from './lib/log/logLevel.ts'; +export * from './lib/log/options.ts'; +export * from './lib/message/command.ts'; +export * from './lib/message/event.ts'; +export * from './lib/message/message.ts'; +export * from './lib/message/query.ts'; +export * from './lib/router/router.ts'; +export * from './lib/validator/validator.ts'; diff --git a/packages/core/src/lib/command/command.ts b/packages/core/src/lib/command/command.ts deleted file mode 100644 index 4c9f9d7..0000000 --- a/packages/core/src/lib/command/command.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { z, ZodType } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { MessageEnvelope } from '../messageEnvelope.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the Command object. - */ -export const Command = < - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, ->( - typeType: TType, - dataType: TData, - authContextType: TAuthContext, -) => { - return CloudEvent( - typeType, - MessageEnvelope(dataType, authContextType), - ); -}; - -/** - * Inference type to create the Command type. - */ -type CommandType< - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, -> = ReturnType>; - -/** - * The type of the Command object. - */ -export type Command = z.infer< - CommandType, ZodType, ZodType> ->; diff --git a/packages/core/src/lib/event/event.ts b/packages/core/src/lib/event/event.ts deleted file mode 100644 index ed9f49d..0000000 --- a/packages/core/src/lib/event/event.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { ulid } from '@std/ulid'; -import { z, type ZodType } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { MessageEnvelope } from '../messageEnvelope.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the Event object. - */ -export const Event = < - TType extends ZodType, - TData extends ZodType, ->( - typeType: TType, - dataType: TData, -) => { - return CloudEvent( - typeType, - MessageEnvelope(dataType, z.never()), - ).extend({ - subject: z.string().min(1), - }); -}; - -/** - * Inference type to create the Event type. - */ -type EventType< - TType extends ZodType, - TData extends ZodType, -> = ReturnType>; - -/** - * The type of the Event object. - */ -export type Event = z.infer< - EventType, ZodType> ->; - -/** - * Input type for the createEvent function. - */ -export type CreateEventInput = { - source: string; - type: string; - subject: string; - data: any; - datacontenttype?: string; - dataschema?: string; -}; - -/** - * Create a new event. 
- */ -export const createEvent = >({ - source, - type, - subject, - data, - datacontenttype, - dataschema, -}: CreateEventInput): TEvent => { - return { - specversion: '1.0', - id: ulid(), - source: source, - type: type, - data: data, - subject: subject, - time: new Date().toISOString(), - ...(datacontenttype && { datacontenttype }), - ...(dataschema && { dataschema }), - } as TEvent; -}; diff --git a/packages/core/src/lib/exception/index.ts b/packages/core/src/lib/exception/index.ts deleted file mode 100644 index 3628465..0000000 --- a/packages/core/src/lib/exception/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -export * from './exception.ts'; -export * from './forbiddenException.ts'; -export * from './genericException.ts'; -export * from './invalidInputException.ts'; -export * from './notFoundException.ts'; -export * from './unauthorizedException.ts'; diff --git a/packages/core/src/lib/log/index.ts b/packages/core/src/lib/log/index.ts deleted file mode 100644 index 79cac34..0000000 --- a/packages/core/src/lib/log/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './logFormatter.ts'; -export * from './logger.ts'; -export * from './logLevel.ts'; -export * from './options.ts'; diff --git a/packages/core/src/lib/message/command.ts b/packages/core/src/lib/message/command.ts new file mode 100644 index 0000000..78130f8 --- /dev/null +++ b/packages/core/src/lib/message/command.ts @@ -0,0 +1,123 @@ +/** + * A command is a message that is sent to tell the system + * to perform an action. Typically commands come in via an API + * like HTTP POST requests, gRPC calls, or similar inbound traffic. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @property {string} specversion - The version of the CloudEvents specification which the event uses. + * @property {string} id - A globally unique identifier of the event. + * @property {string} correlationid - A globally unique identifier that indicates a correlation to previous and subsequent messages. + * @property {string} time - The time when the command was created. + * @property {string} source - A URI reference that identifies the system that is constructing the command. + * @property {string} type - The type must follow the CloudEvents naming convention, which uses a reversed domain name as a namespace, followed by a domain-specific name. + * @property {string} subject - An identifier for an object or entity the command is about (optional). + * @property {TData} data - The actual data, containing the specific business payload. + * @property {string} datacontenttype - A MIME type that indicates the format that the data is in (optional). + * @property {string} dataschema - An absolute URL to the schema that the data adheres to (optional). + * + * @template TData - The type of the data. 
+ * + * @example + * const submitOrderCommand: Command = { + * specversion: '1.0', + * id: '123', + * correlationid: '456', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.submit-order', + * data: { + * customerId: '666', + * cartId: '123', + * }, + * datacontenttype: 'application/json', + * }; + */ +export type Command = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + subject?: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; + +export const commandSchema = { + $id: 'https://nimbus.overlap.at/schemas/command/v1', + type: 'object', + required: [ + 'specversion', + 'id', + 'correlationid', + 'time', + 'source', + 'type', + 'data', + ], + properties: { + specversion: { + const: '1.0', + }, + id: { + type: 'string', + minLength: 1, + }, + correlationid: { + type: 'string', + minLength: 1, + }, + time: { + type: 'string', + format: 'date-time', + minLength: 1, + }, + source: { + type: 'string', + format: 'uri-reference', + minLength: 1, + }, + type: { + type: 'string', + minLength: 1, + }, + subject: { + type: 'string', + minLength: 1, + }, + data: { + anyOf: [ + { + type: 'object', + }, + { + type: 'string', + }, + { + type: 'number', + }, + { + type: 'array', + }, + { + type: 'boolean', + }, + ], + }, + datacontenttype: { + type: 'string', + minLength: 1, + }, + dataschema: { + type: 'string', + format: 'uri', + minLength: 1, + }, + }, +}; diff --git a/packages/core/src/lib/message/event.ts b/packages/core/src/lib/message/event.ts new file mode 100644 index 0000000..b7a113c --- /dev/null +++ b/packages/core/src/lib/message/event.ts @@ -0,0 +1,127 @@ +/** + * An event is a message that is emitted by the system to notify + * subscribers that something has happened. Typically events are + * the result of a command that was executed before. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @property {string} specversion - The version of the CloudEvents specification which the event uses. + * @property {string} id - A globally unique identifier of the event. + * @property {string} correlationid - A globally unique identifier that indicates a correlation to previous and subsequent messages to this event. + * @property {string} time - The time when the event was created. + * @property {string} source - A URI reference that identifies the system that is constructing the event. + * @property {string} type - The type must follow the CloudEvents naming convention, which uses a reversed domain name as a namespace, followed by a domain-specific name. + * @property {string} subject - An identifier for an object or entity the event is about (optional). + * @property {TData} data - The actual data, containing the specific business payload. + * @property {string} datacontenttype - A MIME type that indicates the format that the data is in (optional). + * @property {string} dataschema - An absolute URL to the schema that the data adheres to (optional). + * + * @template TData - The type of the data. 
+ * + * @example + * const orderSubmittedEvent: Event = { + * specversion: '1.0', + * id: '123', + * correlationid: '456', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.submit-order', + * subject: '/orders/42', + * data: { + * orderId: '42', + * customerId: '666', + * cartId: '123', + * status: 'submitted', + * }, + * datacontenttype: 'application/json', + * }; + */ +export type Event = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + subject: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; + +export const eventSchema = { + $id: 'https://nimbus.overlap.at/schemas/event/v1', + type: 'object', + required: [ + 'specversion', + 'id', + 'correlationid', + 'time', + 'source', + 'type', + 'subject', + 'data', + ], + properties: { + specversion: { + const: '1.0', + }, + id: { + type: 'string', + minLength: 1, + }, + correlationid: { + type: 'string', + minLength: 1, + }, + time: { + type: 'string', + format: 'date-time', + minLength: 1, + }, + source: { + type: 'string', + format: 'uri-reference', + minLength: 1, + }, + type: { + type: 'string', + minLength: 1, + }, + subject: { + type: 'string', + minLength: 1, + }, + data: { + anyOf: [ + { + type: 'object', + }, + { + type: 'string', + }, + { + type: 'number', + }, + { + type: 'array', + }, + { + type: 'boolean', + }, + ], + }, + datacontenttype: { + type: 'string', + minLength: 1, + }, + dataschema: { + type: 'string', + format: 'uri', + minLength: 1, + }, + }, +}; diff --git a/packages/core/src/lib/message/message.ts b/packages/core/src/lib/message/message.ts new file mode 100644 index 0000000..0b4eb26 --- /dev/null +++ b/packages/core/src/lib/message/message.ts @@ -0,0 +1,21 @@ +import type { Command } from './command.ts'; +import type { Event } from './event.ts'; +import type { Query } from './query.ts'; + +/** + * A message is a communication object that can be passed between + * systems, modules, functions etc. + * + * In the Nimbus ecosystem it is either a Command, Event or Query. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @template TData - The type of the data. + */ +export type Message = + | Command + | Event + | Query; diff --git a/packages/core/src/lib/message/query.ts b/packages/core/src/lib/message/query.ts new file mode 100644 index 0000000..daa730d --- /dev/null +++ b/packages/core/src/lib/message/query.ts @@ -0,0 +1,93 @@ +/** + * A query is a message that is sent to the system to request + * information. + * + * Nimbus sticks to the CloudEvents specifications for all messages + * to make it easier to work with these messages across multiple systems. + * + * @see https://cloudevents.io/ for more information. + * + * @property {string} specversion - The version of the CloudEvents specification which the query uses. + * @property {string} id - A globally unique identifier of the query. + * @property {string} time - The time when the query was created. + * @property {string} source - A URI reference that identifies the system that is constructing the query. + * @property {string} type - The type must follow the CloudEvents naming convention, which uses a reversed domain name as a namespace, followed by a domain-specific name. + * @property {TData} data - The actual data, containing the specific business payload. 
+ * @property {string} datacontenttype - A MIME type that indicates the format that the data is in (optional). + * @property {string} dataschema - An absolute URL to the schema that the data adheres to (optional). + * + * @template TData - The type of the data. + * + * @example + * const getOrdersQuery: Query = { + * specversion: '1.0', + * id: '123', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.get-orders', + * data: { + * customerId: '666', + * status: 'fulfilled', + * }, + * datacontenttype: 'application/json', + * }; + */ +export type Query = { + specversion: '1.0'; + id: string; + time: string; + source: string; + type: string; + data: TData; + datacontenttype?: string; + dataschema?: string; +}; + +export const querySchema = { + $id: 'https://nimbus.overlap.at/schemas/query/v1', + type: 'object', + required: [ + 'specversion', + 'id', + 'time', + 'source', + 'type', + 'data', + ], + properties: { + specversion: { + const: '1.0', + }, + id: { + type: 'string', + minLength: 1, + }, + time: { + type: 'string', + format: 'date-time', + minLength: 1, + }, + source: { + type: 'string', + format: 'uri-reference', + minLength: 1, + }, + type: { + type: 'string', + minLength: 1, + }, + data: { + type: 'object', + additionalProperties: true, + }, + datacontenttype: { + type: 'string', + minLength: 1, + }, + dataschema: { + type: 'string', + format: 'uri', + minLength: 1, + }, + }, +}; diff --git a/packages/core/src/lib/message/validation.ts b/packages/core/src/lib/message/validation.ts new file mode 100644 index 0000000..90110de --- /dev/null +++ b/packages/core/src/lib/message/validation.ts @@ -0,0 +1,45 @@ +import { Ajv, type AnySchema } from 'ajv'; +import { InvalidInputException } from '../exception/invalidInputException.ts'; + +// Some import shenanigans to make it work in here... +import _addFormats from 'ajv-formats'; +const addFormats = _addFormats as unknown as typeof _addFormats.default; + +const ajv = new Ajv(); +addFormats(ajv); + +export type ValidationResult = { + data: TData; + error: undefined; +} | { + data: undefined; + error: InvalidInputException; +}; + +// TODO: find a way to inject a custom AJV instance from outside, or provide a generic interface and functions to register and precompile schemas +// +// Idea: +// Use asyncApi Document for messages, and precompile all schemas for messages +// with a set $id and then reference the precompiled schemas with the +// dataschema property on the messages +// +// https://ajv.js.org/guide/managing-schemas.html#compiling-during-initialization + +export const validate = ( + schema: AnySchema, + data: unknown, +): ValidationResult => { + const validate = ajv.compile(schema); + const isValid = validate(data); + + if (isValid) { + return { data: data as TData, error: undefined }; + } else { + return { + data: undefined, + error: new InvalidInputException('The provided input is invalid', { + issues: validate.errors, + }), + }; + } +}; diff --git a/packages/core/src/lib/query/query.ts b/packages/core/src/lib/query/query.ts deleted file mode 100644 index da124c5..0000000 --- a/packages/core/src/lib/query/query.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { z, ZodType } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { MessageEnvelope } from '../messageEnvelope.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the Query object. 
- */ -export const Query = < - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, ->( - typeType: TType, - dataType: TData, - authContextType: TAuthContext, -) => { - return CloudEvent( - typeType, - MessageEnvelope(dataType, authContextType), - ); -}; - -/** - * Inference type to create the Query type. - */ -type QueryType< - TType extends ZodType, - TData extends ZodType, - TAuthContext extends ZodType, -> = ReturnType>; - -/** - * The type of the Query object. - */ -export type Query = z.infer< - QueryType, ZodType, ZodType> ->; diff --git a/packages/core/src/lib/router/index.ts b/packages/core/src/lib/router/index.ts deleted file mode 100644 index 8bb167e..0000000 --- a/packages/core/src/lib/router/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './router.ts'; diff --git a/packages/core/src/lib/router/router.test.ts b/packages/core/src/lib/router/router.test.ts index 30edb78..35fe89b 100644 --- a/packages/core/src/lib/router/router.test.ts +++ b/packages/core/src/lib/router/router.test.ts @@ -1,38 +1,39 @@ +import type { Event, Query } from '@nimbus/core'; import { assertEquals, assertInstanceOf } from '@std/assert'; import { GenericException } from '../exception/genericException.ts'; -import { - InvalidInputException, - NotFoundException, -} from '../exception/index.ts'; +import { InvalidInputException } from '../exception/invalidInputException.ts'; +import { NotFoundException } from '../exception/notFoundException.ts'; +import type { Command } from '../message/command.ts'; +import { getValidator } from '../validator/validator.ts'; import { createRouter } from './router.ts'; -import { commandHandlerMap, type TestCommand } from './testCommand.ts'; -import { eventHandlerMap, type TestEvent } from './testEvent.ts'; -import { queryHandlerMap, type TestQuery } from './testQuery.ts'; +import { commandHandlerMap, testCommand } from './testCommand.ts'; +import { + eventHandlerMap, + testEvent, + testEventSchema, + testEventWithException, + testEventWithInvalidData, +} from './testEvent.ts'; +import { queryHandlerMap, testQuery } from './testQuery.ts'; Deno.test('Router handles input with an unknown handler name', async () => { const router = createRouter({ + type: 'command', handlerMap: {}, }); - const input = { + const messageWithUnknownType: Command = { specversion: '1.0', id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'UNKNOWN_EVENT', - data: { - payload: { - testException: false, - aNumber: 1, - }, - correlationId: '123', - authContext: { - sub: 'admin@host.tld', - }, - }, + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.unknown-type', + data: {}, }; try { - const result = await router(input); + const result = await router(messageWithUnknownType); assertEquals(typeof result === 'undefined', true); } catch (exception: any) { assertInstanceOf(exception, NotFoundException); @@ -40,64 +41,141 @@ Deno.test('Router handles input with an unknown handler name', async () => { } }); -Deno.test('Router handles valid command input', async () => { - const commandRouter = createRouter({ - handlerMap: commandHandlerMap, +Deno.test('Router validates command input', async () => { + const router = createRouter({ + type: 'command', + handlerMap: {}, }); - const input: TestCommand = { + const messageWithUnknownType = { specversion: '1.0', id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.command', - data: { - payload: { - aNumber: 1, - }, - correlationId: '123', 
- authContext: { - sub: 'admin@host.tld', - }, - }, + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.unknown-type', + data: {}, + }; + + try { + const result = await router(messageWithUnknownType); + assertEquals(typeof result === 'undefined', true); + } catch (exception: any) { + assertInstanceOf(exception, InvalidInputException); + assertEquals(exception.message, 'The provided input is invalid'); + assertEquals(exception.details, { + issues: [ + { + instancePath: '', + schemaPath: '#/required', + keyword: 'required', + params: { missingProperty: 'correlationid' }, + message: "must have required property 'correlationid'", + }, + ], + }); + } +}); + +Deno.test('Router validates query input', async () => { + const router = createRouter({ + type: 'query', + handlerMap: {}, + }); + + const messageWithUnknownType = { + id: '123', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.unknown-type', + data: {}, + }; + + try { + const result = await router(messageWithUnknownType); + assertEquals(typeof result === 'undefined', true); + } catch (exception: any) { + assertInstanceOf(exception, InvalidInputException); + assertEquals(exception.message, 'The provided input is invalid'); + assertEquals(exception.details, { + issues: [ + { + instancePath: '', + schemaPath: '#/required', + keyword: 'required', + params: { missingProperty: 'specversion' }, + message: "must have required property 'specversion'", + }, + ], + }); + } +}); + +Deno.test('Router validates event input', async () => { + const router = createRouter({ + type: 'event', + handlerMap: {}, + }); + + const messageWithUnknownType = { + id: '123', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.unknown-type', + data: {}, }; try { - const result = await commandRouter(input); + const result = await router(messageWithUnknownType); + assertEquals(typeof result === 'undefined', true); + } catch (exception: any) { + assertInstanceOf(exception, InvalidInputException); + assertEquals(exception.message, 'The provided input is invalid'); + assertEquals(exception.details, { + issues: [ + { + instancePath: '', + schemaPath: '#/required', + keyword: 'required', + params: { missingProperty: 'specversion' }, + message: "must have required property 'specversion'", + }, + ], + }); + } +}); + +Deno.test('Router handles valid command input', async () => { + const commandRouter = createRouter>({ + type: 'command', + handlerMap: commandHandlerMap, + }); + + try { + const result = await commandRouter(testCommand); + assertEquals(result, { statusCode: 200, headers: { 'Content-Type': 'application/json', }, data: { - aNumber: 1, + aNumber: 42, }, }); } catch (exception: any) { + console.log(exception); assertEquals(typeof exception === 'undefined', true); } }); Deno.test('Router handles valid query input', async () => { - const queryRouter = createRouter({ + const queryRouter = createRouter>({ + type: 'query', handlerMap: queryHandlerMap, }); - const input: TestQuery = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.query', - data: { - payload: {}, - correlationId: '123', - authContext: { - sub: 'admin@host.tld', - }, - }, - }; - try { - const result = await queryRouter(input); + const result = await queryRouter(testQuery); assertEquals(result, { statusCode: 200, headers: { @@ -108,28 +186,44 @@ Deno.test('Router handles valid query input', async () => { }, }); } catch 
(exception: any) { + console.log(exception); assertEquals(typeof exception === 'undefined', true); } }); -Deno.test('Router handles valid event input', async () => { - const eventRouter = createRouter({ +Deno.test('Router handles message with no dataschema correctly', async () => { + const validator = getValidator(); + validator.addSchema(testEventSchema); + + const eventRouter = createRouter>({ + type: 'event', handlerMap: eventHandlerMap, }); - const input: TestEvent = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.event', - data: { - testException: false, - aNumber: 1, - }, - }; + try { + const result = await eventRouter({ + ...testEvent, + dataschema: undefined, + }); + assertEquals(typeof result === 'undefined', true); + } catch (exception: any) { + console.log(exception); + assertInstanceOf(exception, InvalidInputException); + assertEquals(exception.message, 'No dataschema provided for message'); + } +}); + +Deno.test('Router handles valid event input', async () => { + const validator = getValidator(); + validator.addSchema(testEventSchema); + + const eventRouter = createRouter>({ + type: 'event', + handlerMap: eventHandlerMap, + }); try { - const result = await eventRouter(input); + const result = await eventRouter(testEvent); assertEquals(result, { statusCode: 200, headers: { @@ -137,7 +231,7 @@ Deno.test('Router handles valid event input', async () => { }, data: { testException: false, - aNumber: 1, + aNumber: 42, }, }); } catch (exception: any) { @@ -146,23 +240,16 @@ Deno.test('Router handles valid event input', async () => { }); Deno.test('Router handles invalid event input', async () => { - const eventRouter = createRouter({ + const validator = getValidator(); + validator.addSchema(testEventSchema); + + const eventRouter = createRouter>({ + type: 'event', handlerMap: eventHandlerMap, }); - const invalidInput = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.event', - data: { - testException: false, - aNumber: '123', // This should trigger a validation error - }, - }; - try { - const result = await eventRouter(invalidInput); + const result = await eventRouter(testEventWithInvalidData); assertEquals(typeof result === 'undefined', true); } catch (exception: any) { assertInstanceOf(exception, InvalidInputException); @@ -170,14 +257,15 @@ Deno.test('Router handles invalid event input', async () => { exception.message, 'The provided input is invalid', ); + assertEquals(exception.details, { issues: [ { - code: 'invalid_type', - expected: 'number', - received: 'string', - path: ['data', 'aNumber'], - message: 'Expected number, received string', + instancePath: '/data/aNumber', + schemaPath: '#/properties/data/properties/aNumber/type', + keyword: 'type', + params: { type: 'number' }, + message: 'must be number', }, ], }); @@ -185,23 +273,13 @@ Deno.test('Router handles invalid event input', async () => { }); Deno.test('Router handles valid event input but handler returns an exception', async () => { - const eventRouter = createRouter({ + const eventRouter = createRouter>({ + type: 'event', handlerMap: eventHandlerMap, }); - const input: TestEvent = { - specversion: '1.0', - id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'test.event', - data: { - testException: true, - aNumber: 1, - }, - }; - try { - const result = await eventRouter(input); + const result = await eventRouter(testEventWithException); assertEquals(typeof result === 'undefined', true); } catch 
(exception: any) { assertInstanceOf(exception, GenericException); diff --git a/packages/core/src/lib/router/router.ts b/packages/core/src/lib/router/router.ts index a93a10b..ea6574c 100644 --- a/packages/core/src/lib/router/router.ts +++ b/packages/core/src/lib/router/router.ts @@ -1,64 +1,75 @@ -import { ZodError, type ZodType } from 'zod'; +import { getLogger, InvalidInputException } from '@nimbus/core'; import { GenericException } from '../exception/genericException.ts'; -import { InvalidInputException } from '../exception/invalidInputException.ts'; import { NotFoundException } from '../exception/notFoundException.ts'; +import { type Command, commandSchema } from '../message/command.ts'; +import { type Event, eventSchema } from '../message/event.ts'; +import type { Message } from '../message/message.ts'; +import { type Query, querySchema } from '../message/query.ts'; +import { getValidator } from '../validator/validator.ts'; /** - * The result of a route handler. + * The result type of a route handler. * * @template TData - The type of the data returned by the route handler. */ -export type RouteHandlerResult = { +export type RouteHandlerResult = { statusCode: number; headers?: Record; data?: TData; }; /** - * A route handler. + * The route handler type. * * @template TInput - The type of the input to the route handler. - * @template TResultData - The type of the data returned by the route handler. + * @template TOutputData - The type of the data returned by the route handler. */ -export type RouteHandler = ( +export type RouteHandler = ( input: TInput, -) => Promise>; +) => Promise>; -export type RouteHandlerMap = Record< +/** + * The RouteHandlerMap type. + * + * @template TInput - The type of the input to the route handler. + * @template TOutputData - The type of the data returned by the route handler. + */ +export type RouteHandlerMap = Record< string, { - handler: RouteHandler; - inputType: ZodType; + handler: RouteHandler; + allowUnsafeInput?: boolean; } >; /** - * A Nimbus router. + * The router type. * - * @template TInput - The type of the input to the router. - * @template TResultData - The type of the data returned by the router. + * @template TOutputData - The type of the data returned by the router. */ -export type Router = ( - input: TInput, -) => Promise>; +export type Router = ( + input: unknown, +) => Promise>; /** - * The input for creating a Nimbus router. + * The input type for creating a Nimbus router. * * @template TInput - The type of the input to the router. * @template TResultData - The type of the data returned by the router. */ -export type CreateRouterInput = { - handlerMap: RouteHandlerMap; - inputLogFunc?: (input: TInput) => void; +export type CreateRouterInput = { + type: 'command' | 'query' | 'event'; + handlerMap: RouteHandlerMap; + inputLogFunc?: (input: unknown) => void; }; /** * Creates a Nimbus router. * * @param {CreateRouterInput} input + * @param {'command' | 'query' | 'event'} type - The type of input messages the router handles * @param {RouteHandlerMap} input.handlerMap - The map of route handlers. - * @param {Function} input.inputLogFunc - Optional function to log input received by the router. + * @param {Function} input.inputLogFunc - Function to log input received by the router (optional). * * @returns {Router} The Nimbus router. 
* @@ -66,43 +77,53 @@ export type CreateRouterInput = { * ```ts * import { createRouter } from "@nimbus/core"; * - * import { getAccountHandler } from "./queries/getAccount.handler.ts"; - * import { GetAccountQuery } from "../core/queries/getAccount.ts"; - * - * import { addAccountHandler } from "./commands/addAccount.handler.ts"; - * import { AddAccountCommand } from "../core/command/addAccount.ts"; - * - * import { accountAddedHandler } from "./events/accountAdded.handler.ts"; - * import { AccountAddedEvent } from "../core/events/accountAdded.ts"; + * const commandRouter = createRouter({ + * type: 'command', + * handlerMap: { + * 'at.overlap.nimbus.get-account': { + * handler: getAccountHandler + * }, + * }, + * }); * - * const accountRouter = createRouter({ + * const queryRouter = createRouter({ + * type: 'query', * handlerMap: { - * GET_ACCOUNT: { + * 'at.overlap.nimbus.get-account': { * handler: getAccountHandler, - * inputType: GetAccountQuery, + * allowUnsafeInput: true, // Disables input validation (not recommended) * }, - * ADD_ACCOUNT: { - * handler: addAccountHandler, - * inputType: AddAccountCommand, - * }, - * ACCOUNT_ADDED: { + * }, + * }); + * + * const eventRouter = createRouter({ + * type: 'event', + * handlerMap: { + * 'at.overlap.nimbus.account-added': { * handler: accountAddedHandler, - * inputType: AccountAddedEvent, * }, * }, + * inputLogFunc: (input) => { + * getLogger().info({ + * category: 'Events', + * message: `Received event: ${input.type}`, + * }); + * }, * }); * ``` */ -export const createRouter = ({ +export const createRouter = ({ + type, handlerMap, inputLogFunc, -}: CreateRouterInput): Router => { +}: CreateRouterInput): Router => { + const validator = getValidator(); + /** - * The Nimbus router. - * - * Takes any input, validates the input and routes it to the appropriate handler. + * The Nimbus router takes unknown input, + * validates the input and routes it to the appropriate handler. * - * @param {any} input - The input to the router. + * @param {unknown} input - The input to the router. * * @returns {Promise} The result of the route handler. * @@ -110,33 +131,83 @@ export const createRouter = ({ * @throws {InvalidInputException} - If the input is invalid. * @throws {GenericException} - If an error occurs while handling the input. 
*/ - const router: Router = (input) => { + const router: Router = ( + input: unknown, + ): Promise => { if (inputLogFunc) { inputLogFunc(input); } - if (!handlerMap[input.type]) { + let parseResult; + if (type === 'command') { + parseResult = validator.validate(commandSchema.$id, input); + } else if (type === 'query') { + parseResult = validator.validate(querySchema.$id, input); + } else if (type === 'event') { + parseResult = validator.validate(eventSchema.$id, input); + } else { + throw new GenericException( + 'Invalid router type', + { + reason: + `The router type must be either "command", "query" or "event"`, + }, + ); + } + + if (parseResult.error) { + throw parseResult.error; + } + + const message = parseResult.data; + + if (!handlerMap[message.type]) { throw new NotFoundException( 'Route handler not found', { - reason: `Could not find a handler for "${input.type}"`, + reason: + `Could not find a handler for message type: "${message.type}"`, }, ); } - const { handler, inputType } = handlerMap[input.type]; + const { handler, allowUnsafeInput } = handlerMap[message.type]; - try { - const validInput = inputType.parse(input); + let validMessage: TInput; + if (message.dataschema) { + const { data, error } = validator.validate( + message.dataschema, + message, + ); - return handler(validInput); - } catch (error) { - if (error instanceof ZodError) { - throw new InvalidInputException().fromZodError(error); + if (error) { + throw error; + } else { + validMessage = data; + } + } else { + if (allowUnsafeInput) { + getLogger().warn({ + category: 'Nimbus', + message: 'No dataschema found for message', + }); } else { - throw new GenericException().fromError(error as Error); + throw new InvalidInputException( + 'No dataschema provided for message', + { + errorCode: 'MISSING_DATASCHEMA', + reason: `The dataschema is missing on the message + and "allowUnsafeInput" is not enabled to the message type. + It is recommended to always provide a dataschema + for input validation. Otherwise set "allowUnsafeInput" + to true for the route handler.`, + }, + ); } + validMessage = message as TInput; } + + return handler(validMessage); }; return router; diff --git a/packages/core/src/lib/router/testCommand.ts b/packages/core/src/lib/router/testCommand.ts index 7ea0bf7..b513b58 100644 --- a/packages/core/src/lib/router/testCommand.ts +++ b/packages/core/src/lib/router/testCommand.ts @@ -1,38 +1,34 @@ -import { z } from 'zod'; -import { Command } from '../command/command.ts'; +import type { Command } from '../message/command.ts'; import type { RouteHandler, RouteHandlerMap } from './router.ts'; /** - * Zod schema for the TestCommandData. + * The type for the testCommand data */ -export const TestCommandData = z.object({ - aNumber: z.number(), -}); - -/** - * The type of the TestCommandData. - */ -export type TestCommandData = z.infer; - -/** - * Zod schema for the TestCommand. - */ -export const TestCommand = Command( - z.literal('test.command'), - TestCommandData, - z.object({}), -); +export type TestCommandData = { + aNumber: number; +}; /** - * The type of the TestCommand. + * A test command */ -export type TestCommand = z.infer; +export const testCommand: Command = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-command', + data: { + aNumber: 42, + }, + datacontenttype: 'application/json', +}; /** * The handler for the TestCommand. 
*/ export const testCommandHandler: RouteHandler< - TestCommand, + Command, TestCommandData > = (event) => { return Promise.resolve({ statusCode: 200, @@ -40,16 +36,16 @@ export const testCommandHandler: RouteHandler< headers: { 'Content-Type': 'application/json', }, - data: event.data.payload, + data: event.data, }); }; /** * The handler map for the TestCommand. */ -export const commandHandlerMap: RouteHandlerMap = { - 'test.command': { +export const commandHandlerMap: RouteHandlerMap> = { + 'at.overlap.nimbus.test-command': { handler: testCommandHandler, - inputType: TestCommand, + allowUnsafeInput: true, }, }; diff --git a/packages/core/src/lib/router/testEvent.ts b/packages/core/src/lib/router/testEvent.ts index 55f967f..b980043 100644 --- a/packages/core/src/lib/router/testEvent.ts +++ b/packages/core/src/lib/router/testEvent.ts @@ -1,42 +1,97 @@ -import { z } from 'zod'; -import { CloudEvent } from '../cloudEvent/cloudEvent.ts'; -import { NotFoundException } from '../exception/notFoundException.ts'; +import type { SchemaObject } from 'ajv'; +import { GenericException } from '../exception/genericException.ts'; +import { type Event, eventSchema } from '../message/event.ts'; import type { RouteHandler, RouteHandlerMap } from './router.ts'; -/** - * Zod schema for the TestEventData. - */ -export const TestEventData = z.object({ - testException: z.boolean(), - aNumber: z.number(), -}); +type TestEventData = { + testException: boolean; + aNumber: number; +}; + +export const testEventSchema: SchemaObject = { + ...eventSchema, + $id: 'https://api.nimbus.overlap.at/schemas/event/test/v1', + properties: { + ...eventSchema.properties, + data: { + type: 'object', + required: ['testException', 'aNumber'], + properties: { + testException: { type: 'boolean' }, + aNumber: { type: 'number' }, + }, + }, + dataschema: { + const: 'https://api.nimbus.overlap.at/schemas/event/test/v1', + }, + }, +}; /** - * The type of the TestEventData. + * A test event without an exception */ -export type TestEventData = z.infer; +export const testEvent: Event = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-event', + subject: '/test', + data: { + testException: false, + aNumber: 42, + }, + datacontenttype: 'application/json', + dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', +}; /** - * Zod schema for the TestEvent. + * A test event that triggers an exception */ -export const TestEvent = CloudEvent( - z.literal('test.event'), - TestEventData, -); +export const testEventWithException: Event = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-event', + subject: '/test', + data: { + testException: true, + aNumber: 42, + }, + datacontenttype: 'application/json', + dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', +}; /** - * The type of the TestEvent.
+ * A test event with invalid data */ -export type TestEvent = z.infer; +export const testEventWithInvalidData: Event = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-event', + subject: '/test', + data: { + testException: true, + aNumber: '42', // This should trigger a validation error + }, + datacontenttype: 'application/json', + dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', +}; /** * The handler for the TestEvent. */ -export const testEventHandler: RouteHandler = ( +export const testEventHandler: RouteHandler> = ( event, ) => { - if (event.data.testException) { - throw new NotFoundException(); + if (event.data?.testException) { + throw new GenericException(); } return Promise.resolve({ @@ -51,9 +106,8 @@ export const testEventHandler: RouteHandler = ( /** * The handler map for the TestEvent. */ -export const eventHandlerMap: RouteHandlerMap = { - 'test.event': { +export const eventHandlerMap: RouteHandlerMap> = { + 'at.overlap.nimbus.test-event': { handler: testEventHandler, - inputType: TestEvent, }, }; diff --git a/packages/core/src/lib/router/testQuery.ts b/packages/core/src/lib/router/testQuery.ts index b14809c..104519c 100644 --- a/packages/core/src/lib/router/testQuery.ts +++ b/packages/core/src/lib/router/testQuery.ts @@ -1,25 +1,32 @@ -import { z } from 'zod'; -import { Query } from '../query/query.ts'; +import type { Query } from '../message/query.ts'; import type { RouteHandler, RouteHandlerMap } from './router.ts'; /** - * Zod schema for the TestQuery. + * The type for the testQuery data */ -export const TestQuery = Query( - z.literal('test.query'), - z.object({}), - z.object({}), -); +export type TestQueryData = { + filter: string; +}; /** - * The type of the TestQuery. + * A test query */ -export type TestQuery = z.infer; +export const testQuery: Query = { + specversion: '1.0', + id: '123', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-query', + data: { + filter: '42', + }, + datacontenttype: 'application/json', +}; /** * The handler for the TestQuery. */ -export const testQueryHandler: RouteHandler = () => { +export const testQueryHandler: RouteHandler> = () => { return Promise.resolve({ statusCode: 200, headers: { @@ -34,9 +41,9 @@ export const testQueryHandler: RouteHandler = () => { /** * The handler map for the TestQuery. */ -export const queryHandlerMap: RouteHandlerMap = { - 'test.query': { +export const queryHandlerMap: RouteHandlerMap> = { + 'at.overlap.nimbus.test-query': { handler: testQueryHandler, - inputType: TestQuery, + allowUnsafeInput: true, }, }; diff --git a/packages/core/src/lib/validator/validator.ts b/packages/core/src/lib/validator/validator.ts new file mode 100644 index 0000000..1dec137 --- /dev/null +++ b/packages/core/src/lib/validator/validator.ts @@ -0,0 +1,228 @@ +import type { SchemaObject, ValidateFunction } from 'ajv'; +import { Ajv } from 'ajv'; +import { GenericException } from '../exception/genericException.ts'; +import { InvalidInputException } from '../exception/invalidInputException.ts'; +import { commandSchema } from '../message/command.ts'; +import { eventSchema } from '../message/event.ts'; +import { querySchema } from '../message/query.ts'; + +// Some import shenanigans to make it work in here...
+import _addFormats from 'ajv-formats'; +const addFormats = _addFormats as unknown as typeof _addFormats.default; + +export type ValidatorOptions = { + ajv?: Ajv; +}; + +export type ValidationResult = { + data: TData; + error: undefined; +} | { + data: undefined; + error: InvalidInputException | GenericException; +}; + +/** + * The Validator is used to validate data against a schema. + * This way e.g. the Nimbus router validates the route inputs. + * + * Use the `setupValidator` function to configure the Validator. + * It is possible to pass a custom Ajv instance to the Validator if needed. + * + * Use the `getValidator` function to get the instance of the Validator. + * + * Use the validator's `addSchema` method to add schemas to the Validator. + * + * Use the validator's `validate` method to validate data against a schema. + * Pass in a schemaId to use a precompiled schema added to the validator before + * or pass in a schema object to compile the schema on the fly. + * + * @example + * ```ts + * import { setupValidator, getValidator } from "@nimbus/core"; + * + * setupValidator({ + * ajv: myCustomAjvInstance, + * }); + * + * const validator = getValidator(); + * + * validator.addSchema(mySchema); + * + * const result1 = validator.validate('theSchemaId', myData); + * const result2 = validator.validate(mySchema, myData); + * ``` + */ +export class Validator { + private static _instance: Validator; + + private readonly _ajv: Ajv; + + constructor(options?: ValidatorOptions) { + if (options?.ajv) { + this._ajv = options.ajv; + } else { + this._ajv = new Ajv(); + addFormats(this._ajv); + } + } + + /** + * Configure the Validator. + */ + public static configure(options?: ValidatorOptions): void { + const validator = new Validator(options); + + validator.addSchema(querySchema); + validator.addSchema(eventSchema); + validator.addSchema(commandSchema); + + Validator._instance = validator; + } + + /** + * Get the Validator instance. + * + * @returns {Validator} The Validator instance + */ + public static getInstance(): Validator { + if (!Validator._instance) { + this.configure(); + } + + return Validator._instance; + } + + /** + * Validate data against a schema. + * + * @param {string | SchemaObject} schema - Either a schema id to use a precompiled schema or a schema object. + * @param {unknown} data - The data to validate. + * + * @returns {ValidationResult} The validation result with either the data, a GenericException if the schema is not found, or an InvalidInputException if the data is invalid. + * + * @template TData - The type of the valid data. + * + * @example + * ```ts + * import { getValidator } from "@nimbus/core"; + * + * getValidator().validate( + * 'https://api.nimbus.overlap.at/schemas/command/v1', + * { + * foo: 'bar', + * }, + * ); + * ``` + */ + public validate( + schema: string | SchemaObject, + data: unknown, + ): ValidationResult { + let validateFunc: ValidateFunction; + + if (typeof schema === 'string') { + const func = this._ajv.getSchema(schema); + + if (!func) { + return { + data: undefined, + error: new GenericException('Schema not found', { + reason: `A string was provided for the schema + but no schema with id "${schema}" was added to the validator.
+ Use the addSchema method to add a schema to the validator.`, + }), + }; + } + + validateFunc = func; + } else { + validateFunc = this._ajv.compile(schema); + } + + const isValid = validateFunc(data); + + if (isValid) { + return { data: data as TData, error: undefined }; + } else { + return { + data: undefined, + error: new InvalidInputException( + 'The provided input is invalid', + { + issues: validateFunc.errors, + }, + ), + }; + } + } + + /** + * Add a schema to the validator. + * + * @param {SchemaObject} schema - The JSON Schema to add. + * + * @throws {GenericException} If the schema does not have a $id property. + * + * @example + * ```ts + * import { getValidator } from "@nimbus/core"; + * + * getValidator().addSchema({ + * $id: "https://api.nimbus.overlap.at/schemas/recipe/v1", + * type: "object", + * properties: { + * foo: { + * type: "string", + * }, + * }, + * }); + * ``` + */ + public addSchema(schema: SchemaObject): void { + if (!schema.$id) { + throw new GenericException('Schema must have a $id', { + reason: + 'The schema must have a $id to be added to the validator.', + }); + } + + this._ajv.addSchema(schema, schema.$id); + } +} + +/** + * Configure the Validator. + * + * @param {ValidatorOptions} options - The options for the Validator + * + * @example + * ```ts + * import { + * setupValidator, + * } from "@nimbus/core"; + * + * setupValidator({ + * ajv: myCustomAjvInstance, + * }); + * ``` + */ +export const setupValidator = (options?: ValidatorOptions): void => { + Validator.configure(options); +}; + +/** + * Get the Validator instance. + * + * @returns {Validator} The Validator instance + * + * @example + * ```ts + * import { getValidator } from "@nimbus/core"; + * + * const validator = getValidator(); + * ``` + */ +export const getValidator = (): Validator => { + return Validator.getInstance(); +}; From 60aa55de308b072ca4aa485e0fb24dd03506ccd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 4 Sep 2025 18:38:29 +0200 Subject: [PATCH 04/37] adds client package with todo --- deno.json | 1 + packages/client/README.md | 125 +++++++++++++++++++++++++++++++++ packages/client/deno.json | 34 +++++++++ packages/client/src/index.ts | 1 + packages/client/src/lib/sdk.ts | 6 ++ 5 files changed, 167 insertions(+) create mode 100644 packages/client/README.md create mode 100644 packages/client/deno.json create mode 100644 packages/client/src/index.ts create mode 100644 packages/client/src/lib/sdk.ts diff --git a/deno.json b/deno.json index cdfa951..a886b64 100644 --- a/deno.json +++ b/deno.json @@ -4,6 +4,7 @@ }, "workspace": [ "./packages/core", + "./packages/client", "./packages/mongodb", "./packages/oak", "./packages/utils", diff --git a/packages/client/README.md b/packages/client/README.md new file mode 100644 index 0000000..91635a3 --- /dev/null +++ b/packages/client/README.md @@ -0,0 +1,125 @@ +Nimbus + +# Nimbus Core + +The core package of the Nimbus framework. + +Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. + +# Examples + +These are some quick examples on how the basics of the Nimbus framework work. + +For detailed documentation, please refer to the [Nimbus documentation](https://nimbus.overlap.at). 
+ +## Command + +```typescript +import { AuthContext, Command } from "@nimbus/core"; +import { z } from "zod"; + +export const AddAccountData = z.object({ + name: z.string(), +}); +export type AddAccountData = z.infer; + +export const AddAccountCommand = Command( + z.literal("account.add"), + AddAccountData, + AuthContext +); +export type AddAccountCommand = z.infer; +``` + +## Query + +```typescript +import { AuthContext, Query } from "@nimbus/core"; +import { z } from "zod"; + +export const GetAccountQuery = Query( + z.literal("account.get"), + z.object({ + id: z.string().length(24), + }), + AuthContext +); +export type GetAccountQuery = z.infer; +``` + +## Event + +```typescript +import { Event } from "@nimbus/core"; +import { z } from "zod"; +import { Account } from "../account.type.ts"; + +export const AccountAddedData = z.object({ + account: Account, +}); +export type AccountAddedData = z.infer; + +export const AccountAddedEvent = Event( + z.literal("account.added"), + AccountAddedData +); +export type AccountAddedEvent = z.infer; +``` + +## Router + +```typescript +import { createRouter } from "@nimbus/core"; + +// ... + +const accountRouter = createRouter({ + handlerMap: { + "account.get": { + handler: getAccountHandler, + inputType: GetAccountQuery, + }, + "account.add": { + handler: addAccountHandler, + inputType: AddAccountCommand, + }, + }, +}); +``` + +## EventBus + +```typescript +import { NimbusEventBus } from "@nimbus/core"; + +// ... + +export const eventBus = new NimbusEventBus({ + maxRetries: 3, + retryDelay: 3000, +}); + +eventBus.subscribeEvent( + "account.added", + AccountAddedEvent, + accountAddedHandler +); + +eventBus.putEvent({ + specversion: "1.0", + id: "123", + source: command.source, + type: "account.added", + data: { + correlationId: command.metadata.correlationId, + payload: { account: account }, + }, +}); +``` + +# License + +The MIT License (MIT) diff --git a/packages/client/deno.json b/packages/client/deno.json new file mode 100644 index 0000000..5bc4e5e --- /dev/null +++ b/packages/client/deno.json @@ -0,0 +1,34 @@ +{ + "name": "@nimbus/client", + "version": "0.17.0", + "license": "MIT", + "author": "Daniel Gördes (https://overlap.at)", + "repository": { + "type": "git", + "url": "https://github.com/overlap-dev/Nimbus" + }, + "bugs": { + "url": "https://github.com/overlap-dev/Nimbus/issues" + }, + "homepage": "https://nimbus.overlap.at", + "exports": "./src/index.ts", + "fmt": { + "include": ["src/"], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "lint": { + "include": ["src/"], + "rules": { + "exclude": ["no-explicit-any", "no-slow-types"] + } + }, + "test": { + "include": ["src/"] + }, + "imports": {} +} diff --git a/packages/client/src/index.ts b/packages/client/src/index.ts new file mode 100644 index 0000000..949e442 --- /dev/null +++ b/packages/client/src/index.ts @@ -0,0 +1 @@ +export * from "./lib/sdk.ts"; diff --git a/packages/client/src/lib/sdk.ts b/packages/client/src/lib/sdk.ts new file mode 100644 index 0000000..2f3da34 --- /dev/null +++ b/packages/client/src/lib/sdk.ts @@ -0,0 +1,6 @@ +// TODO: implement some kind of SDK creator +// Support the messages handling like creating commands, events and queries. 
+ +export const sdk = () => { + console.log("To be implemented"); +}; From 3ea4b08301d5f58f36df87170966be149f1ba464 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 4 Sep 2025 18:50:32 +0200 Subject: [PATCH 05/37] refactors eventBus to work with new router --- packages/core/src/lib/event/eventBus.test.ts | 19 ++-- packages/core/src/lib/event/eventBus.ts | 105 +++++++++++-------- 2 files changed, 69 insertions(+), 55 deletions(-) diff --git a/packages/core/src/lib/event/eventBus.test.ts b/packages/core/src/lib/event/eventBus.test.ts index 2c95fae..ff84121 100644 --- a/packages/core/src/lib/event/eventBus.test.ts +++ b/packages/core/src/lib/event/eventBus.test.ts @@ -1,6 +1,6 @@ -import { assertInstanceOf } from '@std/assert'; +import { assertEquals, assertInstanceOf } from '@std/assert'; import { GenericException } from '../exception/genericException.ts'; -import type { Event } from './event.ts'; +import type { Event } from '../message/event.ts'; import { NimbusEventBus } from './eventBus.ts'; Deno.test('EventBus rejects event that exceeds the 64KB size limit', () => { @@ -8,16 +8,16 @@ Deno.test('EventBus rejects event that exceeds the 64KB size limit', () => { maxRetries: 3, }); - const event: Event = { + const event: Event = { specversion: '1.0', id: '123', - source: 'https://nimbus.overlap.at/api/test', - type: 'oversized.event', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-event', + subject: '/test', data: { - correlationId: '123', - payload: { - bigData: 'x'.repeat(65 * 1024), - }, + bigData: 'x'.repeat(65 * 1024), }, }; @@ -29,4 +29,5 @@ Deno.test('EventBus rejects event that exceeds the 64KB size limit', () => { } assertInstanceOf(exception, GenericException); + assertEquals(exception.message, 'Event size exceeds the limit of 64KB'); }); diff --git a/packages/core/src/lib/event/eventBus.ts b/packages/core/src/lib/event/eventBus.ts index 047941d..c98661b 100644 --- a/packages/core/src/lib/event/eventBus.ts +++ b/packages/core/src/lib/event/eventBus.ts @@ -1,14 +1,24 @@ -import { - createRouter, - GenericException, - getLogger, - type RouteHandler, - type Router, -} from '@nimbus/core'; import EventEmitter from 'node:events'; -import type { ZodType } from 'zod'; -import type { CloudEvent } from '../cloudEvent/cloudEvent.ts'; +import { GenericException } from '../exception/genericException.ts'; +import { getLogger } from '../log/logger.ts'; +import type { Event } from '../message/event.ts'; +import type { RouteHandler, Router } from '../router/router.ts'; +import { createRouter } from '../router/router.ts'; +/** + * The input type for subscribing to an event. + */ +export type SubscribeEventInput = { + type: string; + handler: RouteHandler; + allowUnsafeInput?: boolean; + onError?: (error: any, event: Event) => void; + options?: NimbusEventBusOptions; +}; + +/** + * The type for the NimbusEventBus options. 
+ */ export type NimbusEventBusOptions = { maxRetries?: number; retryDelay?: number; @@ -25,21 +35,23 @@ export type NimbusEventBusOptions = { * retryDelay: 3000, * }); * - * eventBus.subscribeEvent( - * 'account.added', - * AccountAddedEvent, - * accountAddedHandler, - * ); + * eventBus.subscribeEvent({ + * type: 'at.overlap.nimbus.account-added', + * handler: accountAddedHandler, + * }); * * eventBus.putEvent({ * specversion: '1.0', * id: '123', - * source: 'https://nimbus.overlap.at/account/add-account', - * type: 'account.added', + * correlationid: '456', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.account-added', * data: { - * correlationId: command.metadata.correlationId, - * payload: { account: account }, + * accountId: '123', * }, + * datacontenttype: 'application/json', + * dataschema: 'https://nimbus.overlap.at/schemas/events/account-added/v1', * }); * ``` */ @@ -80,18 +92,19 @@ export class NimbusEventBus { * eventBus.putEvent({ * specversion: '1.0', * id: '123', - * source: 'https://nimbus.overlap.at/api/account/add', - * type: 'account.added', + * correlationid: '456', + * time: '2025-01-01T00:00:00Z', + * source: 'https://nimbus.overlap.at', + * type: 'at.overlap.nimbus.account-added', * data: { - * correlationId: command.metadata.correlationId, - * payload: { account: account }, + * accountId: '123', * }, + * datacontenttype: 'application/json', + * dataschema: 'https://nimbus.overlap.at/schemas/events/account-added/v1', * }); * ``` */ - public putEvent>( - event: TEvent, - ): void { + public putEvent(event: Event): void { this._validateEventSize(event); this._eventEmitter.emit(event.type, event); @@ -101,7 +114,6 @@ export class NimbusEventBus { * Subscribe to an event. * * @param {string} eventType - The type of event to subscribe to. - * @param {ZodType} eventSchema - The schema used for validation of the event to subscribe to. * @param {RouteHandler} handler - The handler to call when the event got published. * @param {Function} [onError] - The function to call when the event could not be handled after the maximum number of retries. * @param {NimbusEventBusOptions} [options] - The options for the event bus. @@ -110,39 +122,40 @@ export class NimbusEventBus { * * @example * ```ts - * eventBus.subscribeEvent( - * 'account.added', - * AccountAddedEvent, - * accountAddedHandler, - * ); + * eventBus.subscribeEvent({ + * type: 'at.overlap.nimbus.account-added', + * handler: accountAddedHandler, + * allowUnsafeInput: true, // Disables input validation (not recommended) + * }); * ``` */ - public subscribeEvent( - eventType: string, - eventSchema: ZodType, - handler: RouteHandler, - onError?: (error: any, event: CloudEvent) => void, - options?: NimbusEventBusOptions, - ): void { + public subscribeEvent({ + type, + handler, + allowUnsafeInput, + onError, + options, + }: SubscribeEventInput): void { getLogger().info({ category: 'Nimbus', - message: `Subscribed to ${eventType} event`, + message: `Subscribed to ${type} event`, }); const maxRetries = options?.maxRetries ?? this._maxRetries; const retryDelay = options?.retryDelay ?? 
this._retryDelay; const nimbusRouter = createRouter({ + type: 'event', handlerMap: { - [eventType]: { + [type]: { handler, - inputType: eventSchema, + ...(allowUnsafeInput && { allowUnsafeInput }), }, }, inputLogFunc: this._logInput, }); - const handleEvent = async (event: CloudEvent) => { + const handleEvent = async (event: Event) => { try { await this._processEvent( nimbusRouter, @@ -163,7 +176,7 @@ export class NimbusEventBus { } }; - this._eventEmitter.on(eventType, handleEvent); + this._eventEmitter.on(type, handleEvent); } private _logInput(input: any) { @@ -179,7 +192,7 @@ export class NimbusEventBus { private async _processEvent( nimbusRouter: Router, - event: CloudEvent, + event: Event, maxRetries: number, retryDelay: number, ) { @@ -217,11 +230,11 @@ export class NimbusEventBus { * Validate the size of the event. * * To comply with the CloudEvent spec a transmitted event - * can not have a maximum size of 64KB. + * can only have a maximum size of 64KB. * * @param event - The event to validate. */ - private _validateEventSize(event: CloudEvent): void { + private _validateEventSize(event: Event): void { const eventJson = JSON.stringify(event); const eventSizeBytes = new TextEncoder().encode(eventJson).length; const maxSizeBytes = 64 * 1024; // 64KB From db02abd281531f553c7c6330127d933d34aa9c98 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Sat, 4 Oct 2025 10:17:31 +0200 Subject: [PATCH 06/37] wip: first iteration on routing and messages --- .claudeignore | 5 + TODO.md | 14 ++ deno.lock | 56 +++++- examples/gustav/deno.json | 3 +- .../recipe/core/commands/addRecipe.ts | 47 +++-- .../contexts/recipe/core/domain/ingredient.ts | 13 +- .../src/contexts/recipe/core/domain/recipe.ts | 23 +-- .../recipe/core/events/recipeAdded.ts | 28 +-- .../recipe/core/ports/recipeEventBus.ts | 5 - .../recipe/core/ports/recipeEventStore.ts | 10 - .../contexts/recipe/core/queries/getRecipe.ts | 22 +-- .../infrastructure/http/recipeRouter.ts | 21 -- .../messaging/recipeAdded.handler.ts | 17 -- .../messaging/recipeEventSourcingDBStore.ts | 43 ---- .../messaging/recipeEventSubscriptions.ts | 10 - .../repository/recipeMemoryRepository.ts | 6 + .../{http => router}/addRecipe.handler.ts | 4 +- .../{http => router}/getRecipe.handler.ts | 0 .../router/recipeAdded.handler.ts | 19 ++ examples/gustav/src/eventBus.ts | 22 --- examples/gustav/src/exampleApiSpec.json | 183 ++++++++++++++++++ examples/gustav/src/main.ts | 16 +- examples/gustav/src/router.ts | 45 ----- .../adapters/eventSourcingDBEventStore.ts | 13 +- examples/gustav/src/{ => shared}/mongodb.ts | 0 .../gustav/src/shared/ports/eventStore.ts | 6 +- .../src/shared/routing/commandRouter.ts | 20 ++ .../gustav/src/shared/routing/httpRouter.ts | 154 +++++++++++++++ .../gustav/src/shared/routing/queryRouter.ts | 20 ++ examples/gustav/src/shared/types/unit.ts | 38 +--- packages/core/src/lib/authContext.ts | 20 -- .../core/src/lib/cloudEvent/absoluteUri.ts | 21 -- .../core/src/lib/cloudEvent/cloudEvent.ts | 55 ------ packages/core/src/lib/cloudEvent/mediaType.ts | 55 ------ .../core/src/lib/cloudEvent/timeRFC3339.ts | 40 ---- .../core/src/lib/cloudEvent/uriReference.ts | 24 --- packages/core/src/lib/message/query.ts | 7 + packages/core/src/lib/message/validation.ts | 45 ----- packages/core/src/lib/messageEnvelope.ts | 53 ----- .../core/src/lib/router/asyncApiRouter.ts | 54 ++++++ .../core/src/lib/router/messageHandler.ts | 55 ++++++ packages/core/src/lib/router/router.ts | 17 +- packages/core/src/lib/validator/validator.ts | 2 + 
packages/oak/deno.json | 1 + .../lib/middleware/requestCorrelationId.ts | 17 -- packages/oak/src/lib/router.ts | 116 ++++++----- 46 files changed, 742 insertions(+), 703 deletions(-) create mode 100644 .claudeignore create mode 100644 TODO.md delete mode 100644 examples/gustav/src/contexts/recipe/core/ports/recipeEventBus.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts rename examples/gustav/src/contexts/recipe/infrastructure/{http => router}/addRecipe.handler.ts (82%) rename examples/gustav/src/contexts/recipe/infrastructure/{http => router}/getRecipe.handler.ts (100%) create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts delete mode 100644 examples/gustav/src/eventBus.ts create mode 100644 examples/gustav/src/exampleApiSpec.json delete mode 100644 examples/gustav/src/router.ts rename examples/gustav/src/{ => shared}/mongodb.ts (100%) create mode 100644 examples/gustav/src/shared/routing/commandRouter.ts create mode 100644 examples/gustav/src/shared/routing/httpRouter.ts create mode 100644 examples/gustav/src/shared/routing/queryRouter.ts delete mode 100644 packages/core/src/lib/authContext.ts delete mode 100644 packages/core/src/lib/cloudEvent/absoluteUri.ts delete mode 100644 packages/core/src/lib/cloudEvent/cloudEvent.ts delete mode 100644 packages/core/src/lib/cloudEvent/mediaType.ts delete mode 100644 packages/core/src/lib/cloudEvent/timeRFC3339.ts delete mode 100644 packages/core/src/lib/cloudEvent/uriReference.ts delete mode 100644 packages/core/src/lib/message/validation.ts delete mode 100644 packages/core/src/lib/messageEnvelope.ts create mode 100644 packages/core/src/lib/router/asyncApiRouter.ts create mode 100644 packages/core/src/lib/router/messageHandler.ts delete mode 100644 packages/oak/src/lib/middleware/requestCorrelationId.ts diff --git a/.claudeignore b/.claudeignore new file mode 100644 index 0000000..cf6b454 --- /dev/null +++ b/.claudeignore @@ -0,0 +1,5 @@ +.env +.env.* +.otel_token +*.pem +*.key diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..848cc8a --- /dev/null +++ b/TODO.md @@ -0,0 +1,14 @@ +# Plan and ToDos + +## Vision + +Have a CLI tool to initialize a new Nimbus project. +This sets up the project structure and installs the necessary dependencies. + +The Structure will follow a Domain Driven Design (DDD) approach with a Hexagonal Architecture. + +The Source of truth for the API will be an AsyncAPI specification in JSON format. + +Based on the AsyncAPI specification the Nimbus framework should register the commands, queries and event on the router structure. +Also the router will provide a route for a health check and routes for a schema registry. +The user now only has to implement the business logic and connects it to the route handlers. 
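To make the vision above a bit more concrete, here is a minimal, hypothetical sketch of how routes could be registered from an AsyncAPI-style message list. The `SpecMessage` shape, the `commandRouterFromSpec` name and the `handlers` lookup are illustrative assumptions only (generic parameters are shown as `any` for brevity); `createRouter`, `getValidator` and the `{ handler }` map entries are the pieces introduced by these patches.

```typescript
import { createRouter, getValidator, type RouteHandler } from '@nimbus/core';

// Assumed, simplified shape of a message taken from the AsyncAPI document.
type SpecMessage = {
    name: string; // e.g. 'at.overlap.nimbus.add-recipe'
    payload: { $id: string } & Record<string, unknown>; // JSON Schema with a global $id
};

// Build a command router from the spec: register each message schema with the
// validator and wire the matching business-logic handler into the handler map.
export const commandRouterFromSpec = (
    messages: SpecMessage[],
    handlers: Record<string, RouteHandler<any, any>>,
) => {
    const validator = getValidator();
    const handlerMap: Record<string, { handler: RouteHandler<any, any> }> = {};

    for (const message of messages) {
        validator.addSchema(message.payload);

        if (handlers[message.name]) {
            handlerMap[message.name] = { handler: handlers[message.name] };
        }
    }

    return createRouter({ type: 'command', handlerMap });
};
```

Query and event routers could be derived the same way, which is presumably what the new `asyncApiRouter.ts` and `exampleApiSpec.json` files in this commit aim at.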
diff --git a/deno.lock b/deno.lock index 08606d3..eec6298 100644 --- a/deno.lock +++ b/deno.lock @@ -3,12 +3,14 @@ "specifiers": { "jsr:@oak/commons@1": "1.0.0", "jsr:@oak/oak@^17.1.4": "17.1.4", + "jsr:@oak/oak@^17.1.6": "17.1.6", "jsr:@std/assert@1": "1.0.10", "jsr:@std/assert@^1.0.10": "1.0.10", "jsr:@std/bytes@1": "1.0.4", "jsr:@std/bytes@^1.0.2": "1.0.4", "jsr:@std/crypto@1": "1.0.3", "jsr:@std/dotenv@*": "0.225.3", + "jsr:@std/dotenv@~0.225.5": "0.225.5", "jsr:@std/encoding@1": "1.0.6", "jsr:@std/encoding@^1.0.5": "1.0.6", "jsr:@std/fmt@^1.0.4": "1.0.5", @@ -22,6 +24,8 @@ "jsr:@std/ulid@1": "1.0.0", "jsr:@tajpouria/cors@^1.2.1": "1.2.1", "npm:@types/node@*": "22.5.4", + "npm:ajv-formats@^3.0.1": "3.0.1_ajv@8.17.1", + "npm:ajv@^8.17.1": "8.17.1", "npm:effect@^3.17.9": "3.17.9", "npm:mongodb@^6.12.0": "6.12.0", "npm:path-to-regexp@6.2.1": "6.2.1", @@ -66,6 +70,18 @@ "npm:path-to-regexp@^6.3.0" ] }, + "@oak/oak@17.1.6": { + "integrity": "c7eef2eec733fba8e72b679bba3b8cf2fceccf5ef489a8b8fb43571908c0335d", + "dependencies": [ + "jsr:@oak/commons", + "jsr:@std/assert@1", + "jsr:@std/bytes@1", + "jsr:@std/http", + "jsr:@std/media-types", + "jsr:@std/path", + "npm:path-to-regexp@^6.3.0" + ] + }, "@std/assert@1.0.10": { "integrity": "59b5cbac5bd55459a19045d95cc7c2ff787b4f8527c0dd195078ff6f9481fbb3", "dependencies": [ @@ -81,6 +97,9 @@ "@std/dotenv@0.225.3": { "integrity": "a95e5b812c27b0854c52acbae215856d9cce9d4bbf774d938c51d212711e8d4a" }, + "@std/dotenv@0.225.5": { + "integrity": "9ce6f9d0ec3311f74a32535aa1b8c62ed88b1ab91b7f0815797d77a6f60c922f" + }, "@std/encoding@1.0.6": { "integrity": "ca87122c196e8831737d9547acf001766618e78cd8c33920776c7f5885546069" }, @@ -146,6 +165,24 @@ "@types/webidl-conversions" ] }, + "ajv-formats@3.0.1_ajv@8.17.1": { + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "dependencies": [ + "ajv" + ], + "optionalPeers": [ + "ajv" + ] + }, + "ajv@8.17.1": { + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dependencies": [ + "fast-deep-equal", + "fast-uri", + "json-schema-traverse", + "require-from-string" + ] + }, "bson@6.10.1": { "integrity": "sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==", "deprecated": true @@ -163,6 +200,15 @@ "pure-rand" ] }, + "fast-deep-equal@3.1.3": { + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "fast-uri@3.1.0": { + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==" + }, + "json-schema-traverse@1.0.0": { + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, "memory-pager@1.5.0": { "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, @@ -193,6 +239,9 @@ "pure-rand@6.1.0": { "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==" }, + "require-from-string@2.0.2": { + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" + }, "sparse-bitfield@3.0.3": { "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", "dependencies": [ @@ -265,7 +314,8 @@ "members": { "examples/gustav": { "dependencies": [ - "jsr:@oak/oak@^17.1.4", + "jsr:@oak/oak@^17.1.6", 
+ "jsr:@std/dotenv@~0.225.5", "jsr:@std/ulid@1", "jsr:@tajpouria/cors@^1.2.1", "npm:effect@^3.17.9", @@ -287,7 +337,8 @@ "jsr:@std/assert@^1.0.10", "jsr:@std/fmt@^1.0.5", "jsr:@std/ulid@1", - "npm:zod@^3.24.1" + "npm:ajv-formats@^3.0.1", + "npm:ajv@^8.17.1" ] }, "packages/mongodb": { @@ -302,6 +353,7 @@ "dependencies": [ "jsr:@oak/oak@^17.1.4", "jsr:@std/ulid@1", + "npm:ajv@^8.17.1", "npm:zod@^3.24.1" ] } diff --git a/examples/gustav/deno.json b/examples/gustav/deno.json index 781f981..d06c114 100644 --- a/examples/gustav/deno.json +++ b/examples/gustav/deno.json @@ -26,7 +26,8 @@ "include": ["src/"] }, "imports": { - "@oak/oak": "jsr:@oak/oak@^17.1.4", + "@oak/oak": "jsr:@oak/oak@^17.1.6", + "@std/dotenv": "jsr:@std/dotenv@^0.225.5", "@std/ulid": "jsr:@std/ulid@^1.0.0", "@tajpouria/cors": "jsr:@tajpouria/cors@^1.2.1", "mongodb": "npm:mongodb@^6.12.0", diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts index 47b5bfa..e1aaabb 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts @@ -1,39 +1,39 @@ -import { - AuthContext, - Command, - createEvent, - InvalidInputException, -} from '@nimbus/core'; +import { Command, InvalidInputException } from '@nimbus/core'; import { getEnv } from '@nimbus/utils'; -import { z } from 'zod'; +import { ulid } from '@std/ulid'; import { EventStore } from '../../../../shared/ports/eventStore.ts'; import { Recipe } from '../domain/recipe.ts'; -import { RecipeAddedEvent } from '../events/recipeAdded.ts'; -import { RecipeEventBus } from '../ports/recipeEventBus.ts'; +import { + RecipeAddedCommandType, + RecipeAddedEvent, +} from '../events/recipeAdded.ts'; -export const AddRecipeCommand = Command( - z.literal('recipe.add'), - Recipe, - AuthContext, -); -export type AddRecipeCommand = z.infer; +export const AddRecipeCommandType = 'at.overlap.nimbus.app-recipe' as const; + +export type AddRecipeCommand = Command & { + type: typeof AddRecipeCommandType; +}; export const addRecipe = async ( command: AddRecipeCommand, eventStore: EventStore, - eventBus: RecipeEventBus, ): Promise => { - const { EVENT_SOURCE, EVENT_TYPE_PREFIX } = getEnv({ + const { EVENT_SOURCE } = getEnv({ variables: ['EVENT_SOURCE', 'EVENT_TYPE_PREFIX'], }); - const recipeAddedEvent = createEvent({ + const recipeAddedEvent: RecipeAddedEvent = { + specversion: '1.0', + id: ulid(), + correlationid: command.correlationid, + time: new Date().toISOString(), source: EVENT_SOURCE, - subject: `/recipes/${command.data.payload.slug}`, - type: `${EVENT_TYPE_PREFIX}.recipe-added`, - data: command.data.payload, + type: RecipeAddedCommandType, + subject: `/recipes/${command.data.slug}`, + data: command.data, datacontenttype: 'application/json', - }); + // TODO: add dataschema + }; const replayedEvents = await eventStore.readEvents( recipeAddedEvent.subject, @@ -58,9 +58,8 @@ export const addRecipe = async ( console.log('writtenEvents', writtenEvents); - eventBus.putEvent(recipeAddedEvent); // TODO: Next work on the readModels and the projectors which update the readModels based on the events. // On application startup we need to replay all events to rebuild the readModels. 
- return command.data.payload; + return command.data; }; diff --git a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts index d1f9741..e4c6806 100644 --- a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts +++ b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts @@ -1,9 +1,8 @@ -import { z } from 'zod'; import { Unit } from '../../../../shared/types/unit.ts'; -export const Ingredient = z.object({ - name: z.string(), - quantity: z.number(), - unit: Unit, -}); -export type Ingredient = z.infer; +export type Ingredient = { + name: string; + quantity: number; + unit: Unit; + productId?: string; +}; diff --git a/examples/gustav/src/contexts/recipe/core/domain/recipe.ts b/examples/gustav/src/contexts/recipe/core/domain/recipe.ts index 333997a..02fe95e 100644 --- a/examples/gustav/src/contexts/recipe/core/domain/recipe.ts +++ b/examples/gustav/src/contexts/recipe/core/domain/recipe.ts @@ -1,18 +1,9 @@ -import { z } from 'zod'; import { Ingredient } from './ingredient.ts'; -export const RecipeSlug = z.string() - .min(1) - .regex(/^[a-z0-9-]+$/, { - message: - 'Slug must contain only lowercase letters, numbers, and hyphens', - }); -export type RecipeSlug = z.infer; - -export const Recipe = z.object({ - slug: RecipeSlug, - name: z.string(), - instructions: z.array(z.string()), - ingredients: z.array(Ingredient), -}); -export type Recipe = z.infer; +export type Recipe = { + slug: string; + name: string; + instructions: string[]; + ingredients: Ingredient[]; + tags: string[]; +}; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts index eaf1b13..0352045 100644 --- a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts +++ b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts @@ -1,21 +1,23 @@ -import { Event, getLogger } from '@nimbus/core'; -import { z } from 'zod'; -import { RecipeSlug } from '../domain/recipe.ts'; +import { type Event, getLogger } from '@nimbus/core'; +import { Recipe } from '../domain/recipe.ts'; +import { RecipeRepository } from '../ports/recipeRepository.ts'; -export const RecipeAddedEvent = Event( - z.literal('recipe.added'), - z.object({ - slug: RecipeSlug, - name: z.string(), - }), -); -export type RecipeAddedEvent = z.infer; +export const RecipeAddedCommandType = 'at.overlap.nimbus.recipe-added' as const; -export const recipeAdded = ( +export type RecipeAddedEvent = Event & { + type: typeof RecipeAddedCommandType; +}; + +export const recipeAdded = async ( event: RecipeAddedEvent, + repository: RecipeRepository, ) => { getLogger().info({ message: 'recipeAdded Handler', - data: event, + data: event.data, }); + + const recipe = await repository.insert(event.data); + + return recipe; }; diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeEventBus.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeEventBus.ts deleted file mode 100644 index 6a1a062..0000000 --- a/examples/gustav/src/contexts/recipe/core/ports/recipeEventBus.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { CloudEvent } from '@nimbus/core'; - -export interface RecipeEventBus { - putEvent: >(event: TEvent) => void; -} diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts deleted file mode 100644 index 6082c8d..0000000 --- a/examples/gustav/src/contexts/recipe/core/ports/recipeEventStore.ts +++ /dev/null 
@@ -1,10 +0,0 @@ -type EventSourcingDBEvent = { - source: string; - subject: string; - type: string; - data: any; -}; - -export interface RecipeEventStore { - writeEvents: (events: EventSourcingDBEvent[]) => Promise; -} diff --git a/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts b/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts index c0828ca..53aa864 100644 --- a/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts @@ -1,20 +1,20 @@ -import { AuthContext, Query } from '@nimbus/core'; -import { z } from 'zod'; +import { Query } from '@nimbus/core'; import type { Recipe } from '../domain/recipe.ts'; import { RecipeRepository } from '../ports/recipeRepository.ts'; -export const GetRecipeQuery = Query( - z.literal('recipe.get'), - z.object({ - id: z.string(), - }), - AuthContext, -); -export type GetRecipeQuery = z.infer; +export const GetRecipeQueryType = 'at.overlap.nimbus.get-recipe' as const; + +export type GetRecipeParams = { + slug: string; +}; + +export type GetRecipeQuery = Query & { + type: typeof GetRecipeQueryType; +}; export const getRecipe = async ( query: GetRecipeQuery, repository: RecipeRepository, ): Promise => { - return await repository.getById(query.data.payload.id); + return await repository.getBySlug(query.data.slug); }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts deleted file mode 100644 index 378e922..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/recipeRouter.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { NimbusOakRouter } from '@nimbus/oak'; -import { AddRecipeCommand } from '../../core/commands/addRecipe.ts'; -import { GetRecipeQuery } from '../../core/queries/getRecipe.ts'; -import { addRecipeHandler } from './addRecipe.handler.ts'; -import { getRecipeHandler } from './getRecipe.handler.ts'; - -export const recipeRouter = new NimbusOakRouter(); - -recipeRouter.command( - '/add-recipe', - 'recipe.add', - AddRecipeCommand, - addRecipeHandler, -); - -recipeRouter.query( - '/:id', - 'recipe.get', - GetRecipeQuery, - getRecipeHandler, -); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts deleted file mode 100644 index 6eb0ae7..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeAdded.handler.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { RouteHandler } from '@nimbus/core'; -import { - recipeAdded, - RecipeAddedEvent, -} from '../../core/events/recipeAdded.ts'; - -export const recipeAddedHandler: RouteHandler< - RecipeAddedEvent, - RecipeAddedEvent -> = async (event) => { - recipeAdded(event); - - return { - statusCode: 200, - data: event, - }; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts deleted file mode 100644 index 7883540..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSourcingDBStore.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { GenericException } from '@nimbus/core'; -import process from 'node:process'; -import { RecipeEventStore } from '../../core/ports/recipeEventStore.ts'; - -const makeEventSourcingDBStore = (): RecipeEventStore => { - return { - writeEvents: async (events) => { - 
const payload = JSON.stringify({ - events: events, - }); - - console.log('writeEvents', payload); - - const response = await fetch( - `${process.env.EVENTSOURCINGDB_API}/write-events`, - { - method: 'POST', - headers: { - 'authorization': - `Bearer ${process.env.EVENTSOURCINGDB_SECRET}`, - 'content-type': 'application/json', - }, - body: payload, - }, - ); - - const body = await response.text(); - - if (!response.ok) { - throw new GenericException('Failed to write events', { - status: response.status, - statusText: response.statusText, - url: response.url, - body, - }); - } - - return body; - }, - }; -}; - -export const recipeEventSourcingDBStore = makeEventSourcingDBStore(); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts b/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts deleted file mode 100644 index 0df0306..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { RouteHandlerMap } from '@nimbus/core'; -import { RecipeAddedEvent } from '../../core/events/recipeAdded.ts'; -import { recipeAddedHandler } from './recipeAdded.handler.ts'; - -export const recipeEventSubscriptions: RouteHandlerMap = { - 'recipe.added': { - handler: recipeAddedHandler, - inputType: RecipeAddedEvent, - }, -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts index 50e0ca7..c16d789 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts @@ -11,11 +11,13 @@ const makeMemoryRepository = (): RecipeRepository => { return ulid(); }, + // deno-lint-ignore require-await insert: async (recipe) => { store.set(recipe.slug, recipe); return recipe; }, + // deno-lint-ignore require-await update: async (recipe) => { const existingRecipe = store.get(recipe.slug); @@ -29,6 +31,7 @@ const makeMemoryRepository = (): RecipeRepository => { return recipe; }, + // deno-lint-ignore require-await delete: async (slug) => { const recipe = store.get(slug); @@ -41,6 +44,7 @@ const makeMemoryRepository = (): RecipeRepository => { store.delete(slug); }, + // deno-lint-ignore require-await getBySlug: async (slug) => { const recipe = store.get(slug); @@ -53,6 +57,7 @@ const makeMemoryRepository = (): RecipeRepository => { return recipe; }, + // deno-lint-ignore require-await list: async (options) => { const limit = options?.limit ?? 10; const offset = options?.offset ?? 0; @@ -67,6 +72,7 @@ const makeMemoryRepository = (): RecipeRepository => { .slice(offset, offset + limit); }, + // deno-lint-ignore require-await count: async (options) => { const filter = options?.filter ?? 
{}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts similarity index 82% rename from examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts rename to examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts index 8d3dfa4..198845c 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/addRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts @@ -1,6 +1,5 @@ import { RouteHandler } from '@nimbus/core'; -import { eventBus } from '../../../../eventBus.ts'; -import { eventSourcingDBEventStore } from '../../../../shared/adapters/eventsourcingdbEventStore.ts'; +import { eventSourcingDBEventStore } from '../../../../shared/adapters/eventSourcingDBEventStore.ts'; import { addRecipe, AddRecipeCommand } from '../../core/commands/addRecipe.ts'; import { Recipe } from '../../core/domain/recipe.ts'; @@ -11,7 +10,6 @@ export const addRecipeHandler: RouteHandler< const result = await addRecipe( command, eventSourcingDBEventStore, - eventBus, ); return { diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/getRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts similarity index 100% rename from examples/gustav/src/contexts/recipe/infrastructure/http/getRecipe.handler.ts rename to examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts diff --git a/examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts new file mode 100644 index 0000000..1c322be --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts @@ -0,0 +1,19 @@ +import { RouteHandler } from '@nimbus/core'; +import { Recipe } from '../../core/domain/recipe.ts'; +import { + recipeAdded, + RecipeAddedEvent, +} from '../../core/events/recipeAdded.ts'; +import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; + +export const recipeAddedHandler: RouteHandler< + RecipeAddedEvent, + Recipe +> = async (query) => { + const recipe = await recipeAdded(query, recipeMemoryRepository); + + return { + statusCode: 200, + data: recipe, + }; +}; diff --git a/examples/gustav/src/eventBus.ts b/examples/gustav/src/eventBus.ts deleted file mode 100644 index d3c39df..0000000 --- a/examples/gustav/src/eventBus.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { NimbusEventBus, RouteHandlerMap } from '@nimbus/core'; -import { recipeEventSubscriptions } from './contexts/recipe/infrastructure/messaging/recipeEventSubscriptions.ts'; - -export const eventBus = new NimbusEventBus({ - maxRetries: 3, -}); - -export const initEventBusSubscriptions = () => { - const subscriptions: Record = { - recipe: recipeEventSubscriptions, - }; - - for (const [, handlerMap] of Object.entries(subscriptions)) { - for (const eventName of Object.keys(handlerMap)) { - eventBus.subscribeEvent( - eventName, - handlerMap[eventName].inputType, - handlerMap[eventName].handler, - ); - } - } -}; diff --git a/examples/gustav/src/exampleApiSpec.json b/examples/gustav/src/exampleApiSpec.json new file mode 100644 index 0000000..b6eb571 --- /dev/null +++ b/examples/gustav/src/exampleApiSpec.json @@ -0,0 +1,183 @@ +{ + "asyncapi": "3.0.0", + "info": { + "title": "Nimbus Example Gustav", + "version": "1.0.0", + "description": "A Nimbus 
example " + }, + "defaultContentType": "application/json", + "servers": { + "http-api": { + "host": "api.nimbus.overlp.at", + "protocol": "https", + "description": "HTTP Server", + "security": [ + { + "$ref": "#/components/securitySchemes/apiKey" + } + ] + } + }, + "channels": { + "commands": { + "address": "/api/command", + "description": "API to send commands", + "bindings": { + "http": { + "method": "POST" + } + }, + "messages": { + "AddRecipeCommand": { + "$ref": "#/components/messages/AddRecipeCommand" + } + } + }, + "queries": { + "address": "/api/query", + "description": "API to send queries", + "bindings": { + "http": { + "method": "POST" + } + } + } + }, + "operations": { + "command": { + "title": "Command", + "action": "send", + "channel": { + "$ref": "#/channels/commands" + } + }, + "query": { + "title": "Query", + "action": "send", + "channel": { + "$ref": "#/channels/queries" + } + } + }, + "components": { + "messages": { + "AddRecipeCommand": { + "name": "AddRecipeCommand", + "contentType": "application/cloudevents+json", + "payload": { + "$ref": "#/components/schemas/AddRecipeCommand" + } + } + }, + "schemas": { + "CommandSchema": { + "$id": "https://api.nimbus.overlap.at/schemas/command/v1", + "type": "object", + "required": [ + "specversion", + "id", + "correlationid", + "time", + "source", + "type", + "data" + ], + "properties": { + "specversion": { + "const": "1.0" + }, + "id": { + "type": "string", + "minLength": 1 + }, + "correlationid": { + "type": "string", + "minLength": 1 + }, + "time": { + "type": "string", + "format": "date-time", + "minLength": 1 + }, + "source": { + "type": "string", + "format": "uri-reference", + "minLength": 1 + }, + "type": { + "type": "string", + "minLength": 1 + }, + "subject": { + "type": "string", + "minLength": 1 + }, + "data": { + "type": ["object", "string", "number", "array", "boolean"] + }, + "datacontenttype": { + "type": "string", + "minLength": 1 + }, + "dataschema": { + "type": "string", + "format": "uri", + "minLength": 1 + } + } + }, + + "AddRecipeCommand": { + "$id": "https://api.nimbus.overlap.at/schemas/add-recipe-command/v1", + "allOf": [ + { "$ref": "#/components/schemas/CommandSchema" }, + { + "type": "object", + "properties": { + "source": { "const": "https://nimbus.overlap.at" }, + "type": { "const": "at.overlap.nimbus.app-recipe" }, + "data": { "$ref": "#/components/schemas/Recipe" }, + "datacontenttype": { "const": "application/json" }, + "dataschema": { + "const": "https://api.nimbus.overlap.at/schemas/add-recipe-command/v1" + } + } + } + ] + }, + + "Recipe": { + "$id": "https://api.nimbus.overlap.at/schemas/recipe/v1", + "type": "object", + "required": ["slug", "name", "instructions", "ingredients"], + "properties": { + "slug": { + "type": "string" + }, + "name": { + "type": "string" + }, + "instructions": { + "type": "array", + "items": { + "type": "string" + } + }, + "ingredients": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "securitySchemes": { + "apiKey": { + "type": "httpApiKey", + "in": "header", + "name": "X-API-Key" + } + } + } +} diff --git a/examples/gustav/src/main.ts b/examples/gustav/src/main.ts index b6589f4..8a272af 100644 --- a/examples/gustav/src/main.ts +++ b/examples/gustav/src/main.ts @@ -4,15 +4,13 @@ import { prettyLogFormatter, setupLogger, } from '@nimbus/core'; -import { requestCorrelationId } from '@nimbus/oak'; import { Application } from '@oak/oak/application'; +import '@std/dotenv/load'; import { oakCors } from '@tajpouria/cors'; -import 
'jsr:@std/dotenv/load'; import process from 'node:process'; import { exampleAuthMiddleware } from './contexts/iam/infrastructure/http/auth.middleware.ts'; -import { initEventBusSubscriptions } from './eventBus.ts'; -import { initMongoConnectionManager } from './mongodb.ts'; -import { router } from './router.ts'; +import { initMongoConnectionManager } from './shared/mongodb.ts'; +import { router } from './shared/routing/httpRouter.ts'; // // Setup logging with basic options provided by Nimbus @@ -30,9 +28,6 @@ setupLogger({ // Initialize MongoDB Manager initMongoConnectionManager(); -// Initialize Event Bus Subscriptions -initEventBusSubscriptions(); - // Oak HTTP Server APP const app = new Application(); @@ -47,9 +42,6 @@ app.addEventListener('listen', ({ hostname, port, secure }) => { // CORS Middleware app.use(oakCors()); -// Correlation ID Middleware -app.use(requestCorrelationId); - // Auth Middleware app.use(exampleAuthMiddleware); @@ -58,4 +50,4 @@ app.use(router.routes()); app.use(router.allowedMethods()); // Get the server started -app.listen({ port: 3100 }); +app.listen({ hostname: '0.0.0.0', port: 3100 }); diff --git a/examples/gustav/src/router.ts b/examples/gustav/src/router.ts deleted file mode 100644 index b3121a6..0000000 --- a/examples/gustav/src/router.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { getLogger } from '@nimbus/core'; -import { NimbusOakRouter } from '@nimbus/oak'; -import { recipeRouter } from './contexts/recipe/infrastructure/http/recipeRouter.ts'; -import { mongoManager } from './mongodb.ts'; - -export const router = new NimbusOakRouter(); - -router.get('/health', async (ctx) => { - const logger = getLogger(); - const now = new Date().toISOString(); - - const mongoHealth = await mongoManager.healthCheck(); - - logger.info({ - message: 'Health check', - data: { - time: now, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? { correlationId: ctx.state.correlationId } - : {}), - ...(ctx.state.authContext - ? { authContext: ctx.state.authContext } - : {}), - }, - }); - - ctx.response.body = { - status: mongoHealth.status === 'healthy' ? 'OK' : 'ERROR', - http: { - status: 'healthy', - }, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? { correlationId: ctx.state.correlationId } - : {}), - time: now, - }; -}); - -router.use( - '/recipes', - recipeRouter.routes(), - recipeRouter.allowedMethods(), -); diff --git a/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts b/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts index 274773a..aa0717e 100644 --- a/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts +++ b/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts @@ -1,6 +1,5 @@ -import { CloudEvent, GenericException } from '@nimbus/core'; +import { GenericException } from '@nimbus/core'; import { getEnv } from '@nimbus/utils'; -import { z } from 'zod'; import { EventStore, EventStoreReadOptions } from '../ports/eventStore.ts'; // TODO: this implementation should be moved to @nimbus/eventsourcingdb @@ -54,11 +53,7 @@ const makeEventSourcingDBEventStore = (): EventStore => { }); } - const writtenEvents = items.map((item: any) => - CloudEvent(z.string(), z.any()).parse(item) - ); - - return writtenEvents; + return items; } else { throw new GenericException('Failed to parse events', { reason: 'Response was not an array of events', @@ -128,9 +123,7 @@ const makeEventSourcingDBEventStore = (): EventStore => { } // Parse all items to Nimbus objects and ensure type safety. 
- const events = items.map((item) => - CloudEvent(z.string(), z.any()).parse(item.payload) - ); + const events = items.map((item) => item.payload); return events; }, diff --git a/examples/gustav/src/mongodb.ts b/examples/gustav/src/shared/mongodb.ts similarity index 100% rename from examples/gustav/src/mongodb.ts rename to examples/gustav/src/shared/mongodb.ts diff --git a/examples/gustav/src/shared/ports/eventStore.ts b/examples/gustav/src/shared/ports/eventStore.ts index 3663f6d..709c440 100644 --- a/examples/gustav/src/shared/ports/eventStore.ts +++ b/examples/gustav/src/shared/ports/eventStore.ts @@ -1,4 +1,4 @@ -import { CloudEvent } from '@nimbus/core'; +import { Event } from '@nimbus/core'; // TODO: this interface should be moved to @nimbus/core @@ -31,10 +31,10 @@ export type EventStoreMarker = { export interface EventStore { writeEvents: ( events: EventStoreWriteEvent[], - ) => Promise[]>; + ) => Promise; readEvents: ( subject: string, options?: EventStoreReadOptions, - ) => Promise[]>; + ) => Promise; } diff --git a/examples/gustav/src/shared/routing/commandRouter.ts b/examples/gustav/src/shared/routing/commandRouter.ts new file mode 100644 index 0000000..7c31862 --- /dev/null +++ b/examples/gustav/src/shared/routing/commandRouter.ts @@ -0,0 +1,20 @@ +import { createRouter } from '@nimbus/core'; +import { AddRecipeCommandType } from '../../contexts/recipe/core/commands/addRecipe.ts'; +import { addRecipeHandler } from '../../contexts/recipe/infrastructure/router/addRecipe.handler.ts'; + +// TODO: We want to rework the NimbusRouter to be a class and we create an instance +// Then we add a registerCommand method that handles schema registration on the validator and we get rid of the handlerMap this way. +// We want to implement a default inputLogFunc that logs the input to the console. +// If one wants to omit input logs one can pass a function that does nothing. + +// Later on we need some helper for bridging the NimbusRouter to an Oak Route. Or other frameworks Routers. + +export const commandRouter = createRouter({ + type: 'command', + handlerMap: { + [AddRecipeCommandType]: { + handler: addRecipeHandler, + allowUnsafeInput: true, + }, + }, +}); diff --git a/examples/gustav/src/shared/routing/httpRouter.ts b/examples/gustav/src/shared/routing/httpRouter.ts new file mode 100644 index 0000000..e593dc1 --- /dev/null +++ b/examples/gustav/src/shared/routing/httpRouter.ts @@ -0,0 +1,154 @@ +import { getLogger, Query, RouteHandlerResult } from '@nimbus/core'; +import { Context } from '@oak/oak/context'; +import { Router as OakRouter } from '@oak/oak/router'; +import { mongoManager } from '../mongodb.ts'; +import { commandRouter } from './commandRouter.ts'; +import { queryRouter } from './queryRouter.ts'; + +export const router = new OakRouter(); + +router.get('/health', async (ctx) => { + const logger = getLogger(); + const now = new Date().toISOString(); + + const mongoHealth = await mongoManager.healthCheck(); + + logger.info({ + message: 'Health check', + data: { + time: now, + database: { ...mongoHealth }, + ...(ctx.state.correlationId + ? { correlationId: ctx.state.correlationId } + : {}), + ...(ctx.state.authContext + ? { authContext: ctx.state.authContext } + : {}), + }, + }); + + ctx.response.body = { + status: mongoHealth.status === 'healthy' ? 'OK' : 'ERROR', + http: { + status: 'healthy', + }, + database: { ...mongoHealth }, + ...(ctx.state.correlationId + ? 
{ correlationId: ctx.state.correlationId } + : {}), + time: now, + }; +}); + +router.post('/command', async (ctx) => { + try { + const requestBody = await ctx.request.body.json(); + + // TODO: How do we implement the authentication context? + // data: { + // ...(ctx.state.authContext && { + // authContext: ctx.state.authContext, + // }), + // }, + + const result = await commandRouter(requestBody); + + _handleNimbusRouterSuccess(result, ctx); + } catch (error: any) { + _handleNimbusRouterError(error, ctx); + } +}); + +router.get('/query', async (ctx) => { + try { + const queryParams: Record = {}; + for ( + const [key, value] of ctx.request.url.searchParams.entries() + ) { + queryParams[key] = value; + } + + const queryObject: Query = { + specversion: '1.0', + id: queryParams.id, + correlationid: queryParams.correlationid, + time: queryParams.time, + source: queryParams.source, + type: queryParams.type, + ...(queryParams.datacontenttype && + { datacontenttype: queryParams.datacontenttype }), + ...(queryParams.dataschema && + { dataschema: queryParams.dataschema }), + data: {}, + }; + + if (queryObject.datacontenttype === 'application/json') { + queryObject.data = JSON.parse(queryParams.data); + } + + // TODO: How do we implement the authentication context? + // data: { + // ...(ctx.state.authContext && { + // authContext: ctx.state.authContext, + // }), + // }, + + const result = await queryRouter(queryObject); + + _handleNimbusRouterSuccess(result, ctx); + } catch (error: any) { + _handleNimbusRouterError(error, ctx); + } +}); + +const _handleNimbusRouterSuccess = ( + result: RouteHandlerResult, + ctx: Context, +) => { + ctx.response.status = result.statusCode; + + if (result.headers) { + for (const header of Object.keys(result.headers)) { + ctx.response.headers.set( + header, + result.headers[header], + ); + } + } + + if (result.data) { + ctx.response.body = result.data; + } +}; + +const _handleNimbusRouterError = ( + error: any, + ctx: Context, + onError?: (error: any, ctx: Context) => void, +) => { + if (onError) { + onError(error, ctx); + } else { + getLogger().error({ + category: 'Nimbus', + message: error.message, + error, + }); + + const statusCode = error.statusCode ?? 500; + ctx.response.status = statusCode; + + if (statusCode < 500) { + ctx.response.body = { + statusCode, + ...(error.details ? { code: error.name } : {}), + ...(error.message ? { message: error.message } : {}), + ...(error.details ? { details: error.details } : {}), + }; + } else { + ctx.response.body = { + message: 'Internal server error', + }; + } + } +}; diff --git a/examples/gustav/src/shared/routing/queryRouter.ts b/examples/gustav/src/shared/routing/queryRouter.ts new file mode 100644 index 0000000..5796a1b --- /dev/null +++ b/examples/gustav/src/shared/routing/queryRouter.ts @@ -0,0 +1,20 @@ +import { createRouter } from '@nimbus/core'; +import { GetRecipeQueryType } from '../../contexts/recipe/core/queries/getRecipe.ts'; +import { getRecipeHandler } from '../../contexts/recipe/infrastructure/router/getRecipe.handler.ts'; + +// TODO: We want to rework the NimbusRouter to be a class and we create an instance +// Then we add a registerCommand method that handles schema registration on the validator and we get rid of the handlerMap this way. +// We want to implement a default inputLogFunc that logs the input to the console. +// If one wants to omit input logs one can pass a function that does nothing. + +// Later on we need some helper for bridging the NimbusRouter to an Oak Route. Or other frameworks Routers. 
+ +export const queryRouter = createRouter({ + type: 'query', + handlerMap: { + [GetRecipeQueryType]: { + handler: getRecipeHandler, + allowUnsafeInput: true, + }, + }, +}); diff --git a/examples/gustav/src/shared/types/unit.ts b/examples/gustav/src/shared/types/unit.ts index 81a03b6..853230c 100644 --- a/examples/gustav/src/shared/types/unit.ts +++ b/examples/gustav/src/shared/types/unit.ts @@ -1,33 +1,7 @@ -import { z } from 'zod'; +export const Units = [ + 'piece', + 'gram', + 'milliliter', +] as const; -export const Piece = z.object({ - name: z.literal('Piece'), - short: z.literal('piece'), -}); -export type Piece = z.infer; - -export const TableSpoon = z.object({ - name: z.literal('Table Spoon'), - short: z.literal('EL'), -}); -export type TableSpoon = z.infer; - -export const Gram = z.object({ - name: z.literal('Gram'), - short: z.literal('g'), -}); -export type Gram = z.infer; - -export const Liter = z.object({ - name: z.literal('Liter'), - short: z.literal('l'), -}); -export type Liter = z.infer; - -export const Unit = z.discriminatedUnion('name', [ - Piece, - TableSpoon, - Gram, - Liter, -]); -export type Unit = z.infer; +export type Unit = typeof Units[number]; diff --git a/packages/core/src/lib/authContext.ts b/packages/core/src/lib/authContext.ts deleted file mode 100644 index cff9798..0000000 --- a/packages/core/src/lib/authContext.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { z } from 'zod'; - -/** - * Zod schema for the AuthContext. - * - * This is a default AuthContext to store some basic information - * about a user triggering a command, query or event. - * - * Feel free to define and use your own AuthContext with more detailed - * information or a policy attached to handle access control. - */ -export const AuthContext = z.object({ - sub: z.string(), - groups: z.array(z.string()), -}); - -/** - * The AuthContext type. - */ -export type AuthContext = z.infer; diff --git a/packages/core/src/lib/cloudEvent/absoluteUri.ts b/packages/core/src/lib/cloudEvent/absoluteUri.ts deleted file mode 100644 index 29c8e4b..0000000 --- a/packages/core/src/lib/cloudEvent/absoluteUri.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { z } from 'zod'; - -/** - * Validation for absolute URIs - * based on the RFC 3986 specification. - */ -export const absoluteUri = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - try { - new URL(value); - return true; - } catch { - return false; - } -}, { - message: - 'Must be a valid absolute URI according to RFC 3986. See https://datatracker.ietf.org/doc/html/rfc3986#section-4.3', -}); diff --git a/packages/core/src/lib/cloudEvent/cloudEvent.ts b/packages/core/src/lib/cloudEvent/cloudEvent.ts deleted file mode 100644 index ae412af..0000000 --- a/packages/core/src/lib/cloudEvent/cloudEvent.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { z, type ZodType } from 'zod'; -import { absoluteUri } from '../cloudEvent/absoluteUri.ts'; -import { mediaType } from '../cloudEvent/mediaType.ts'; -import { uriReference } from '../cloudEvent/uriReference.ts'; -import { timeRFC3339 } from './timeRFC3339.ts'; - -// TODO: fix slow type issue - -/** - * Zod schema for the CloudEvent object. - * - * Nimbus respects the CloudEvents specifications - * for messages like commands, queries and events. 
- * - * https://cloudevents.io/ - */ -export const CloudEvent = < - TType extends ZodType, - TData extends ZodType, ->( - typeType: TType, - dataType: TData, -) => { - return z.object({ - specversion: z.literal('1.0'), - id: z.string().min(1), - source: uriReference, - type: typeType, - data: dataType, - subject: z.string().min(1).optional(), - time: timeRFC3339.optional(), - datacontenttype: mediaType.optional(), - dataschema: absoluteUri.optional(), - }); -}; - -/** - * Inference type to create the CloudEvent type. - */ -type CloudEventType< - TType extends ZodType, - TData extends ZodType, -> = ReturnType>; - -/** - * The type of the CloudEvent object. - * - * Nimbus respects the CloudEvents specifications - * for messages like commands, queries and events. - * - * https://cloudevents.io/ - */ -export type CloudEvent = z.infer< - CloudEventType, ZodType> ->; diff --git a/packages/core/src/lib/cloudEvent/mediaType.ts b/packages/core/src/lib/cloudEvent/mediaType.ts deleted file mode 100644 index 3fdab2c..0000000 --- a/packages/core/src/lib/cloudEvent/mediaType.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { z } from 'zod'; - -/** - * MIME Media Type validation schema - * Validates media types according to RFC 2046 specification - * which defines the structure of MIME media types. - * - * Format: type/subtype[; parameter=value] - * Examples: - * - application/json - * - text/plain - * - text/plain; charset=utf-8 - * - application/cloudevents+json - * - multipart/form-data; boundary=something - */ -export const mediaType = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - // RFC 2046 media type regex pattern - // Matches: type/subtype with optional parameters - // type = discrete-type / composite-type / extension-token - // subtype = extension-token - // parameter = attribute "=" value - const mediaTypeRegex = - /^([a-zA-Z][a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*|[xX]-[a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*)\/([a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*|[xX]-[a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*)(\s*;\s*[a-zA-Z0-9][a-zA-Z0-9!#$&\-^_]*\s*=\s*([a-zA-Z0-9!#$&\-^_]+|"[^"]*"))*$/; - - if (!mediaTypeRegex.test(value)) { - return false; - } - - // Split type and subtype - const parts = value.split('/'); - if (parts.length < 2) { - return false; - } - - const [type, subtypeAndParams] = parts; - const subtype = subtypeAndParams.split(';')[0].trim(); - - // Validate known discrete types - const discreteTypes = ['text', 'image', 'audio', 'video', 'application']; - const compositeTypes = ['message', 'multipart']; - const knownTypes = [...discreteTypes, ...compositeTypes]; - - // Allow extension types (starting with x- or X-) or known types - const isValidType = knownTypes.includes(type.toLowerCase()) || - /^[xX]-/.test(type); - - return isValidType && subtype.length > 0; -}, { - message: - 'Must be a valid MIME media type (e.g., "application/json", "text/plain; charset=utf-8"). See https://datatracker.ietf.org/doc/html/rfc2046', -}); diff --git a/packages/core/src/lib/cloudEvent/timeRFC3339.ts b/packages/core/src/lib/cloudEvent/timeRFC3339.ts deleted file mode 100644 index 98263ba..0000000 --- a/packages/core/src/lib/cloudEvent/timeRFC3339.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { z } from 'zod'; - -/** - * RFC 3339 date-time validation schema - * Validates timestamps according to RFC 3339 specification - * which is a profile of ISO 8601 for Internet protocols. 
- * - * Format: YYYY-MM-DDTHH:MM:SSZ or YYYY-MM-DDTHH:MM:SS±HH:MM - * Examples: - * - 2018-04-05T17:31:00Z - * - 2018-04-05T17:31:00.123Z - * - 2018-04-05T17:31:00+01:00 - * - 2018-04-05T17:31:00.123-05:00 - */ -export const timeRFC3339 = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - // RFC 3339 regex pattern - // Matches: YYYY-MM-DDTHH:MM:SS[.fff]Z or YYYY-MM-DDTHH:MM:SS[.fff]±HH:MM - const rfc3339Regex = - /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})$/; - - if (!rfc3339Regex.test(value)) { - return false; - } - - // Additional validation using Date constructor to catch invalid dates - // Check if the date is valid (this catches cases like February 30th) - try { - const date = new Date(value); - return !isNaN(date.getTime()); - } catch { - return false; - } -}, { - message: - 'Must be a valid RFC 3339 timestamp (e.g., "2018-04-05T17:31:00Z" or "2018-04-05T17:31:00+01:00"). See https://datatracker.ietf.org/doc/html/rfc3339', -}); diff --git a/packages/core/src/lib/cloudEvent/uriReference.ts b/packages/core/src/lib/cloudEvent/uriReference.ts deleted file mode 100644 index a295f87..0000000 --- a/packages/core/src/lib/cloudEvent/uriReference.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { z } from 'zod'; - -/** - * URI-reference validation schema - * Validates both absolute URIs and relative references - * based on the RFC 3986 specification. - */ -export const uriReference = z.string().refine((value) => { - if (!value || value.length === 0) { - return false; - } - - try { - new URL(value); - return true; - } catch { - const relativeUriReferenceRegex = - /^([a-zA-Z][a-zA-Z0-9+.-]*:)?\/\/[^\s]*$|^[^\s]*$/; - return relativeUriReferenceRegex.test(value) && value.length > 0; - } -}, { - message: - 'Must be a valid URI-reference according to RFC 3986. See https://datatracker.ietf.org/doc/html/rfc3986#section-4.1', -}); diff --git a/packages/core/src/lib/message/query.ts b/packages/core/src/lib/message/query.ts index daa730d..9e76253 100644 --- a/packages/core/src/lib/message/query.ts +++ b/packages/core/src/lib/message/query.ts @@ -9,6 +9,7 @@ * * @property {string} specversion - The version of the CloudEvents specification which the query uses. * @property {string} id - A globally unique identifier of the query. + * @property {string} correlationid - A globally unique identifier that indicates a correlation to previous and subsequent messages to this query. * @property {string} time - The time when the query was created. * @property {string} source - A URI reference that identifies the system that is constructing the query. * @property {string} type - The type must follow the CloudEvents naming convention, which uses a reversed domain name as a namespace, followed by a domain-specific name. 
@@ -35,6 +36,7 @@ export type Query = { specversion: '1.0'; id: string; + correlationid: string; time: string; source: string; type: string; @@ -49,6 +51,7 @@ export const querySchema = { required: [ 'specversion', 'id', + 'correlationid', 'time', 'source', 'type', @@ -62,6 +65,10 @@ export const querySchema = { type: 'string', minLength: 1, }, + correlationid: { + type: 'string', + minLength: 1, + }, time: { type: 'string', format: 'date-time', diff --git a/packages/core/src/lib/message/validation.ts b/packages/core/src/lib/message/validation.ts deleted file mode 100644 index 90110de..0000000 --- a/packages/core/src/lib/message/validation.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { Ajv, type AnySchema } from 'ajv'; -import { InvalidInputException } from '../exception/invalidInputException.ts'; - -// Some import shenanigans to make it work in here... -import _addFormats from 'ajv-formats'; -const addFormats = _addFormats as unknown as typeof _addFormats.default; - -const ajv = new Ajv(); -addFormats(ajv); - -export type ValidationResult = { - data: TData; - error: undefined; -} | { - data: undefined; - error: InvalidInputException; -}; - -// TODO: find a way to inject a custom AJV instance from outside, or provide a generic interface and functions to register and precompile schemas -// -// Idea: -// Use asyncApi Document for messages, and precompile all schemas for messages -// with a set $id and then reference the precompiled schemas with the -// dataschema property on the messages -// -// https://ajv.js.org/guide/managing-schemas.html#compiling-during-initialization - -export const validate = ( - schema: AnySchema, - data: unknown, -): ValidationResult => { - const validate = ajv.compile(schema); - const isValid = validate(data); - - if (isValid) { - return { data: data as TData, error: undefined }; - } else { - return { - data: undefined, - error: new InvalidInputException('The provided input is invalid', { - issues: validate.errors, - }), - }; - } -}; diff --git a/packages/core/src/lib/messageEnvelope.ts b/packages/core/src/lib/messageEnvelope.ts deleted file mode 100644 index 798ef7a..0000000 --- a/packages/core/src/lib/messageEnvelope.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { z, type ZodType } from 'zod'; - -// TODO: fix slow type issue - -/** - * Zod schema for the MessageEnvelope. - * - * As Nimbus uses the CloudEvents specification - * for all messages like commands, queries and events, - * there needs to be a place to add Nimbus specific - * meta information to messages. - * - * This is what the MessageEnvelope is used for. - * It contains the a correlationId and an optional authContext - * along with the actual payload. - */ -export const MessageEnvelope = < - TPayload extends ZodType, - TAuthContext extends ZodType, ->( - payloadType: TPayload, - authContextType: TAuthContext, -) => { - return z.object({ - payload: payloadType, - correlationId: z.string(), - authContext: authContextType.optional(), - }); -}; - -/** - * Inference type to create the MessageEnvelope type. - */ -type MessageEnvelopeType< - TPayload extends ZodType, - TAuthContext extends ZodType, -> = ReturnType>; - -/** - * The type of the MessageEnvelope. - * - * As Nimbus uses the CloudEvents specification - * for all messages like commands, queries and events, - * there needs to be a place to add Nimbus specific - * meta information to messages. - * - * This is what the MessageEnvelope is used for. - * It contains the a correlationId and an optional authContext - * along with the actual payload. 
- */ -export type MessageEnvelope = z.infer< - MessageEnvelopeType, ZodType> ->; diff --git a/packages/core/src/lib/router/asyncApiRouter.ts b/packages/core/src/lib/router/asyncApiRouter.ts new file mode 100644 index 0000000..2e0e873 --- /dev/null +++ b/packages/core/src/lib/router/asyncApiRouter.ts @@ -0,0 +1,54 @@ +// import { Parser } from '@asyncapi/parser'; +// import { GenericException } from '../exception/genericException.ts'; +// import { getLogger } from '../log/logger.ts'; + +// export type AsyncApiRouterInput = { +// apiSpec: string; +// }; + +// export const asyncApiRouter = async ({ +// apiSpec, +// }: AsyncApiRouterInput) => { +// const parser = new Parser(); +// const { document, diagnostics } = await parser.parse(apiSpec); + +// if (!document) { +// getLogger().error({ +// message: 'AsyncAPI document parsed with errors', +// data: { +// diagnostics, +// }, +// }); + +// throw new GenericException('AsyncAPI document parsed with errors'); +// } + +// const asyncApiMessages = document.messages(); + +// const registeredMessages: Record = {}; +// for (const message of asyncApiMessages) { +// const msgJson = message.json(); + +// if (msgJson.payload) { +// // TODO: Validate the payload to be a JSONSchema with the mandatory fields. +// const type = msgJson.payload.properties.type.const; +// registeredMessages[type] = msgJson.payload; +// } +// } + +// getLogger().info({ +// message: `Router registered ${ +// Object.keys(registeredMessages).length +// } messages`, +// data: { +// registeredMessages: Object.keys(registeredMessages), +// }, +// }); + +// return (input: any) => { +// return Promise.resolve({ +// statusCode: 200, +// body: 'Hello, world!', +// }); +// }; +// }; diff --git a/packages/core/src/lib/router/messageHandler.ts b/packages/core/src/lib/router/messageHandler.ts new file mode 100644 index 0000000..80b5b0b --- /dev/null +++ b/packages/core/src/lib/router/messageHandler.ts @@ -0,0 +1,55 @@ +import { InvalidInputException } from '../exception/invalidInputException.ts'; +import { getLogger } from '../log/logger.ts'; +import type { Message } from '../message/message.ts'; +import { getValidator } from '../validator/validator.ts'; + +export const createMessageHandler = ( + { + handlerFunction, + allowUnsafeInput, + }: { + handlerFunction: (message: TInput) => Promise; + allowUnsafeInput?: boolean; + }, +): (message: TInput) => Promise => { + const validator = getValidator(); + + return async (message: TInput) => { + let validMessage: TInput; + + if (message.dataschema) { + const { data, error } = validator.validate( + message.dataschema, + message, + ); + + if (error) { + throw error; + } else { + validMessage = data; + } + } else { + if (allowUnsafeInput) { + getLogger().warn({ + category: 'Nimbus', + message: 'No dataschema found for message', + }); + } else { + throw new InvalidInputException( + 'No dataschema provided for message', + { + errorCode: 'MISSING_DATASCHEMA', + reason: `The dataschema is missing on the message + and "allowUnsafeInput" is not enabled to the message type. + It is recommended to always provide a dataschema + for input validation. 
Otherwise set "allowUnsafeInput" + to true for the route handler.`, + }, + ); + } + validMessage = message as TInput; + } + + return handlerFunction(validMessage); + }; +}; diff --git a/packages/core/src/lib/router/router.ts b/packages/core/src/lib/router/router.ts index ea6574c..ed9e444 100644 --- a/packages/core/src/lib/router/router.ts +++ b/packages/core/src/lib/router/router.ts @@ -7,6 +7,12 @@ import type { Message } from '../message/message.ts'; import { type Query, querySchema } from '../message/query.ts'; import { getValidator } from '../validator/validator.ts'; +// TODO: we want to rework the router concept. Do We ??? +// Nimbus should be framework agnostic and other Frameworks like Oak, Fastify, Elysia should used for routing. +// So we want to provide a higher order handler function that can take some options and a logic function. +// This higher order handler function will do the message validation, logging and tracing etc. and will execute the logic function. +// This way the full handler can be used in any other frameworks routing or event/message bus system. + /** * The result type of a route handler. * @@ -24,7 +30,10 @@ export type RouteHandlerResult = { * @template TInput - The type of the input to the route handler. * @template TOutputData - The type of the data returned by the route handler. */ -export type RouteHandler = ( +export type RouteHandler< + TInput extends Message = Message, + TOutputData = unknown, +> = ( input: TInput, ) => Promise>; @@ -34,7 +43,7 @@ export type RouteHandler = ( * @template TInput - The type of the input to the route handler. * @template TOutputData - The type of the data returned by the route handler. */ -export type RouteHandlerMap = Record< +export type RouteHandlerMap = Record< string, { handler: RouteHandler; @@ -57,7 +66,7 @@ export type Router = ( * @template TInput - The type of the input to the router. * @template TResultData - The type of the data returned by the router. 
*/ -export type CreateRouterInput = { +export type CreateRouterInput = { type: 'command' | 'query' | 'event'; handlerMap: RouteHandlerMap; inputLogFunc?: (input: unknown) => void; @@ -112,7 +121,7 @@ export type CreateRouterInput = { * }); * ``` */ -export const createRouter = ({ +export const createRouter = ({ type, handlerMap, inputLogFunc, diff --git a/packages/core/src/lib/validator/validator.ts b/packages/core/src/lib/validator/validator.ts index 1dec137..93bddfd 100644 --- a/packages/core/src/lib/validator/validator.ts +++ b/packages/core/src/lib/validator/validator.ts @@ -10,6 +10,8 @@ import { querySchema } from '../message/query.ts'; import _addFormats from 'ajv-formats'; const addFormats = _addFormats as unknown as typeof _addFormats.default; +// TODO: check out https://github.com/standard-schema/standard-schema?tab=readme-ov-file to have it more agnostic like the elysia lib https://elysiajs.com/essential/validation.html + export type ValidatorOptions = { ajv?: Ajv; }; diff --git a/packages/oak/deno.json b/packages/oak/deno.json index 3ed557c..559da6b 100644 --- a/packages/oak/deno.json +++ b/packages/oak/deno.json @@ -33,6 +33,7 @@ "imports": { "@std/ulid": "jsr:@std/ulid@^1.0.0", "@oak/oak": "jsr:@oak/oak@^17.1.4", + "ajv": "npm:ajv@^8.17.1", "zod": "npm:zod@^3.24.1" } } diff --git a/packages/oak/src/lib/middleware/requestCorrelationId.ts b/packages/oak/src/lib/middleware/requestCorrelationId.ts deleted file mode 100644 index 2644d10..0000000 --- a/packages/oak/src/lib/middleware/requestCorrelationId.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { Context } from '@oak/oak/context'; -import type { Next } from '@oak/oak/middleware'; -import { ulid } from '@std/ulid'; - -/** - * Middleware to add a correlation ID (ULID) to the request context. 
- * - * @param ctx - The Oak context - * @param next - The Oak next function - */ -export const requestCorrelationId = async ( - ctx: Context, - next: Next, -): Promise => { - ctx.state.correlationId = ulid(); - await next(); -}; diff --git a/packages/oak/src/lib/router.ts b/packages/oak/src/lib/router.ts index 48cb08a..53fd72f 100644 --- a/packages/oak/src/lib/router.ts +++ b/packages/oak/src/lib/router.ts @@ -1,13 +1,13 @@ import { + type Command, createRouter, getLogger, + type Query, type RouteHandler, type RouteHandlerResult, } from '@nimbus/core'; import type { Context } from '@oak/oak/context'; import { Router as OakRouter, type RouterOptions } from '@oak/oak/router'; -import { ulid } from '@std/ulid'; -import type { ZodType } from 'zod'; /** * The NimbusOakRouter extends the Oak Router @@ -24,17 +24,23 @@ export class NimbusOakRouter extends OakRouter { * * @param {string} path - Oak request path * @param {string} commandType - Type of the command - * @param {ZodType} commandSchema - Schema (ZodType) of the command * @param {RouteHandler} handler - Nimbus Route Handler function + * @param {AnySchema} commandSchema - JSON Schema of the command * @param {Function} onError - Optional function to customize error handling */ - command( - path: string, - commandType: string, - commandSchema: ZodType, - handler: RouteHandler, - onError?: (error: any, ctx: Context) => void, - ) { + command({ + path, + type, + handler, + allowUnsafeInput, + onError, + }: { + path: string; + type: string; + handler: RouteHandler; + allowUnsafeInput?: boolean; + onError?: (error: any, ctx: Context) => void; + }) { const inputLogFunc = (input: any) => { getLogger().info({ category: 'Nimbus', @@ -48,32 +54,27 @@ export class NimbusOakRouter extends OakRouter { super.post(path, async (ctx: Context) => { try { - const correlationId = ctx.state.correlationId ?? ulid(); const requestBody = await ctx.request.body.json(); const nimbusRouter = createRouter({ + type: 'command', handlerMap: { - [commandType]: { + [type]: { handler, - inputType: commandSchema, + allowUnsafeInput: allowUnsafeInput ?? false, }, }, inputLogFunc, }); - const result = await nimbusRouter({ - specversion: '1.0', - id: correlationId, - source: ctx.request.url.toString(), - type: commandType, - data: { - correlationId: correlationId, - payload: requestBody, - ...(ctx.state.authContext && { - authContext: ctx.state.authContext, - }), - }, - }); + // TODO: How do we implement the authentication context? 
+ // data: { + // ...(ctx.state.authContext && { + // authContext: ctx.state.authContext, + // }), + // }, + + const result = await nimbusRouter(requestBody); this._handleNimbusRouterSuccess(result, ctx); } catch (error: any) { @@ -87,17 +88,23 @@ export class NimbusOakRouter extends OakRouter { * * @param {string} path - Oak request path * @param {string} queryType - Type of the query - * @param {ZodType} querySchema - Schema (ZodType) of the query + * @param {boolean} allowUnsafeInput - Allow unsafe input * @param {RouteHandler} handler - Nimbus Route Handler function * @param {Function} onError - Optional function to customize error handling */ - query( - path: string, - queryType: string, - querySchema: ZodType, - handler: RouteHandler, - onError?: (error: any, ctx: Context) => void, - ) { + query({ + path, + type, + allowUnsafeInput, + handler, + onError, + }: { + path: string; + type: string; + handler: RouteHandler; + allowUnsafeInput?: boolean; + onError?: (error: any, ctx: Context) => void; + }) { const inputLogFunc = (input: any) => { getLogger().info({ category: 'Nimbus', @@ -109,44 +116,29 @@ export class NimbusOakRouter extends OakRouter { }); }; - super.get(path, async (ctx: Context) => { + super.post(path, async (ctx: Context) => { try { - const correlationId = ctx.state.correlationId ?? ulid(); - const pathParams = (ctx as any).params; - - const queryParams: Record = {}; - for ( - const [key, value] of ctx.request.url.searchParams.entries() - ) { - queryParams[key] = value; - } + const requestBody = await ctx.request.body.json(); const nimbusRouter = createRouter({ + type: 'query', handlerMap: { - [queryType]: { + [type]: { handler, - inputType: querySchema, + allowUnsafeInput: allowUnsafeInput ?? false, }, }, inputLogFunc, }); - const result = await nimbusRouter({ - specversion: '1.0', - id: correlationId, - source: ctx.request.url.toString(), - type: queryType, - data: { - correlationId: correlationId, - payload: { - ...queryParams, - ...pathParams, - }, - ...(ctx.state.authContext && { - authContext: ctx.state.authContext, - }), - }, - }); + // TODO: How do we implement the authentication context? + // data: { + // ...(ctx.state.authContext && { + // authContext: ctx.state.authContext, + // }), + // }, + + const result = await nimbusRouter(requestBody); this._handleNimbusRouterSuccess(result, ctx); } catch (error: any) { From 02f91f10324efa0c737fe34af597e87501156274 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Sat, 4 Oct 2025 12:21:46 +0200 Subject: [PATCH 07/37] final iteration for the core message router? 
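
This patch replaces the createRouter factory and the RouteHandler result objects with a MessageRouter class and transport-agnostic MessageHandler functions. A minimal usage sketch of the new API, mirroring the docblock example and test fixtures changed below (the message type string and the payload fields are illustrative):

    import { MessageRouter, type MessageHandler } from '@nimbus/core';

    // Handlers are now transport-agnostic and return plain domain data
    // instead of { statusCode, headers, data }.
    const addRecipeHandler: MessageHandler = async (message) => {
        return message.data;
    };

    const commandRouter = new MessageRouter('command');

    // allowUnsafeInput skips dataschema validation; it defaults to false.
    commandRouter.register('at.overlap.nimbus.add-recipe', addRecipeHandler, {
        allowUnsafeInput: true,
    });

    // route() validates the message envelope, looks up the handler by the
    // message type and returns the handler result.
    const result = await commandRouter.route({
        specversion: '1.0',
        id: '123',
        correlationid: '456',
        time: '2025-01-01T00:00:00Z',
        source: 'https://nimbus.overlap.at',
        type: 'at.overlap.nimbus.add-recipe',
        data: { slug: 'pancakes', title: 'Pancakes' },
    });

Mapping results to HTTP status codes now lives in the transport adapters (see the @nimbus/oak changes), not in the handlers.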
--- .claude/settings.local.json | 13 + TODO.md | 230 ++++++++++++++ deno.lock | 2 +- .../router/addRecipe.handler.ts | 11 +- .../router/getRecipe.handler.ts | 11 +- .../src/shared/routing/commandRouter.ts | 32 +- .../gustav/src/shared/routing/httpRouter.ts | 88 +----- .../gustav/src/shared/routing/queryRouter.ts | 32 +- packages/client/README.md | 119 +------ packages/core/README.md | 87 +----- packages/core/deno.json | 19 +- packages/core/src/index.ts | 1 + packages/core/src/lib/auth/authContext.ts | 7 + packages/core/src/lib/event/eventBus.ts | 29 +- .../exception/invalidInputException.test.ts | 36 --- .../lib/exception/invalidInputException.ts | 18 +- .../core/src/lib/router/asyncApiRouter.ts | 54 ---- .../core/src/lib/router/messageHandler.ts | 55 ---- .../core/src/lib/router/messageRouter.test.ts | 256 +++++++++++++++ packages/core/src/lib/router/router.test.ts | 287 ----------------- packages/core/src/lib/router/router.ts | 229 +++++++------- packages/core/src/lib/router/testCommand.ts | 51 --- .../router/{testEvent.ts => testFixtures.ts} | 110 +++++-- packages/core/src/lib/router/testQuery.ts | 49 --- packages/core/src/lib/validator/validator.ts | 2 - packages/oak/deno.json | 2 +- packages/oak/src/index.ts | 1 - packages/oak/src/lib/router.ts | 293 ++++++++---------- 28 files changed, 918 insertions(+), 1206 deletions(-) create mode 100644 .claude/settings.local.json create mode 100644 packages/core/src/lib/auth/authContext.ts delete mode 100644 packages/core/src/lib/router/asyncApiRouter.ts delete mode 100644 packages/core/src/lib/router/messageHandler.ts create mode 100644 packages/core/src/lib/router/messageRouter.test.ts delete mode 100644 packages/core/src/lib/router/router.test.ts delete mode 100644 packages/core/src/lib/router/testCommand.ts rename packages/core/src/lib/router/{testEvent.ts => testFixtures.ts} (52%) delete mode 100644 packages/core/src/lib/router/testQuery.ts diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..41a685c --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,13 @@ +{ + "permissions": { + "allow": [ + "Bash(deno test:*)", + "Bash(deno add:*)", + "Bash(deno task:*)", + "Bash(deno check:*)", + "Bash(find:*)" + ], + "deny": [], + "ask": [] + } +} \ No newline at end of file diff --git a/TODO.md b/TODO.md index 848cc8a..cf1b66a 100644 --- a/TODO.md +++ b/TODO.md @@ -12,3 +12,233 @@ The Source of truth for the API will be an AsyncAPI specification in JSON format Based on the AsyncAPI specification the Nimbus framework should register the commands, queries and event on the router structure. Also the router will provide a route for a health check and routes for a schema registry. The user now only has to implement the business logic and connects it to the route handlers. + +## Claude conversation: + +**Me:** +I like the simple aproach. When it comes to Schema management my overall vision is to have a asyncapi.json file which will hold the +messages and schemas for the application. And based on that file we could build a Nimbus CLI that generates the boilerplate code for +handlers routes and also schemas. That is the reason why i choose AJV and wrote the validator +(packages/core/src/lib/validator/validator.ts) so a schmea can be added with the addSchema method and if a message which is routet on the +router contains a dataschema value this schema is picked up in the validator. 
Also I would not reverse the allowUnsafeInput logic as I +want to follow the philosophy of having input validation required as best practice. What do you think? Is this a good way of handling +things? Or do you suggest something else? Also based on the asyncapi definition and the schema registration on the validator I had the +idea to then also have the option to publish a schema registry later on. + +**Claude:** +Your vision is excellent and very well thought out. This approach aligns perfectly with your simplicity goals while providing +professional-grade tooling. Let me validate your thinking: + +Why AsyncAPI + AJV + Schema Registry is Smart + +1. AsyncAPI as Single Source of Truth + +- Defines all messages (commands/queries/events) in one place +- Documents your API automatically +- Industry standard (like OpenAPI but for async/event-driven) +- Enables codegen → less boilerplate, fewer mistakes + +2. AJV Validator Design is Correct + +- Pre-register schemas via validator.addSchema() +- Messages reference schemas via dataschema URL +- Router validates automatically when dataschema is present +- If no dataschema → requires explicit allowUnsafeInput: true + +This forces intentional decisions: "I'm skipping validation" vs. accidental omission. + +3. Schema Registry Future + +- AsyncAPI → Schema Registry is a natural progression +- Enables schema evolution (versioning, compatibility checks) +- Supports microservices discovering each other's contracts +- You're building the foundation now (URL-based schema references) + +Validating Your Philosophy + +✅ Keep allowUnsafeInput as opt-in (unsafe = explicit flag) + +You're right. Best practice = validate by default. The current warning (router.ts:182-185) is good - it reminds developers during +prototyping that they're being unsafe. In production, messages should have schemas. + +✅ Keep per-type routers (command/query/event) + +I questioned this earlier, but with AsyncAPI in mind, it makes sense: + +- AsyncAPI documents commands, queries, events separately +- Different message types may have different routing strategies later (commands = 1:1, events = pub/sub) +- Codegen can generate separate routers based on AsyncAPI channels + +✅ Schema registration pattern is right + +// Generated by Nimbus CLI from asyncapi.json +validator.addSchema(addRecipeSchema); +validator.addSchema(getRecipeSchema); + +// Messages now reference these schemas +const command: Command = { +// ... +dataschema: 'https://nimbus.overlap.at/schemas/commands/add-recipe/v1', +data: { ... } +}; + +This is exactly how JSON Schema + HTTP APIs work. You're applying proven patterns to message-driven architecture. + +Suggested Refinements + +1. AsyncAPI Structure + +Your AsyncAPI should look something like: +asyncapi: 3.0.0 +info: +title: Gustav Recipe API +version: 1.0.0 + +channels: +recipeCommands: +address: /command +messages: +addRecipe: +$ref: '#/components/messages/AddRecipe' + + recipeQueries: + address: /query + messages: + getRecipe: + $ref: '#/components/messages/GetRecipe' + +components: +messages: +AddRecipe: +name: at.overlap.nimbus.add-recipe +payload: +$ref: '#/components/schemas/Recipe' +GetRecipe: +name: at.overlap.nimbus.get-recipe +payload: +$ref: '#/components/schemas/GetRecipeParams' + + schemas: + Recipe: + type: object + properties: + slug: { type: string } + title: { type: string } + # ... + +2.
Nimbus CLI Codegen Flow + +nimbus generate --asyncapi asyncapi.json --output src/generated/ + +Should generate: + +- Schema registration (src/generated/schemas.ts): + import { getValidator } from '@nimbus/core'; + +export function registerSchemas() { +getValidator().addSchema(addRecipeSchema); +getValidator().addSchema(getRecipeSchema); +// ... +} + +- Type definitions (src/generated/types.ts): + export type AddRecipeCommand = Command & { + type: 'at.overlap.nimbus.add-recipe'; + }; +- Router setup (src/generated/routers.ts): + export const commandRouter = new MessageRouter('command'); + // Handlers registered manually or via convention + +3. Schema URL Convention + +Standardize schema URLs: +https://nimbus.overlap.at/schemas/{type}/{name}/{version} + +Examples: + +- https://nimbus.overlap.at/schemas/commands/add-recipe/v1 +- https://nimbus.overlap.at/schemas/queries/get-recipe/v1 +- https://nimbus.overlap.at/schemas/events/recipe-added/v1 + +This enables: + +- Versioning (v1, v2, etc.) +- Schema registry lookups +- Clear naming conventions + +4. Validator Schema Registration Helper + +Add a convenience method to validator: +// validator.ts +public addSchemas(schemas: SchemaObject[]): void { +for (const schema of schemas) { +this.addSchema(schema); +} +} + +Then generated code can do: +registerSchemas() { +getValidator().addSchemas([ +addRecipeSchema, +getRecipeSchema, +// ... +]); +} + +5. Runtime Schema Validation Reporting + +In Gustav, you're using allowUnsafeInput: true everywhere. Once you have AsyncAPI + codegen: +// Before (manual, no schema) +router.register(AddRecipeCommandType, handler, { allowUnsafeInput: true }); + +// After (generated, with schema) +router.register(AddRecipeCommandType, handler); +// No allowUnsafeInput needed - schema is in the message + +Implementation Phases + +Phase 1: Current (Manual) + +- Write handlers manually +- Use allowUnsafeInput: true during dev +- Validate manually if needed + +Phase 2: AsyncAPI Definition + +- Create asyncapi.json for Gustav +- Define all messages and schemas +- Keep manual handlers + +Phase 3: Nimbus CLI Codegen + +- Build CLI that reads AsyncAPI +- Generate schema registration code +- Generate TypeScript types +- Optionally generate router boilerplate + +Phase 4: Schema Registry + +- Host schemas at URLs (Nimbus schema registry service) +- Support schema versioning +- Schema compatibility checking + +My Take + +Your approach is exactly right. You're building: + +1. A simple developer experience (MessageRouter, clean handlers) +2. With professional tooling (AsyncAPI, codegen, schema validation) +3. That scales (schema registry, versioning, microservices) + +The current implementation already supports this vision - you just need to build the CLI tooling around it. The validator design with +addSchema() + URL-based dataschema references is the perfect foundation. + +Don't change anything about the core design. Focus on: + +- Defining AsyncAPI schema for Gustav +- Building the Nimbus CLI codegen +- Proving the workflow end-to-end + +Once that works, the schema registry is just HTTP hosting of your schemas with versioning. + +This is a really solid architecture. You're threading the needle between "simple for small apps" and "scales to enterprise." 
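+
+To make the schema registration snippet above concrete, here is a rough sketch of the intended flow against the current core API. The schema contents and the URL are illustrative, and it assumes `getValidator` is re-exported from `@nimbus/core`; `addSchema()` and the `dataschema` lookup on the router are what the code already does today (see the router test fixtures).
+
+```ts
+import { getValidator, MessageRouter, type MessageHandler } from '@nimbus/core';
+
+// Illustrative JSON Schema; a future Nimbus CLI would generate this from
+// asyncapi.json. Its $id doubles as the dataschema reference on messages.
+const addRecipeSchema = {
+    $id: 'https://nimbus.overlap.at/schemas/commands/add-recipe/v1',
+    type: 'object',
+    properties: {
+        data: {
+            type: 'object',
+            properties: {
+                slug: { type: 'string' },
+                title: { type: 'string' },
+            },
+            required: ['slug', 'title'],
+        },
+    },
+};
+
+getValidator().addSchema(addRecipeSchema);
+
+// Illustrative handler; the real one lives in the gustav example.
+const addRecipeHandler: MessageHandler = async (message) => message.data;
+
+const commandRouter = new MessageRouter('command');
+
+// No allowUnsafeInput needed: incoming messages carry
+// dataschema: 'https://nimbus.overlap.at/schemas/commands/add-recipe/v1'
+// and the router validates them against the registered schema before the
+// handler runs.
+commandRouter.register('at.overlap.nimbus.add-recipe', addRecipeHandler);
+```
+
+Once the CLI exists, the schema objects and the register calls would be generated from asyncapi.json instead of written by hand.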
diff --git a/deno.lock b/deno.lock index eec6298..cb87160 100644 --- a/deno.lock +++ b/deno.lock @@ -351,7 +351,7 @@ }, "packages/oak": { "dependencies": [ - "jsr:@oak/oak@^17.1.4", + "jsr:@oak/oak@^17.1.6", "jsr:@std/ulid@1", "npm:ajv@^8.17.1", "npm:zod@^3.24.1" diff --git a/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts index 198845c..27d0783 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts @@ -1,19 +1,14 @@ -import { RouteHandler } from '@nimbus/core'; +import { MessageHandler } from '@nimbus/core'; import { eventSourcingDBEventStore } from '../../../../shared/adapters/eventSourcingDBEventStore.ts'; import { addRecipe, AddRecipeCommand } from '../../core/commands/addRecipe.ts'; import { Recipe } from '../../core/domain/recipe.ts'; -export const addRecipeHandler: RouteHandler< +export const addRecipeHandler: MessageHandler< AddRecipeCommand, Recipe > = async (command) => { - const result = await addRecipe( + return await addRecipe( command, eventSourcingDBEventStore, ); - - return { - statusCode: 200, - data: result, - }; }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts index fa97e7f..8a62d01 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts @@ -1,16 +1,11 @@ -import { RouteHandler } from '@nimbus/core'; +import { MessageHandler } from '@nimbus/core'; import { Recipe } from '../../core/domain/recipe.ts'; import { getRecipe, GetRecipeQuery } from '../../core/queries/getRecipe.ts'; import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; -export const getRecipeHandler: RouteHandler< +export const getRecipeHandler: MessageHandler< GetRecipeQuery, Recipe > = async (query) => { - const recipe = await getRecipe(query, recipeMemoryRepository); - - return { - statusCode: 200, - data: recipe, - }; + return await getRecipe(query, recipeMemoryRepository); }; diff --git a/examples/gustav/src/shared/routing/commandRouter.ts b/examples/gustav/src/shared/routing/commandRouter.ts index 7c31862..7a563fd 100644 --- a/examples/gustav/src/shared/routing/commandRouter.ts +++ b/examples/gustav/src/shared/routing/commandRouter.ts @@ -1,20 +1,22 @@ -import { createRouter } from '@nimbus/core'; +import { getLogger, MessageRouter } from '@nimbus/core'; import { AddRecipeCommandType } from '../../contexts/recipe/core/commands/addRecipe.ts'; import { addRecipeHandler } from '../../contexts/recipe/infrastructure/router/addRecipe.handler.ts'; -// TODO: We want to rework the NimbusRouter to be a class and we create an instance -// Then we add a registerCommand method that handles schema registration on the validator and we get rid of the handlerMap this way. -// We want to implement a default inputLogFunc that logs the input to the console. -// If one wants to omit input logs one can pass a function that does nothing. - -// Later on we need some helper for bridging the NimbusRouter to an Oak Route. Or other frameworks Routers. 
- -export const commandRouter = createRouter({ - type: 'command', - handlerMap: { - [AddRecipeCommandType]: { - handler: addRecipeHandler, - allowUnsafeInput: true, - }, +export const commandRouter = new MessageRouter('command', { + logInput: (input: any) => { + getLogger().info({ + category: 'Nimbus', + ...(input?.correlationid && { + correlationId: input.correlationid, + }), + message: + `${input?.correlationid} - [Command] ${input?.type} from ${input?.source}`, + }); }, }); + +commandRouter.register( + AddRecipeCommandType, + addRecipeHandler, + { allowUnsafeInput: true }, +); diff --git a/examples/gustav/src/shared/routing/httpRouter.ts b/examples/gustav/src/shared/routing/httpRouter.ts index e593dc1..9f59536 100644 --- a/examples/gustav/src/shared/routing/httpRouter.ts +++ b/examples/gustav/src/shared/routing/httpRouter.ts @@ -1,11 +1,10 @@ -import { getLogger, Query, RouteHandlerResult } from '@nimbus/core'; -import { Context } from '@oak/oak/context'; -import { Router as OakRouter } from '@oak/oak/router'; +import { getLogger, Query } from '@nimbus/core'; +import { handleOakError, NimbusOakRouter } from '@nimbus/oak'; import { mongoManager } from '../mongodb.ts'; import { commandRouter } from './commandRouter.ts'; import { queryRouter } from './queryRouter.ts'; -export const router = new OakRouter(); +export const router = new NimbusOakRouter(); router.get('/health', async (ctx) => { const logger = getLogger(); @@ -40,25 +39,13 @@ router.get('/health', async (ctx) => { }; }); -router.post('/command', async (ctx) => { - try { - const requestBody = await ctx.request.body.json(); - - // TODO: How do we implement the authentication context? - // data: { - // ...(ctx.state.authContext && { - // authContext: ctx.state.authContext, - // }), - // }, - - const result = await commandRouter(requestBody); - - _handleNimbusRouterSuccess(result, ctx); - } catch (error: any) { - _handleNimbusRouterError(error, ctx); - } +// Command endpoint - uses the Oak adapter to bridge MessageRouter to HTTP +router.command({ + path: '/command', + router: commandRouter, }); +// Query endpoint - uses the Oak adapter to bridge MessageRouter to HTTP router.get('/query', async (ctx) => { try { const queryParams: Record = {}; @@ -93,62 +80,11 @@ router.get('/query', async (ctx) => { // }), // }, - const result = await queryRouter(queryObject); + const result = await queryRouter.route(queryObject); - _handleNimbusRouterSuccess(result, ctx); + ctx.response.status = 200; + ctx.response.body = result as any; } catch (error: any) { - _handleNimbusRouterError(error, ctx); + handleOakError(error, ctx); } }); - -const _handleNimbusRouterSuccess = ( - result: RouteHandlerResult, - ctx: Context, -) => { - ctx.response.status = result.statusCode; - - if (result.headers) { - for (const header of Object.keys(result.headers)) { - ctx.response.headers.set( - header, - result.headers[header], - ); - } - } - - if (result.data) { - ctx.response.body = result.data; - } -}; - -const _handleNimbusRouterError = ( - error: any, - ctx: Context, - onError?: (error: any, ctx: Context) => void, -) => { - if (onError) { - onError(error, ctx); - } else { - getLogger().error({ - category: 'Nimbus', - message: error.message, - error, - }); - - const statusCode = error.statusCode ?? 500; - ctx.response.status = statusCode; - - if (statusCode < 500) { - ctx.response.body = { - statusCode, - ...(error.details ? { code: error.name } : {}), - ...(error.message ? { message: error.message } : {}), - ...(error.details ? 
{ details: error.details } : {}), - }; - } else { - ctx.response.body = { - message: 'Internal server error', - }; - } - } -}; diff --git a/examples/gustav/src/shared/routing/queryRouter.ts b/examples/gustav/src/shared/routing/queryRouter.ts index 5796a1b..82f26d2 100644 --- a/examples/gustav/src/shared/routing/queryRouter.ts +++ b/examples/gustav/src/shared/routing/queryRouter.ts @@ -1,20 +1,22 @@ -import { createRouter } from '@nimbus/core'; +import { getLogger, MessageRouter } from '@nimbus/core'; import { GetRecipeQueryType } from '../../contexts/recipe/core/queries/getRecipe.ts'; import { getRecipeHandler } from '../../contexts/recipe/infrastructure/router/getRecipe.handler.ts'; -// TODO: We want to rework the NimbusRouter to be a class and we create an instance -// Then we add a registerCommand method that handles schema registration on the validator and we get rid of the handlerMap this way. -// We want to implement a default inputLogFunc that logs the input to the console. -// If one wants to omit input logs one can pass a function that does nothing. - -// Later on we need some helper for bridging the NimbusRouter to an Oak Route. Or other frameworks Routers. - -export const queryRouter = createRouter({ - type: 'query', - handlerMap: { - [GetRecipeQueryType]: { - handler: getRecipeHandler, - allowUnsafeInput: true, - }, +export const queryRouter = new MessageRouter('query', { + logInput: (input: any) => { + getLogger().info({ + category: 'Nimbus', + ...(input?.correlationid && { + correlationId: input.correlationid, + }), + message: + `${input?.correlationid} - [Query] ${input?.type} from ${input?.source}`, + }); }, }); + +queryRouter.register( + GetRecipeQueryType, + getRecipeHandler, + { allowUnsafeInput: true }, +); diff --git a/packages/client/README.md b/packages/client/README.md index 91635a3..7146660 100644 --- a/packages/client/README.md +++ b/packages/client/README.md @@ -3,123 +3,8 @@ alt="Nimbus" /> -# Nimbus Core +# Nimbus Client -The core package of the Nimbus framework. +The client package of the Nimbus framework. Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. - -# Examples - -These are some quick examples on how the basics of the Nimbus framework work. - -For detailed documentation, please refer to the [Nimbus documentation](https://nimbus.overlap.at). 
- -## Command - -```typescript -import { AuthContext, Command } from "@nimbus/core"; -import { z } from "zod"; - -export const AddAccountData = z.object({ - name: z.string(), -}); -export type AddAccountData = z.infer; - -export const AddAccountCommand = Command( - z.literal("account.add"), - AddAccountData, - AuthContext -); -export type AddAccountCommand = z.infer; -``` - -## Query - -```typescript -import { AuthContext, Query } from "@nimbus/core"; -import { z } from "zod"; - -export const GetAccountQuery = Query( - z.literal("account.get"), - z.object({ - id: z.string().length(24), - }), - AuthContext -); -export type GetAccountQuery = z.infer; -``` - -## Event - -```typescript -import { Event } from "@nimbus/core"; -import { z } from "zod"; -import { Account } from "../account.type.ts"; - -export const AccountAddedData = z.object({ - account: Account, -}); -export type AccountAddedData = z.infer; - -export const AccountAddedEvent = Event( - z.literal("account.added"), - AccountAddedData -); -export type AccountAddedEvent = z.infer; -``` - -## Router - -```typescript -import { createRouter } from "@nimbus/core"; - -// ... - -const accountRouter = createRouter({ - handlerMap: { - "account.get": { - handler: getAccountHandler, - inputType: GetAccountQuery, - }, - "account.add": { - handler: addAccountHandler, - inputType: AddAccountCommand, - }, - }, -}); -``` - -## EventBus - -```typescript -import { NimbusEventBus } from "@nimbus/core"; - -// ... - -export const eventBus = new NimbusEventBus({ - maxRetries: 3, - retryDelay: 3000, -}); - -eventBus.subscribeEvent( - "account.added", - AccountAddedEvent, - accountAddedHandler -); - -eventBus.putEvent({ - specversion: "1.0", - id: "123", - source: command.source, - type: "account.added", - data: { - correlationId: command.metadata.correlationId, - payload: { account: account }, - }, -}); -``` - -# License - -The MIT License (MIT) diff --git a/packages/core/README.md b/packages/core/README.md index 91635a3..d08f588 100644 --- a/packages/core/README.md +++ b/packages/core/README.md @@ -18,106 +18,25 @@ For detailed documentation, please refer to the [Nimbus documentation](https://n ## Command ```typescript -import { AuthContext, Command } from "@nimbus/core"; -import { z } from "zod"; - -export const AddAccountData = z.object({ - name: z.string(), -}); -export type AddAccountData = z.infer; - -export const AddAccountCommand = Command( - z.literal("account.add"), - AddAccountData, - AuthContext -); -export type AddAccountCommand = z.infer; + ``` ## Query ```typescript -import { AuthContext, Query } from "@nimbus/core"; -import { z } from "zod"; - -export const GetAccountQuery = Query( - z.literal("account.get"), - z.object({ - id: z.string().length(24), - }), - AuthContext -); -export type GetAccountQuery = z.infer; + ``` ## Event ```typescript -import { Event } from "@nimbus/core"; -import { z } from "zod"; -import { Account } from "../account.type.ts"; - -export const AccountAddedData = z.object({ - account: Account, -}); -export type AccountAddedData = z.infer; - -export const AccountAddedEvent = Event( - z.literal("account.added"), - AccountAddedData -); -export type AccountAddedEvent = z.infer; + ``` ## Router ```typescript -import { createRouter } from "@nimbus/core"; - -// ... 
- -const accountRouter = createRouter({ - handlerMap: { - "account.get": { - handler: getAccountHandler, - inputType: GetAccountQuery, - }, - "account.add": { - handler: addAccountHandler, - inputType: AddAccountCommand, - }, - }, -}); -``` - -## EventBus -```typescript -import { NimbusEventBus } from "@nimbus/core"; - -// ... - -export const eventBus = new NimbusEventBus({ - maxRetries: 3, - retryDelay: 3000, -}); - -eventBus.subscribeEvent( - "account.added", - AccountAddedEvent, - accountAddedHandler -); - -eventBus.putEvent({ - specversion: "1.0", - id: "123", - source: command.source, - type: "account.added", - data: { - correlationId: command.metadata.correlationId, - payload: { account: account }, - }, -}); ``` # License diff --git a/packages/core/deno.json b/packages/core/deno.json index eaaca74..b7c2116 100644 --- a/packages/core/deno.json +++ b/packages/core/deno.json @@ -13,7 +13,9 @@ "homepage": "https://nimbus.overlap.at", "exports": "./src/index.ts", "fmt": { - "include": ["src/"], + "include": [ + "src/" + ], "useTabs": false, "lineWidth": 80, "indentWidth": 4, @@ -22,13 +24,20 @@ "proseWrap": "always" }, "lint": { - "include": ["src/"], + "include": [ + "src/" + ], "rules": { - "exclude": ["no-explicit-any", "no-slow-types"] + "exclude": [ + "no-explicit-any", + "no-slow-types" + ] } }, "test": { - "include": ["src/"] + "include": [ + "src/" + ] }, "imports": { "@std/assert": "jsr:@std/assert@^1.0.10", @@ -37,4 +46,4 @@ "ajv": "npm:ajv@^8.17.1", "ajv-formats": "npm:ajv-formats@^3.0.1" } -} +} \ No newline at end of file diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 989ed74..00b2a84 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,3 +1,4 @@ +export * from './lib/auth/authContext.ts'; export * from './lib/event/eventBus.ts'; export * from './lib/exception/exception.ts'; export * from './lib/exception/forbiddenException.ts'; diff --git a/packages/core/src/lib/auth/authContext.ts b/packages/core/src/lib/auth/authContext.ts new file mode 100644 index 0000000..97025c2 --- /dev/null +++ b/packages/core/src/lib/auth/authContext.ts @@ -0,0 +1,7 @@ +/** + * Authentication context that can be passed through the application. + */ +export type AuthContext = { + sub: string; + groups: string[]; +}; diff --git a/packages/core/src/lib/event/eventBus.ts b/packages/core/src/lib/event/eventBus.ts index c98661b..346c0b9 100644 --- a/packages/core/src/lib/event/eventBus.ts +++ b/packages/core/src/lib/event/eventBus.ts @@ -2,15 +2,15 @@ import EventEmitter from 'node:events'; import { GenericException } from '../exception/genericException.ts'; import { getLogger } from '../log/logger.ts'; import type { Event } from '../message/event.ts'; -import type { RouteHandler, Router } from '../router/router.ts'; -import { createRouter } from '../router/router.ts'; +import type { MessageHandler } from '../router/router.ts'; +import { MessageRouter } from '../router/router.ts'; /** * The input type for subscribing to an event. 
*/ export type SubscribeEventInput = { type: string; - handler: RouteHandler; + handler: MessageHandler; allowUnsafeInput?: boolean; onError?: (error: any, event: Event) => void; options?: NimbusEventBusOptions; @@ -47,6 +47,7 @@ export type NimbusEventBusOptions = { * time: '2025-01-01T00:00:00Z', * source: 'https://nimbus.overlap.at', * type: 'at.overlap.nimbus.account-added', + * subject: '/accounts/123', * data: { * accountId: '123', * }, @@ -96,6 +97,7 @@ export class NimbusEventBus { * time: '2025-01-01T00:00:00Z', * source: 'https://nimbus.overlap.at', * type: 'at.overlap.nimbus.account-added', + * subject: '/accounts/123', * data: { * accountId: '123', * }, @@ -114,7 +116,7 @@ export class NimbusEventBus { * Subscribe to an event. * * @param {string} eventType - The type of event to subscribe to. - * @param {RouteHandler} handler - The handler to call when the event got published. + * @param {MessageHandler} handler - The handler to call when the event got published. * @param {Function} [onError] - The function to call when the event could not be handled after the maximum number of retries. * @param {NimbusEventBusOptions} [options] - The options for the event bus. * @param {number} [options.maxRetries] - The maximum number of retries for handling the event in case of an error. @@ -144,15 +146,12 @@ export class NimbusEventBus { const maxRetries = options?.maxRetries ?? this._maxRetries; const retryDelay = options?.retryDelay ?? this._retryDelay; - const nimbusRouter = createRouter({ - type: 'event', - handlerMap: { - [type]: { - handler, - ...(allowUnsafeInput && { allowUnsafeInput }), - }, - }, - inputLogFunc: this._logInput, + const nimbusRouter = new MessageRouter('event', { + logInput: this._logInput, + }); + + nimbusRouter.register(type, handler, { + allowUnsafeInput: allowUnsafeInput ?? 
false, }); const handleEvent = async (event: Event) => { @@ -191,7 +190,7 @@ export class NimbusEventBus { } private async _processEvent( - nimbusRouter: Router, + nimbusRouter: MessageRouter, event: Event, maxRetries: number, retryDelay: number, @@ -200,7 +199,7 @@ export class NimbusEventBus { while (attempt < maxRetries) { try { - await nimbusRouter(event); + await nimbusRouter.route(event); break; } catch (error: any) { attempt++; diff --git a/packages/core/src/lib/exception/invalidInputException.test.ts b/packages/core/src/lib/exception/invalidInputException.test.ts index bf8aa57..fba61b4 100644 --- a/packages/core/src/lib/exception/invalidInputException.test.ts +++ b/packages/core/src/lib/exception/invalidInputException.test.ts @@ -1,5 +1,4 @@ import { assertEquals, assertInstanceOf } from '@std/assert'; -import { z } from 'zod'; import { InvalidInputException } from './invalidInputException.ts'; Deno.test('InvalidInputException without constructor input', () => { @@ -66,38 +65,3 @@ Deno.test('InvalidInputException from error with constructor input', () => { assertEquals(exception.details, details); assertEquals(exception.stack, nativeError.stack); }); - -Deno.test('InvalidInputException from ZodError', () => { - const expectedPayload = z.object({ - foo: z.string(), - }); - const payload = { - foo: 123, - }; - - try { - expectedPayload.parse(payload); - } catch (error: any) { - const exception = new InvalidInputException() - .fromZodError(error); - - assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); - assertEquals(exception.message, 'The provided input is invalid'); - assertEquals(exception.statusCode, 400); - assertEquals(exception.details, { - 'issues': [ - { - 'code': 'invalid_type', - 'expected': 'string', - 'received': 'number', - 'path': [ - 'foo', - ], - 'message': 'Expected string, received number', - }, - ], - }); - assertEquals(exception.stack, error.stack); - } -}); diff --git a/packages/core/src/lib/exception/invalidInputException.ts b/packages/core/src/lib/exception/invalidInputException.ts index 0cfe273..cffd3a2 100644 --- a/packages/core/src/lib/exception/invalidInputException.ts +++ b/packages/core/src/lib/exception/invalidInputException.ts @@ -1,4 +1,3 @@ -import type { ZodError } from 'zod'; import { Exception } from './exception.ts'; /** @@ -21,12 +20,19 @@ export class InvalidInputException extends Exception { * @param {ZodError} error - The Zod error. * * @returns {InvalidInputException} The InvalidInputException. + * + * @deprecated We do not want to depend on Zod anymore. 
*/ - public fromZodError(error: ZodError): InvalidInputException { - this.stack = error.stack; - this.details = { - issues: error.issues, - }; + public fromZodError(error: any): InvalidInputException { + if (error.stack) { + this.stack = error.stack; + } + + if (error.issues) { + this.details = { + issues: error.issues, + }; + } return this; } diff --git a/packages/core/src/lib/router/asyncApiRouter.ts b/packages/core/src/lib/router/asyncApiRouter.ts deleted file mode 100644 index 2e0e873..0000000 --- a/packages/core/src/lib/router/asyncApiRouter.ts +++ /dev/null @@ -1,54 +0,0 @@ -// import { Parser } from '@asyncapi/parser'; -// import { GenericException } from '../exception/genericException.ts'; -// import { getLogger } from '../log/logger.ts'; - -// export type AsyncApiRouterInput = { -// apiSpec: string; -// }; - -// export const asyncApiRouter = async ({ -// apiSpec, -// }: AsyncApiRouterInput) => { -// const parser = new Parser(); -// const { document, diagnostics } = await parser.parse(apiSpec); - -// if (!document) { -// getLogger().error({ -// message: 'AsyncAPI document parsed with errors', -// data: { -// diagnostics, -// }, -// }); - -// throw new GenericException('AsyncAPI document parsed with errors'); -// } - -// const asyncApiMessages = document.messages(); - -// const registeredMessages: Record = {}; -// for (const message of asyncApiMessages) { -// const msgJson = message.json(); - -// if (msgJson.payload) { -// // TODO: Validate the payload to be a JSONSchema with the mandatory fields. -// const type = msgJson.payload.properties.type.const; -// registeredMessages[type] = msgJson.payload; -// } -// } - -// getLogger().info({ -// message: `Router registered ${ -// Object.keys(registeredMessages).length -// } messages`, -// data: { -// registeredMessages: Object.keys(registeredMessages), -// }, -// }); - -// return (input: any) => { -// return Promise.resolve({ -// statusCode: 200, -// body: 'Hello, world!', -// }); -// }; -// }; diff --git a/packages/core/src/lib/router/messageHandler.ts b/packages/core/src/lib/router/messageHandler.ts deleted file mode 100644 index 80b5b0b..0000000 --- a/packages/core/src/lib/router/messageHandler.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { InvalidInputException } from '../exception/invalidInputException.ts'; -import { getLogger } from '../log/logger.ts'; -import type { Message } from '../message/message.ts'; -import { getValidator } from '../validator/validator.ts'; - -export const createMessageHandler = ( - { - handlerFunction, - allowUnsafeInput, - }: { - handlerFunction: (message: TInput) => Promise; - allowUnsafeInput?: boolean; - }, -): (message: TInput) => Promise => { - const validator = getValidator(); - - return async (message: TInput) => { - let validMessage: TInput; - - if (message.dataschema) { - const { data, error } = validator.validate( - message.dataschema, - message, - ); - - if (error) { - throw error; - } else { - validMessage = data; - } - } else { - if (allowUnsafeInput) { - getLogger().warn({ - category: 'Nimbus', - message: 'No dataschema found for message', - }); - } else { - throw new InvalidInputException( - 'No dataschema provided for message', - { - errorCode: 'MISSING_DATASCHEMA', - reason: `The dataschema is missing on the message - and "allowUnsafeInput" is not enabled to the message type. - It is recommended to always provide a dataschema - for input validation. 
Otherwise set "allowUnsafeInput" - to true for the route handler.`, - }, - ); - } - validMessage = message as TInput; - } - - return handlerFunction(validMessage); - }; -}; diff --git a/packages/core/src/lib/router/messageRouter.test.ts b/packages/core/src/lib/router/messageRouter.test.ts new file mode 100644 index 0000000..29e84d6 --- /dev/null +++ b/packages/core/src/lib/router/messageRouter.test.ts @@ -0,0 +1,256 @@ +import { assertEquals, assertRejects } from '@std/assert'; +import { GenericException } from '../exception/genericException.ts'; +import { InvalidInputException } from '../exception/invalidInputException.ts'; +import { NotFoundException } from '../exception/notFoundException.ts'; +import type { Command } from '../message/command.ts'; +import { getValidator } from '../validator/validator.ts'; +import { MessageRouter } from './router.ts'; +import { + testCommand, + testCommandHandler, + testEvent, + testEventHandler, + testEventSchema, + testEventWithException, + testEventWithInvalidData, + testQuery, + testQueryHandler, +} from './testFixtures.ts'; + +Deno.test('MessageRouter - registers command handler', () => { + const router = new MessageRouter('command'); + router.register( + 'at.overlap.nimbus.test-command', + testCommandHandler, + ); + + // No error means success - registration is synchronous + assertEquals(true, true); +}); + +Deno.test('MessageRouter - routes valid command to handler', async () => { + const router = new MessageRouter('command'); + router.register( + 'at.overlap.nimbus.test-command', + testCommandHandler, + { allowUnsafeInput: true }, + ); + + const result = await router.route(testCommand); + + assertEquals(result, { + aNumber: 42, + }); +}); + +Deno.test('MessageRouter - routes valid query to handler', async () => { + const router = new MessageRouter('query'); + router.register( + 'at.overlap.nimbus.test-query', + testQueryHandler, + { allowUnsafeInput: true }, + ); + + const result = await router.route(testQuery); + + assertEquals(result, { + foo: 'bar', + }); +}); + +Deno.test('MessageRouter - routes valid event to handler with schema validation', async () => { + const validator = getValidator(); + validator.addSchema(testEventSchema); + + const router = new MessageRouter('event'); + router.register( + 'at.overlap.nimbus.test-event', + testEventHandler, + ); + + const result = await router.route(testEvent); + + assertEquals(result, { + testException: false, + aNumber: 42, + }); +}); + +Deno.test('MessageRouter - throws NotFoundException when handler not registered', async () => { + const router = new MessageRouter('command'); + + const unknownCommand: Command = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.unknown-command', + data: {}, + }; + + await assertRejects( + async () => { + await router.route(unknownCommand); + }, + NotFoundException, + 'Message handler not found', + ); +}); + +Deno.test('MessageRouter - throws InvalidInputException when command envelope is invalid', async () => { + const router = new MessageRouter('command'); + + const invalidCommand = { + specversion: '1.0', + id: '123', + // Missing correlationid + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-command', + data: {}, + }; + + await assertRejects( + async () => { + await router.route(invalidCommand); + }, + InvalidInputException, + 'The provided input is invalid', + ); +}); + +Deno.test('MessageRouter - 
throws InvalidInputException when dataschema is missing and allowUnsafeInput is false', async () => { + const router = new MessageRouter('event'); + router.register( + 'at.overlap.nimbus.test-event', + testEventHandler, + // allowUnsafeInput defaults to false + ); + + const eventWithoutSchema = { + ...testEvent, + dataschema: undefined, + }; + + await assertRejects( + async () => { + await router.route(eventWithoutSchema); + }, + InvalidInputException, + 'No dataschema provided for message', + ); +}); + +Deno.test('MessageRouter - allows missing dataschema when allowUnsafeInput is true', async () => { + const router = new MessageRouter('command'); + router.register( + 'at.overlap.nimbus.test-command', + testCommandHandler, + { allowUnsafeInput: true }, + ); + + const commandWithoutSchema = { + ...testCommand, + dataschema: undefined, + }; + + const result = await router.route(commandWithoutSchema); + + assertEquals(result, { + aNumber: 42, + }); +}); + +Deno.test('MessageRouter - validates data against dataschema when provided', async () => { + const router = new MessageRouter('event'); + router.register( + 'at.overlap.nimbus.test-event', + testEventHandler, + ); + + await assertRejects( + async () => { + await router.route(testEventWithInvalidData); + }, + InvalidInputException, + 'The provided input is invalid', + ); +}); + +Deno.test('MessageRouter - handler exceptions propagate correctly', async () => { + const router = new MessageRouter('event'); + router.register( + 'at.overlap.nimbus.test-event', + testEventHandler, + ); + + await assertRejects( + async () => { + await router.route(testEventWithException); + }, + GenericException, + 'Test exception thrown', + ); +}); + +Deno.test('MessageRouter - calls logInput hook when provided', async () => { + let loggedInput: any = null; + + const router = new MessageRouter('command', { + logInput: (input) => { + loggedInput = input; + }, + }); + + router.register( + 'at.overlap.nimbus.test-command', + testCommandHandler, + { allowUnsafeInput: true }, + ); + + await router.route(testCommand); + + assertEquals(loggedInput, testCommand); +}); + +Deno.test('MessageRouter - calls logOutput hook when provided', async () => { + let loggedOutput: any = null; + + const router = new MessageRouter('command', { + logOutput: (output) => { + loggedOutput = output; + }, + }); + + router.register( + 'at.overlap.nimbus.test-command', + testCommandHandler, + { allowUnsafeInput: true }, + ); + + await router.route(testCommand); + + assertEquals(loggedOutput, { + aNumber: 42, + }); +}); + +Deno.test('MessageRouter - throws GenericException for invalid router type', async () => { + // This test verifies internal type checking + // We can't actually create an invalid router type through the constructor + // due to TypeScript, but the runtime check exists for JavaScript users + + // Create a router with a valid type + const router = new MessageRouter('command'); + router.register( + 'at.overlap.nimbus.test-command', + testCommandHandler, + { allowUnsafeInput: true }, + ); + + // The type check happens internally during route() + // This test just confirms routing works normally + const result = await router.route(testCommand); + assertEquals(result, { aNumber: 42 }); +}); diff --git a/packages/core/src/lib/router/router.test.ts b/packages/core/src/lib/router/router.test.ts deleted file mode 100644 index 35fe89b..0000000 --- a/packages/core/src/lib/router/router.test.ts +++ /dev/null @@ -1,287 +0,0 @@ -import type { Event, Query } from '@nimbus/core'; -import { 
assertEquals, assertInstanceOf } from '@std/assert'; -import { GenericException } from '../exception/genericException.ts'; -import { InvalidInputException } from '../exception/invalidInputException.ts'; -import { NotFoundException } from '../exception/notFoundException.ts'; -import type { Command } from '../message/command.ts'; -import { getValidator } from '../validator/validator.ts'; -import { createRouter } from './router.ts'; -import { commandHandlerMap, testCommand } from './testCommand.ts'; -import { - eventHandlerMap, - testEvent, - testEventSchema, - testEventWithException, - testEventWithInvalidData, -} from './testEvent.ts'; -import { queryHandlerMap, testQuery } from './testQuery.ts'; - -Deno.test('Router handles input with an unknown handler name', async () => { - const router = createRouter({ - type: 'command', - handlerMap: {}, - }); - - const messageWithUnknownType: Command = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.unknown-type', - data: {}, - }; - - try { - const result = await router(messageWithUnknownType); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, NotFoundException); - assertEquals(exception.message, 'Route handler not found'); - } -}); - -Deno.test('Router validates command input', async () => { - const router = createRouter({ - type: 'command', - handlerMap: {}, - }); - - const messageWithUnknownType = { - specversion: '1.0', - id: '123', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.unknown-type', - data: {}, - }; - - try { - const result = await router(messageWithUnknownType); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.message, 'The provided input is invalid'); - assertEquals(exception.details, { - issues: [ - { - instancePath: '', - schemaPath: '#/required', - keyword: 'required', - params: { missingProperty: 'correlationid' }, - message: "must have required property 'correlationid'", - }, - ], - }); - } -}); - -Deno.test('Router validates query input', async () => { - const router = createRouter({ - type: 'query', - handlerMap: {}, - }); - - const messageWithUnknownType = { - id: '123', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.unknown-type', - data: {}, - }; - - try { - const result = await router(messageWithUnknownType); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.message, 'The provided input is invalid'); - assertEquals(exception.details, { - issues: [ - { - instancePath: '', - schemaPath: '#/required', - keyword: 'required', - params: { missingProperty: 'specversion' }, - message: "must have required property 'specversion'", - }, - ], - }); - } -}); - -Deno.test('Router validates event input', async () => { - const router = createRouter({ - type: 'event', - handlerMap: {}, - }); - - const messageWithUnknownType = { - id: '123', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.unknown-type', - data: {}, - }; - - try { - const result = await router(messageWithUnknownType); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, 
InvalidInputException); - assertEquals(exception.message, 'The provided input is invalid'); - assertEquals(exception.details, { - issues: [ - { - instancePath: '', - schemaPath: '#/required', - keyword: 'required', - params: { missingProperty: 'specversion' }, - message: "must have required property 'specversion'", - }, - ], - }); - } -}); - -Deno.test('Router handles valid command input', async () => { - const commandRouter = createRouter>({ - type: 'command', - handlerMap: commandHandlerMap, - }); - - try { - const result = await commandRouter(testCommand); - - assertEquals(result, { - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - aNumber: 42, - }, - }); - } catch (exception: any) { - console.log(exception); - assertEquals(typeof exception === 'undefined', true); - } -}); - -Deno.test('Router handles valid query input', async () => { - const queryRouter = createRouter>({ - type: 'query', - handlerMap: queryHandlerMap, - }); - - try { - const result = await queryRouter(testQuery); - assertEquals(result, { - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - foo: 'bar', - }, - }); - } catch (exception: any) { - console.log(exception); - assertEquals(typeof exception === 'undefined', true); - } -}); - -Deno.test('Router handles message with no dataschema correctly', async () => { - const validator = getValidator(); - validator.addSchema(testEventSchema); - - const eventRouter = createRouter>({ - type: 'event', - handlerMap: eventHandlerMap, - }); - - try { - const result = await eventRouter({ - ...testEvent, - dataschema: undefined, - }); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - console.log(exception); - assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.message, 'No dataschema provided for message'); - } -}); - -Deno.test('Router handles valid event input', async () => { - const validator = getValidator(); - validator.addSchema(testEventSchema); - - const eventRouter = createRouter>({ - type: 'event', - handlerMap: eventHandlerMap, - }); - - try { - const result = await eventRouter(testEvent); - assertEquals(result, { - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - testException: false, - aNumber: 42, - }, - }); - } catch (exception: any) { - assertEquals(typeof exception === 'undefined', true); - } -}); - -Deno.test('Router handles invalid event input', async () => { - const validator = getValidator(); - validator.addSchema(testEventSchema); - - const eventRouter = createRouter>({ - type: 'event', - handlerMap: eventHandlerMap, - }); - - try { - const result = await eventRouter(testEventWithInvalidData); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - assertInstanceOf(exception, InvalidInputException); - assertEquals( - exception.message, - 'The provided input is invalid', - ); - - assertEquals(exception.details, { - issues: [ - { - instancePath: '/data/aNumber', - schemaPath: '#/properties/data/properties/aNumber/type', - keyword: 'type', - params: { type: 'number' }, - message: 'must be number', - }, - ], - }); - } -}); - -Deno.test('Router handles valid event input but handler returns an exception', async () => { - const eventRouter = createRouter>({ - type: 'event', - handlerMap: eventHandlerMap, - }); - - try { - const result = await eventRouter(testEventWithException); - assertEquals(typeof result === 'undefined', true); - } catch (exception: any) { - 
assertInstanceOf(exception, GenericException); - } -}); diff --git a/packages/core/src/lib/router/router.ts b/packages/core/src/lib/router/router.ts index ed9e444..7375735 100644 --- a/packages/core/src/lib/router/router.ts +++ b/packages/core/src/lib/router/router.ts @@ -7,152 +7,132 @@ import type { Message } from '../message/message.ts'; import { type Query, querySchema } from '../message/query.ts'; import { getValidator } from '../validator/validator.ts'; -// TODO: we want to rework the router concept. Do We ??? -// Nimbus should be framework agnostic and other Frameworks like Oak, Fastify, Elysia should used for routing. -// So we want to provide a higher order handler function that can take some options and a logic function. -// This higher order handler function will do the message validation, logging and tracing etc. and will execute the logic function. -// This way the full handler can be used in any other frameworks routing or event/message bus system. - /** - * The result type of a route handler. + * The message handler type - transport-agnostic, just returns domain data. * - * @template TData - The type of the data returned by the route handler. + * @template TInput - The type of the input message. + * @template TOutput - The type of the data returned by the handler. */ -export type RouteHandlerResult = { - statusCode: number; - headers?: Record; - data?: TData; -}; - -/** - * The route handler type. - * - * @template TInput - The type of the input to the route handler. - * @template TOutputData - The type of the data returned by the route handler. - */ -export type RouteHandler< +export type MessageHandler< TInput extends Message = Message, - TOutputData = unknown, + TOutput = unknown, > = ( input: TInput, -) => Promise>; +) => Promise; /** - * The RouteHandlerMap type. - * - * @template TInput - The type of the input to the route handler. - * @template TOutputData - The type of the data returned by the route handler. + * Options for registering a message handler. */ -export type RouteHandlerMap = Record< - string, - { - handler: RouteHandler; - allowUnsafeInput?: boolean; - } ->; +export type RegisterHandlerOptions = { + allowUnsafeInput?: boolean; +}; /** - * The router type. - * - * @template TOutputData - The type of the data returned by the router. + * Options for creating a MessageRouter. */ -export type Router = ( - input: unknown, -) => Promise>; +export type MessageRouterOptions = { + logInput?: (input: unknown) => void; + logOutput?: (output: unknown) => void; +}; /** - * The input type for creating a Nimbus router. - * - * @template TInput - The type of the input to the router. - * @template TResultData - The type of the data returned by the router. + * Internal handler registration. */ -export type CreateRouterInput = { - type: 'command' | 'query' | 'event'; - handlerMap: RouteHandlerMap; - inputLogFunc?: (input: unknown) => void; +type HandlerRegistration = { + handler: MessageHandler; + allowUnsafeInput: boolean; }; /** - * Creates a Nimbus router. - * - * @param {CreateRouterInput} input - * @param {'command' | 'query' | 'event'} type - The type of input messages the router handles - * @param {RouteHandlerMap} input.handlerMap - The map of route handlers. - * @param {Function} input.inputLogFunc - Function to log input received by the router (optional). - * - * @returns {Router} The Nimbus router. + * The MessageRouter routes messages to their handlers. 
* * @example * ```ts - * import { createRouter } from "@nimbus/core"; + * import { MessageRouter } from "@nimbus/core"; * - * const commandRouter = createRouter({ - * type: 'command', - * handlerMap: { - * 'at.overlap.nimbus.get-account': { - * handler: getAccountHandler - * }, - * }, - * }); + * const commandRouter = new MessageRouter('command'); * - * const queryRouter = createRouter({ - * type: 'query', - * handlerMap: { - * 'at.overlap.nimbus.get-account': { - * handler: getAccountHandler, - * allowUnsafeInput: true, // Disables input validation (not recommended) - * }, - * }, - * }); + * commandRouter.register( + * 'at.overlap.nimbus.add-recipe', + * addRecipeHandler, + * ); * - * const eventRouter = createRouter({ - * type: 'event', - * handlerMap: { - * 'at.overlap.nimbus.account-added': { - * handler: accountAddedHandler, - * }, - * }, - * inputLogFunc: (input) => { - * getLogger().info({ - * category: 'Events', - * message: `Received event: ${input.type}`, - * }); - * }, - * }); + * const result = await commandRouter.route(someCommand); * ``` */ -export const createRouter = ({ - type, - handlerMap, - inputLogFunc, -}: CreateRouterInput): Router => { - const validator = getValidator(); +export class MessageRouter { + private readonly _type: 'command' | 'query' | 'event'; + private readonly _handlers: Map; + private readonly _logInput?: (input: unknown) => void; + private readonly _logOutput?: (output: unknown) => void; + + constructor( + type: 'command' | 'query' | 'event', + options?: MessageRouterOptions, + ) { + this._type = type; + this._handlers = new Map(); + this._logInput = options?.logInput; + this._logOutput = options?.logOutput; + } /** - * The Nimbus router takes unknown input, - * validates the input and routes it to the appropriate handler. + * Register a message handler for a specific message type. * - * @param {unknown} input - The input to the router. + * @param {string} messageType - The message type (e.g., 'at.overlap.nimbus.add-recipe') + * @param {MessageHandler} handler - The handler function + * @param {RegisterHandlerOptions} options - Optional configuration + * + * @example + * ```ts + * router.register( + * 'at.overlap.nimbus.add-recipe', + * addRecipeHandler, + * { allowUnsafeInput: true } + * ); + * ``` + */ + public register( + messageType: string, + handler: MessageHandler, + options?: RegisterHandlerOptions, + ): void { + this._handlers.set(messageType, { + handler, + allowUnsafeInput: options?.allowUnsafeInput ?? false, + }); + + getLogger().debug({ + category: 'Nimbus', + message: `Registered ${this._type} handler for: ${messageType}`, + }); + } + + /** + * Route a message to its handler. * - * @returns {Promise} The result of the route handler. + * @param {unknown} input - The raw input to route * - * @throws {NotFoundException} - If the route handler is not found. - * @throws {InvalidInputException} - If the input is invalid. - * @throws {GenericException} - If an error occurs while handling the input. 
+ * @returns {Promise} The result from the handler + * + * @throws {NotFoundException} - If no handler is registered for the message type + * @throws {InvalidInputException} - If the message is invalid + * @throws {GenericException} - If an error occurs during routing */ - const router: Router = ( - input: unknown, - ): Promise => { - if (inputLogFunc) { - inputLogFunc(input); + public async route(input: unknown): Promise { + if (this._logInput) { + this._logInput(input); } + const validator = getValidator(); + + // Validate message envelope let parseResult; - if (type === 'command') { + if (this._type === 'command') { parseResult = validator.validate(commandSchema.$id, input); - } else if (type === 'query') { + } else if (this._type === 'query') { parseResult = validator.validate(querySchema.$id, input); - } else if (type === 'event') { + } else if (this._type === 'event') { parseResult = validator.validate(eventSchema.$id, input); } else { throw new GenericException( @@ -170,9 +150,11 @@ export const createRouter = ({ const message = parseResult.data; - if (!handlerMap[message.type]) { + // Find handler + const registration = this._handlers.get(message.type); + if (!registration) { throw new NotFoundException( - 'Route handler not found', + 'Message handler not found', { reason: `Could not find a handler for message type: "${message.type}"`, @@ -180,11 +162,12 @@ export const createRouter = ({ ); } - const { handler, allowUnsafeInput } = handlerMap[message.type]; + const { handler, allowUnsafeInput } = registration; - let validMessage: TInput; + // Validate message data if dataschema is provided + let validMessage: Message; if (message.dataschema) { - const { data, error } = validator.validate( + const { data, error } = validator.validate( message.dataschema, message, ); @@ -206,18 +189,22 @@ export const createRouter = ({ { errorCode: 'MISSING_DATASCHEMA', reason: `The dataschema is missing on the message - and "allowUnsafeInput" is not enabled to the message type. + and "allowUnsafeInput" is not enabled for the message type. It is recommended to always provide a dataschema for input validation. Otherwise set "allowUnsafeInput" - to true for the route handler.`, + to true when registering the handler.`, }, ); } - validMessage = message as TInput; + validMessage = message; } - return handler(validMessage); - }; + const result = await handler(validMessage); - return router; -}; + if (this._logOutput) { + this._logOutput(result); + } + + return result; + } +} diff --git a/packages/core/src/lib/router/testCommand.ts b/packages/core/src/lib/router/testCommand.ts deleted file mode 100644 index b513b58..0000000 --- a/packages/core/src/lib/router/testCommand.ts +++ /dev/null @@ -1,51 +0,0 @@ -import type { Command } from '../message/command.ts'; -import type { RouteHandler, RouteHandlerMap } from './router.ts'; - -/** - * The type for the testCommand data - */ -export type TestCommandData = { - aNumber: number; -}; - -/** - * A test command - */ -export const testCommand: Command = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-command', - data: { - aNumber: 42, - }, - datacontenttype: 'application/json', -}; - -/** - * The handler for the TestCommand. 
- */ -export const testCommandHandler: RouteHandler< - Command, - TestCommandData -> = (event) => { - return Promise.resolve({ - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: event.data, - }); -}; - -/** - * The handler map for the TestCommand. - */ -export const commandHandlerMap: RouteHandlerMap> = { - 'at.overlap.nimbus.test-command': { - handler: testCommandHandler, - allowUnsafeInput: true, - }, -}; diff --git a/packages/core/src/lib/router/testEvent.ts b/packages/core/src/lib/router/testFixtures.ts similarity index 52% rename from packages/core/src/lib/router/testEvent.ts rename to packages/core/src/lib/router/testFixtures.ts index b980043..d1e934c 100644 --- a/packages/core/src/lib/router/testEvent.ts +++ b/packages/core/src/lib/router/testFixtures.ts @@ -1,9 +1,77 @@ import type { SchemaObject } from 'ajv'; import { GenericException } from '../exception/genericException.ts'; +import type { Command } from '../message/command.ts'; import { type Event, eventSchema } from '../message/event.ts'; -import type { RouteHandler, RouteHandlerMap } from './router.ts'; +import type { Query } from '../message/query.ts'; +import type { MessageHandler } from './router.ts'; -type TestEventData = { +// ============================================================================= +// Command Fixtures +// ============================================================================= + +export type TestCommandData = { + aNumber: number; +}; + +export const testCommand: Command = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-command', + data: { + aNumber: 42, + }, + datacontenttype: 'application/json', +}; + +export const testCommandHandler: MessageHandler< + Command, + TestCommandData +> = async (command) => { + return command.data; +}; + +// ============================================================================= +// Query Fixtures +// ============================================================================= + +export type TestQueryData = { + filter: string; +}; + +export type TestQueryResult = { + foo: string; +}; + +export const testQuery: Query = { + specversion: '1.0', + id: '123', + correlationid: '456', + time: '2025-01-01T00:00:00Z', + source: 'https://nimbus.overlap.at', + type: 'at.overlap.nimbus.test-query', + data: { + filter: '42', + }, + datacontenttype: 'application/json', +}; + +export const testQueryHandler: MessageHandler< + Query, + TestQueryResult +> = async () => { + return { + foo: 'bar', + }; +}; + +// ============================================================================= +// Event Fixtures +// ============================================================================= + +export type TestEventData = { testException: boolean; aNumber: number; }; @@ -27,9 +95,6 @@ export const testEventSchema: SchemaObject = { }, }; -/** - * A test event without an exception - */ export const testEvent: Event = { specversion: '1.0', id: '123', @@ -46,9 +111,6 @@ export const testEvent: Event = { dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', }; -/** - * A test event without an exception - */ export const testEventWithException: Event = { specversion: '1.0', id: '123', @@ -65,9 +127,6 @@ export const testEventWithException: Event = { dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', }; -/** - * A test event without an exception - */ export const testEventWithInvalidData: Event = { specversion: 
'1.0', id: '123', @@ -84,30 +143,13 @@ export const testEventWithInvalidData: Event = { dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', }; -/** - * The handler for the TestEvent. - */ -export const testEventHandler: RouteHandler> = ( - event, -) => { +export const testEventHandler: MessageHandler< + Event, + TestEventData +> = async (event) => { if (event.data?.testException) { - throw new GenericException(); + throw new GenericException('Test exception thrown'); } - return Promise.resolve({ - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: event.data, - }); -}; - -/** - * The handler map for the TestEvent. - */ -export const eventHandlerMap: RouteHandlerMap> = { - 'at.overlap.nimbus.test-event': { - handler: testEventHandler, - }, + return event.data; }; diff --git a/packages/core/src/lib/router/testQuery.ts b/packages/core/src/lib/router/testQuery.ts deleted file mode 100644 index 104519c..0000000 --- a/packages/core/src/lib/router/testQuery.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { Query } from '../message/query.ts'; -import type { RouteHandler, RouteHandlerMap } from './router.ts'; - -/** - * The type for the testQuery data - */ -export type TestQueryData = { - filter: string; -}; - -/** - * A test query - */ -export const testQuery: Query = { - specversion: '1.0', - id: '123', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-query', - data: { - filter: '42', - }, - datacontenttype: 'application/json', -}; - -/** - * The handler for the TestQuery. - */ -export const testQueryHandler: RouteHandler> = () => { - return Promise.resolve({ - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - data: { - foo: 'bar', - }, - }); -}; - -/** - * The handler map for the TestQuery. 
- */ -export const queryHandlerMap: RouteHandlerMap> = { - 'at.overlap.nimbus.test-query': { - handler: testQueryHandler, - allowUnsafeInput: true, - }, -}; diff --git a/packages/core/src/lib/validator/validator.ts b/packages/core/src/lib/validator/validator.ts index 93bddfd..1dec137 100644 --- a/packages/core/src/lib/validator/validator.ts +++ b/packages/core/src/lib/validator/validator.ts @@ -10,8 +10,6 @@ import { querySchema } from '../message/query.ts'; import _addFormats from 'ajv-formats'; const addFormats = _addFormats as unknown as typeof _addFormats.default; -// TODO: check out https://github.com/standard-schema/standard-schema?tab=readme-ov-file to have it more agnostic like the elysia lib https://elysiajs.com/essential/validation.html - export type ValidatorOptions = { ajv?: Ajv; }; diff --git a/packages/oak/deno.json b/packages/oak/deno.json index 559da6b..712d36d 100644 --- a/packages/oak/deno.json +++ b/packages/oak/deno.json @@ -32,7 +32,7 @@ }, "imports": { "@std/ulid": "jsr:@std/ulid@^1.0.0", - "@oak/oak": "jsr:@oak/oak@^17.1.4", + "@oak/oak": "jsr:@oak/oak@^17.1.6", "ajv": "npm:ajv@^8.17.1", "zod": "npm:zod@^3.24.1" } diff --git a/packages/oak/src/index.ts b/packages/oak/src/index.ts index 946228d..02c4506 100644 --- a/packages/oak/src/index.ts +++ b/packages/oak/src/index.ts @@ -1,2 +1 @@ -export * from './lib/middleware/requestCorrelationId.ts'; export * from './lib/router.ts'; diff --git a/packages/oak/src/lib/router.ts b/packages/oak/src/lib/router.ts index 53fd72f..ba73931 100644 --- a/packages/oak/src/lib/router.ts +++ b/packages/oak/src/lib/router.ts @@ -1,18 +1,101 @@ -import { - type Command, - createRouter, - getLogger, - type Query, - type RouteHandler, - type RouteHandlerResult, -} from '@nimbus/core'; +import { getLogger, type MessageRouter } from '@nimbus/core'; import type { Context } from '@oak/oak/context'; import { Router as OakRouter, type RouterOptions } from '@oak/oak/router'; /** - * The NimbusOakRouter extends the Oak Router - * to directly route commands and queries coming - * in from HTTP requests to a Nimbus router. + * Options for handling errors in Oak routes. + */ +export type OakErrorHandlerOptions = { + onError?: (error: any, ctx: Context) => void; +}; + +/** + * Helper to create an Oak middleware that handles a Nimbus MessageRouter. + * This bridges the MessageRouter (transport-agnostic) to Oak's HTTP context. + * + * @param {MessageRouter} router - The Nimbus MessageRouter instance + * @param {OakErrorHandlerOptions} options - Optional options for error handling + * + * @returns {Function} The Oak middleware function + */ +export function createOakMessageHandler( + router: MessageRouter, + options?: OakErrorHandlerOptions, +): (ctx: Context) => Promise { + return async (ctx: Context) => { + try { + const requestBody = await ctx.request.body.json(); + const result = await router.route(requestBody); + + // Default HTTP response + ctx.response.status = 200; + ctx.response.body = result as any; + } catch (error: any) { + handleOakError(error, ctx, options?.onError); + } + }; +} + +/** + * Default error handler for Oak routes that maps Nimbus exceptions to HTTP responses. + * + * @param {any} error - The error to handle. + * @param {Context} ctx - The Oak context. + * @param {Function} customHandler - Optional custom error handler. 
+ * + * @returns {void} + */ +export function handleOakError( + error: any, + ctx: Context, + customHandler?: (error: any, ctx: Context) => void, +): void { + if (customHandler) { + customHandler(error, ctx); + return; + } + + getLogger().error({ + category: 'Nimbus', + message: error.message, + error, + }); + + const statusCode = error.statusCode ?? 500; + ctx.response.status = statusCode; + + if (statusCode < 500) { + ctx.response.body = { + statusCode, + ...(error.details ? { code: error.name } : {}), + ...(error.message ? { message: error.message } : {}), + ...(error.details ? { details: error.details } : {}), + }; + } else { + ctx.response.body = { + message: 'Internal server error', + }; + } +} + +/** + * The NimbusOakRouter extends the Oak Router to provide + * convenient methods for registering Nimbus message handlers. + * + * @example + * ```ts + * import { NimbusOakRouter } from "@nimbus/oak"; + * + * const router = new NimbusOakRouter(); + * const commandRouter = new MessageRouter('command'); + * + * commandRouter.register('at.overlap.nimbus.add-recipe', addRecipeHandler); + * + * router.command({ + * path: '/command', + * router: commandRouter, + * }); + * ``` */ export class NimbusOakRouter extends OakRouter { constructor(opts: RouterOptions = {}) { @@ -20,182 +103,62 @@ export class NimbusOakRouter extends OakRouter { } /** - * Routes a POST request to a Nimbus command router. + * Routes POST requests to a Nimbus command router. * * @param {string} path - Oak request path - * @param {string} commandType - Type of the command - * @param {RouteHandler} handler - Nimbus Route Handler function - * @param {AnySchema} commandSchema - JSON Schema of the command + * @param {MessageRouter} router - The Nimbus MessageRouter instance * @param {Function} onError - Optional function to customize error handling + * + * @example + * ```ts + * const commandRouter = new MessageRouter('command'); + * commandRouter.register('at.overlap.nimbus.add-recipe', addRecipeHandler); + * + * oakRouter.command({ + * path: '/command', + * router: commandRouter, + * }); + * ``` */ - command({ + command({ path, - type, - handler, - allowUnsafeInput, + router, onError, }: { path: string; - type: string; - handler: RouteHandler; - allowUnsafeInput?: boolean; + router: MessageRouter; onError?: (error: any, ctx: Context) => void; }) { - const inputLogFunc = (input: any) => { - getLogger().info({ - category: 'Nimbus', - ...(input?.data?.correlationId && { - correlationId: input.data.correlationId, - }), - message: - `${input?.data?.correlationId} - [Command] ${input?.type} from ${input?.source}`, - }); - }; - - super.post(path, async (ctx: Context) => { - try { - const requestBody = await ctx.request.body.json(); - - const nimbusRouter = createRouter({ - type: 'command', - handlerMap: { - [type]: { - handler, - allowUnsafeInput: allowUnsafeInput ?? false, - }, - }, - inputLogFunc, - }); - - // TODO: How do we implement the authentication context? - // data: { - // ...(ctx.state.authContext && { - // authContext: ctx.state.authContext, - // }), - // }, - - const result = await nimbusRouter(requestBody); - - this._handleNimbusRouterSuccess(result, ctx); - } catch (error: any) { - this._handleNimbusRouterError(error, ctx, onError); - } - }); + super.post(path, createOakMessageHandler(router, { onError })); } /** - * Routes a GET request to a Nimbus query router. + * Routes POST requests to a Nimbus query router. 
* * @param {string} path - Oak request path - * @param {string} queryType - Type of the query - * @param {boolean} allowUnsafeInput - Allow unsafe input - * @param {RouteHandler} handler - Nimbus Route Handler function + * @param {MessageRouter} router - The Nimbus MessageRouter instance * @param {Function} onError - Optional function to customize error handling + * + * @example + * ```ts + * const queryRouter = new MessageRouter('query'); + * queryRouter.register('at.overlap.nimbus.get-recipe', getRecipeHandler); + * + * oakRouter.query({ + * path: '/query', + * router: queryRouter, + * }); + * ``` */ - query({ + query({ path, - type, - allowUnsafeInput, - handler, + router, onError, }: { path: string; - type: string; - handler: RouteHandler; - allowUnsafeInput?: boolean; + router: MessageRouter; onError?: (error: any, ctx: Context) => void; }) { - const inputLogFunc = (input: any) => { - getLogger().info({ - category: 'Nimbus', - ...(input?.data?.correlationId && { - correlationId: input.data.correlationId, - }), - message: - `${input?.data?.correlationId} - [Query] ${input?.type} from ${input?.source}`, - }); - }; - - super.post(path, async (ctx: Context) => { - try { - const requestBody = await ctx.request.body.json(); - - const nimbusRouter = createRouter({ - type: 'query', - handlerMap: { - [type]: { - handler, - allowUnsafeInput: allowUnsafeInput ?? false, - }, - }, - inputLogFunc, - }); - - // TODO: How do we implement the authentication context? - // data: { - // ...(ctx.state.authContext && { - // authContext: ctx.state.authContext, - // }), - // }, - - const result = await nimbusRouter(requestBody); - - this._handleNimbusRouterSuccess(result, ctx); - } catch (error: any) { - this._handleNimbusRouterError(error, ctx, onError); - } - }); - } - - private _handleNimbusRouterSuccess( - result: RouteHandlerResult, - ctx: Context, - ) { - ctx.response.status = result.statusCode; - - if (result.headers) { - for (const header of Object.keys(result.headers)) { - ctx.response.headers.set( - header, - result.headers[header], - ); - } - } - - if (result.data) { - ctx.response.body = result.data; - } - } - - private _handleNimbusRouterError( - error: any, - ctx: Context, - onError?: (error: any, ctx: Context) => void, - ) { - if (onError) { - onError(error, ctx); - } else { - getLogger().error({ - category: 'Nimbus', - message: error.message, - error, - }); - - const statusCode = error.statusCode ?? 500; - ctx.response.status = statusCode; - - if (statusCode < 500) { - ctx.response.body = { - statusCode, - ...(error.details ? { code: error.name } : {}), - ...(error.message ? { message: error.message } : {}), - ...(error.details ? 
{ details: error.details } : {}), - }; - } else { - ctx.response.body = { - message: 'Internal server error', - }; - } - } + super.post(path, createOakMessageHandler(router, { onError })); } } From 1d3a76fa81e1521ddef2690fd57571f32bb32f74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Sat, 4 Oct 2025 13:28:06 +0200 Subject: [PATCH 08/37] rework OakRouter and HTTP implementation to better suit HTTP standards --- .claude/settings.local.json | 3 +- deno.lock | 1 + examples/gustav/deno.json | 1 + .../contexts/recipe/core/domain/ingredient.ts | 2 +- .../infrastructure/http}/commandRouter.ts | 5 +- .../http/handler/addRecipe.handler.ts | 17 + .../http/handler/getRecipe.handler.ts | 11 + .../http/handler/recipeAdded.handler.ts | 14 + .../infrastructure/http}/queryRouter.ts | 5 +- .../recipe/infrastructure/http/router.ts | 29 ++ .../http/schemas/addRecipeCommandSchema.ts | 66 ++++ .../http/schemas/getRecipeQuerySchema.ts | 34 ++ .../router/addRecipe.handler.ts | 14 - .../router/getRecipe.handler.ts | 11 - .../router/recipeAdded.handler.ts | 19 -- examples/gustav/src/main.ts | 6 +- examples/gustav/src/shared/http/router.ts | 45 +++ examples/gustav/src/shared/http/schemas.ts | 18 + .../gustav/src/shared/routing/httpRouter.ts | 90 ----- packages/oak/src/lib/router.ts | 316 +++++++++++++----- 20 files changed, 487 insertions(+), 220 deletions(-) rename examples/gustav/src/{shared/routing => contexts/recipe/infrastructure/http}/commandRouter.ts (70%) create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts rename examples/gustav/src/{shared/routing => contexts/recipe/infrastructure/http}/queryRouter.ts (70%) create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/router.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts create mode 100644 examples/gustav/src/shared/http/router.ts create mode 100644 examples/gustav/src/shared/http/schemas.ts delete mode 100644 examples/gustav/src/shared/routing/httpRouter.ts diff --git a/.claude/settings.local.json b/.claude/settings.local.json index 41a685c..65f3579 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -5,7 +5,8 @@ "Bash(deno add:*)", "Bash(deno task:*)", "Bash(deno check:*)", - "Bash(find:*)" + "Bash(find:*)", + "Bash(deno doc:*)" ], "deny": [], "ask": [] diff --git a/deno.lock b/deno.lock index cb87160..14ca611 100644 --- a/deno.lock +++ b/deno.lock @@ -318,6 +318,7 @@ "jsr:@std/dotenv@~0.225.5", "jsr:@std/ulid@1", "jsr:@tajpouria/cors@^1.2.1", + "npm:ajv@^8.17.1", "npm:effect@^3.17.9", "npm:mongodb@^6.12.0", "npm:zod@^3.24.1" diff --git a/examples/gustav/deno.json b/examples/gustav/deno.json index d06c114..2bf7da2 100644 --- a/examples/gustav/deno.json +++ b/examples/gustav/deno.json @@ -30,6 +30,7 @@ "@std/dotenv": "jsr:@std/dotenv@^0.225.5", "@std/ulid": 
"jsr:@std/ulid@^1.0.0", "@tajpouria/cors": "jsr:@tajpouria/cors@^1.2.1", + "ajv": "npm:ajv@^8.17.1", "mongodb": "npm:mongodb@^6.12.0", "zod": "npm:zod@^3.24.1", "effect": "npm:effect@^3.17.9" diff --git a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts index e4c6806..277141c 100644 --- a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts +++ b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts @@ -2,7 +2,7 @@ import { Unit } from '../../../../shared/types/unit.ts'; export type Ingredient = { name: string; - quantity: number; + amount: number; unit: Unit; productId?: string; }; diff --git a/examples/gustav/src/shared/routing/commandRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts similarity index 70% rename from examples/gustav/src/shared/routing/commandRouter.ts rename to examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts index 7a563fd..bdc7b10 100644 --- a/examples/gustav/src/shared/routing/commandRouter.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts @@ -1,6 +1,6 @@ import { getLogger, MessageRouter } from '@nimbus/core'; -import { AddRecipeCommandType } from '../../contexts/recipe/core/commands/addRecipe.ts'; -import { addRecipeHandler } from '../../contexts/recipe/infrastructure/router/addRecipe.handler.ts'; +import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; +import { addRecipeHandler } from './handler/addRecipe.handler.ts'; export const commandRouter = new MessageRouter('command', { logInput: (input: any) => { @@ -18,5 +18,4 @@ export const commandRouter = new MessageRouter('command', { commandRouter.register( AddRecipeCommandType, addRecipeHandler, - { allowUnsafeInput: true }, ); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts new file mode 100644 index 0000000..062d25a --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts @@ -0,0 +1,17 @@ +import { MessageHandler } from '@nimbus/core'; +import { eventSourcingDBEventStore } from '../../../../../shared/adapters/eventSourcingDBEventStore.ts'; +import { + addRecipe, + AddRecipeCommand, +} from '../../../core/commands/addRecipe.ts'; +import { Recipe } from '../../../core/domain/recipe.ts'; + +export const addRecipeHandler: MessageHandler< + AddRecipeCommand, + Recipe +> = async (command) => { + return addRecipe( + command, + eventSourcingDBEventStore, + ); +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts new file mode 100644 index 0000000..9ec788e --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts @@ -0,0 +1,11 @@ +import { MessageHandler } from '@nimbus/core'; +import { Recipe } from '../../../core/domain/recipe.ts'; +import { getRecipe, GetRecipeQuery } from '../../../core/queries/getRecipe.ts'; +import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; + +export const getRecipeHandler: MessageHandler< + GetRecipeQuery, + Recipe +> = async (query) => { + return getRecipe(query, recipeMemoryRepository); +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts 
b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts new file mode 100644 index 0000000..14a9a27 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts @@ -0,0 +1,14 @@ +import { MessageHandler } from '@nimbus/core'; +import { Recipe } from '../../../core/domain/recipe.ts'; +import { + recipeAdded, + RecipeAddedEvent, +} from '../../../core/events/recipeAdded.ts'; +import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; + +export const recipeAddedHandler: MessageHandler< + RecipeAddedEvent, + Recipe +> = async (event) => { + return recipeAdded(event, recipeMemoryRepository); +}; diff --git a/examples/gustav/src/shared/routing/queryRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts similarity index 70% rename from examples/gustav/src/shared/routing/queryRouter.ts rename to examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts index 82f26d2..85c6316 100644 --- a/examples/gustav/src/shared/routing/queryRouter.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts @@ -1,6 +1,6 @@ import { getLogger, MessageRouter } from '@nimbus/core'; -import { GetRecipeQueryType } from '../../contexts/recipe/core/queries/getRecipe.ts'; -import { getRecipeHandler } from '../../contexts/recipe/infrastructure/router/getRecipe.handler.ts'; +import { GetRecipeQueryType } from '../../core/queries/getRecipe.ts'; +import { getRecipeHandler } from './handler/getRecipe.handler.ts'; export const queryRouter = new MessageRouter('query', { logInput: (input: any) => { @@ -18,5 +18,4 @@ export const queryRouter = new MessageRouter('query', { queryRouter.register( GetRecipeQueryType, getRecipeHandler, - { allowUnsafeInput: true }, ); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts new file mode 100644 index 0000000..0eee69a --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts @@ -0,0 +1,29 @@ +import { NimbusOakRouter } from '@nimbus/oak'; +import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; +import { GetRecipeQueryType } from '../../core/queries/getRecipe.ts'; +import { commandRouter } from './commandRouter.ts'; +import { queryRouter } from './queryRouter.ts'; +import { AddRecipeCommandSchemaUrl } from './schemas/addRecipeCommandSchema.ts'; +import { GetRecipeQuerySchemaUrl } from './schemas/getRecipeQuerySchema.ts'; + +export const recipeRouter = new NimbusOakRouter(); + +// POST /recipes - Add a new recipe +recipeRouter.command({ + path: '/', + messageType: AddRecipeCommandType, + router: commandRouter, + dataschema: AddRecipeCommandSchemaUrl, + // extractData defaults to reading JSON body +}); + +// GET /recipes/:slug - Get a recipe by slug +recipeRouter.query({ + path: '/:slug', + messageType: GetRecipeQueryType, + router: queryRouter, + extractData: (ctx) => ({ + slug: ctx.params.slug, + }), + dataschema: GetRecipeQuerySchemaUrl, +}); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts new file mode 100644 index 0000000..d0e9606 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts @@ -0,0 +1,66 @@ +import { commandSchema } from '@nimbus/core'; +import type { SchemaObject } from 'ajv'; 
+ +export const AddRecipeCommandSchemaUrl = + 'https://api.gustav.app/schemas/commands/add-recipe/v1' as const; + +/** + * JSON Schema for AddRecipeCommand + * + * This schema validates the data payload of the add-recipe command. + * It will be registered with the validator and referenced via dataschema URL. + */ +export const addRecipeCommandSchema: SchemaObject = { + ...commandSchema, + $id: AddRecipeCommandSchemaUrl, + properties: { + ...commandSchema.properties, + data: { + type: 'object', + required: ['slug', 'name', 'ingredients', 'instructions', 'tags'], + properties: { + slug: { + type: 'string', + pattern: '^[a-z0-9-]+$', + minLength: 1, + maxLength: 100, + }, + name: { + type: 'string', + minLength: 1, + maxLength: 200, + }, + ingredients: { + type: 'array', + items: { + type: 'object', + required: ['name', 'amount', 'unit'], + properties: { + name: { type: 'string', minLength: 1 }, + amount: { type: 'number', minimum: 1 }, + unit: { type: 'string', minLength: 1 }, + productId: { type: 'string', minLength: 1 }, + }, + }, + }, + instructions: { + type: 'array', + items: { + type: 'string', + minLength: 1, + }, + }, + tags: { + type: 'array', + items: { + type: 'string', + minLength: 1, + }, + }, + }, + }, + dataschema: { + const: AddRecipeCommandSchemaUrl, + }, + }, +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts new file mode 100644 index 0000000..c0f8a2c --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts @@ -0,0 +1,34 @@ +import { querySchema } from '@nimbus/core'; +import type { SchemaObject } from 'ajv'; + +export const GetRecipeQuerySchemaUrl = + 'https://api.gustav.app/schemas/queries/get-recipe/v1' as const; + +/** + * JSON Schema for GetRecipeQuery + * + * This schema validates the data payload of the get-recipe query. + * It will be registered with the validator and referenced via dataschema URL. 
+ */ +export const getRecipeQuerySchema: SchemaObject = { + ...querySchema, + $id: GetRecipeQuerySchemaUrl, + properties: { + ...querySchema.properties, + data: { + type: 'object', + required: ['slug'], + properties: { + slug: { + type: 'string', + pattern: '^[a-z0-9-]+$', + minLength: 1, + maxLength: 100, + }, + }, + }, + dataschema: { + const: GetRecipeQuerySchemaUrl, + }, + }, +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts deleted file mode 100644 index 27d0783..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/router/addRecipe.handler.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { eventSourcingDBEventStore } from '../../../../shared/adapters/eventSourcingDBEventStore.ts'; -import { addRecipe, AddRecipeCommand } from '../../core/commands/addRecipe.ts'; -import { Recipe } from '../../core/domain/recipe.ts'; - -export const addRecipeHandler: MessageHandler< - AddRecipeCommand, - Recipe -> = async (command) => { - return await addRecipe( - command, - eventSourcingDBEventStore, - ); -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts deleted file mode 100644 index 8a62d01..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/router/getRecipe.handler.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { Recipe } from '../../core/domain/recipe.ts'; -import { getRecipe, GetRecipeQuery } from '../../core/queries/getRecipe.ts'; -import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; - -export const getRecipeHandler: MessageHandler< - GetRecipeQuery, - Recipe -> = async (query) => { - return await getRecipe(query, recipeMemoryRepository); -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts deleted file mode 100644 index 1c322be..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/router/recipeAdded.handler.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { RouteHandler } from '@nimbus/core'; -import { Recipe } from '../../core/domain/recipe.ts'; -import { - recipeAdded, - RecipeAddedEvent, -} from '../../core/events/recipeAdded.ts'; -import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; - -export const recipeAddedHandler: RouteHandler< - RecipeAddedEvent, - Recipe -> = async (query) => { - const recipe = await recipeAdded(query, recipeMemoryRepository); - - return { - statusCode: 200, - data: recipe, - }; -}; diff --git a/examples/gustav/src/main.ts b/examples/gustav/src/main.ts index 8a272af..111a6c6 100644 --- a/examples/gustav/src/main.ts +++ b/examples/gustav/src/main.ts @@ -9,8 +9,9 @@ import '@std/dotenv/load'; import { oakCors } from '@tajpouria/cors'; import process from 'node:process'; import { exampleAuthMiddleware } from './contexts/iam/infrastructure/http/auth.middleware.ts'; +import { router } from './shared/http/router.ts'; +import { registerSchemas } from './shared/http/schemas.ts'; import { initMongoConnectionManager } from './shared/mongodb.ts'; -import { router } from './shared/routing/httpRouter.ts'; // // Setup logging with basic options provided by Nimbus @@ -25,6 +26,9 @@ setupLogger({ useConsoleColors: process.env.LOG_FORMAT === 
'pretty', }); +// Register JSON schemas for validation +registerSchemas(); + // Initialize MongoDB Manager initMongoConnectionManager(); diff --git a/examples/gustav/src/shared/http/router.ts b/examples/gustav/src/shared/http/router.ts new file mode 100644 index 0000000..9a9a065 --- /dev/null +++ b/examples/gustav/src/shared/http/router.ts @@ -0,0 +1,45 @@ +import { getLogger } from '@nimbus/core'; +import { NimbusOakRouter } from '@nimbus/oak'; +import { recipeRouter } from '../../contexts/recipe/infrastructure/http/router.ts'; +import { mongoManager } from '../mongodb.ts'; + +export const router = new NimbusOakRouter(); + +router.get('/health', async (ctx) => { + const logger = getLogger(); + const now = new Date().toISOString(); + + const mongoHealth = await mongoManager.healthCheck(); + + logger.info({ + message: 'Health check', + data: { + time: now, + database: { ...mongoHealth }, + ...(ctx.state.correlationId + ? { correlationId: ctx.state.correlationId } + : {}), + ...(ctx.state.authContext + ? { authContext: ctx.state.authContext } + : {}), + }, + }); + + ctx.response.body = { + status: mongoHealth.status === 'healthy' ? 'OK' : 'ERROR', + http: { + status: 'healthy', + }, + database: { ...mongoHealth }, + ...(ctx.state.correlationId + ? { correlationId: ctx.state.correlationId } + : {}), + time: now, + }; +}); + +router.use( + '/recipes', + recipeRouter.routes(), + recipeRouter.allowedMethods(), +); diff --git a/examples/gustav/src/shared/http/schemas.ts b/examples/gustav/src/shared/http/schemas.ts new file mode 100644 index 0000000..f5255fe --- /dev/null +++ b/examples/gustav/src/shared/http/schemas.ts @@ -0,0 +1,18 @@ +import { getValidator } from '@nimbus/core'; +import { addRecipeCommandSchema } from '../../contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts'; +import { getRecipeQuerySchema } from '../../contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts'; + +/** + * Register all JSON schemas with the Nimbus validator. + * + * This should be called during application startup, before any routes are accessed. + * The schemas are used to validate message payloads when dataschema is present. + */ +export function registerSchemas(): void { + const validator = getValidator(); + + validator.addSchema(addRecipeCommandSchema); + validator.addSchema(getRecipeQuerySchema); + + // TODO: Add more schemas as you develop more commands/queries +} diff --git a/examples/gustav/src/shared/routing/httpRouter.ts b/examples/gustav/src/shared/routing/httpRouter.ts deleted file mode 100644 index 9f59536..0000000 --- a/examples/gustav/src/shared/routing/httpRouter.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { getLogger, Query } from '@nimbus/core'; -import { handleOakError, NimbusOakRouter } from '@nimbus/oak'; -import { mongoManager } from '../mongodb.ts'; -import { commandRouter } from './commandRouter.ts'; -import { queryRouter } from './queryRouter.ts'; - -export const router = new NimbusOakRouter(); - -router.get('/health', async (ctx) => { - const logger = getLogger(); - const now = new Date().toISOString(); - - const mongoHealth = await mongoManager.healthCheck(); - - logger.info({ - message: 'Health check', - data: { - time: now, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? { correlationId: ctx.state.correlationId } - : {}), - ...(ctx.state.authContext - ? { authContext: ctx.state.authContext } - : {}), - }, - }); - - ctx.response.body = { - status: mongoHealth.status === 'healthy' ? 
'OK' : 'ERROR', - http: { - status: 'healthy', - }, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? { correlationId: ctx.state.correlationId } - : {}), - time: now, - }; -}); - -// Command endpoint - uses the Oak adapter to bridge MessageRouter to HTTP -router.command({ - path: '/command', - router: commandRouter, -}); - -// Query endpoint - uses the Oak adapter to bridge MessageRouter to HTTP -router.get('/query', async (ctx) => { - try { - const queryParams: Record = {}; - for ( - const [key, value] of ctx.request.url.searchParams.entries() - ) { - queryParams[key] = value; - } - - const queryObject: Query = { - specversion: '1.0', - id: queryParams.id, - correlationid: queryParams.correlationid, - time: queryParams.time, - source: queryParams.source, - type: queryParams.type, - ...(queryParams.datacontenttype && - { datacontenttype: queryParams.datacontenttype }), - ...(queryParams.dataschema && - { dataschema: queryParams.dataschema }), - data: {}, - }; - - if (queryObject.datacontenttype === 'application/json') { - queryObject.data = JSON.parse(queryParams.data); - } - - // TODO: How do we implement the authentication context? - // data: { - // ...(ctx.state.authContext && { - // authContext: ctx.state.authContext, - // }), - // }, - - const result = await queryRouter.route(queryObject); - - ctx.response.status = 200; - ctx.response.body = result as any; - } catch (error: any) { - handleOakError(error, ctx); - } -}); diff --git a/packages/oak/src/lib/router.ts b/packages/oak/src/lib/router.ts index ba73931..3583014 100644 --- a/packages/oak/src/lib/router.ts +++ b/packages/oak/src/lib/router.ts @@ -1,6 +1,14 @@ -import { getLogger, type MessageRouter } from '@nimbus/core'; +import { + getLogger, + type MessageRouter, +} from '@nimbus/core'; import type { Context } from '@oak/oak/context'; -import { Router as OakRouter, type RouterOptions } from '@oak/oak/router'; +import { + Router as OakRouter, + type RouterContext, + type RouterOptions, +} from '@oak/oak/router'; +import { ulid } from '@std/ulid'; /** * Options for handling errors in Oak routes. @@ -10,40 +18,36 @@ export type OakErrorHandlerOptions = { }; /** - * Helper to create an Oak middleware that handles a Nimbus MessageRouter. - * This bridges the MessageRouter (transport-agnostic) to Oak's HTTP context. - * - * @param {MessageRouter} router - The Nimbus MessageRouter instance - * @param {OakErrorHandlerOptions} options - Optional options for error handling - * - * @returns {Function} The Oak middleware function + * Function to extract data from Oak RouterContext for a message. */ -export function createOakMessageHandler( - router: MessageRouter, - options?: OakErrorHandlerOptions, -): (ctx: Context) => Promise { - return async (ctx: Context) => { - try { - const requestBody = await ctx.request.body.json(); - const result = await router.route(requestBody); - - // Default HTTP response - ctx.response.status = 200; - ctx.response.body = result as any; - } catch (error: any) { - handleOakError(error, ctx, options?.onError); - } - }; -} +export type DataExtractor = (ctx: RouterContext) => TData | Promise; + +/** + * Options for registering a command route. + */ +export type CommandRouteOptions = { + path: string; + messageType: string; + router: MessageRouter; + extractData?: DataExtractor; + dataschema?: string; + onError?: (error: any, ctx: Context) => void; +}; + +/** + * Options for registering a query route. 
+ */ +export type QueryRouteOptions = { + path: string; + messageType: string; + router: MessageRouter; + extractData: DataExtractor; + dataschema?: string; + onError?: (error: any, ctx: Context) => void; +}; /** * Default error handler for Oak routes that maps Nimbus exceptions to HTTP responses. - * - * @param {any} error - The error to handle. - * @param {Context} ctx - The Oak context. - * @param {Function} customHandler - Optional custom error handler. - * - * @returns {void} */ export function handleOakError( error: any, @@ -79,21 +83,38 @@ export function handleOakError( } /** - * The NimbusOakRouter extends the Oak Router to provide - * convenient methods for registering Nimbus message handlers. + * The NimbusOakRouter extends Oak Router to provide convenient methods + * for routing HTTP requests to Nimbus MessageRouter handlers. + * + * It automatically constructs CloudEvents message envelopes from HTTP requests + * and handles response mapping. * * @example * ```ts * import { NimbusOakRouter } from "@nimbus/oak"; + * import { MessageRouter } from "@nimbus/core"; * - * const router = new NimbusOakRouter(); - * const commandRouter = new MessageRouter('command'); + * const queryRouter = new MessageRouter('query'); + * queryRouter.register('at.overlap.nimbus.get-recipe', getRecipeHandler); * + * const commandRouter = new MessageRouter('command'); * commandRouter.register('at.overlap.nimbus.add-recipe', addRecipeHandler); * - * router.command({ - * path: '/command', - * router: commandRouter, + * const httpRouter = new NimbusOakRouter(); + * + * // Query route (GET) + * httpRouter.query({ + * path: '/recipes/:slug', + * messageType: 'at.overlap.nimbus.get-recipe', + * router: queryRouter, + * extractData: (ctx) => ({ slug: ctx.params.slug }), + * }); + * + * // Command route (POST) + * httpRouter.command({ + * path: '/recipes', + * messageType: 'at.overlap.nimbus.add-recipe', + * router: commandRouter, * }); * ``` */ @@ -103,62 +124,203 @@ export class NimbusOakRouter extends OakRouter { } /** - * Routes POST requests to a Nimbus command router. + * Register a GET route that maps to a query message. + * + * Automatically constructs a Query CloudEvents message from the HTTP request + * and routes it through the provided MessageRouter. + * + * @param options - Route configuration + * + * @example + * ```ts + * router.query({ + * path: '/recipes/:slug', + * messageType: 'at.overlap.nimbus.get-recipe', + * router: queryRouter, + * extractData: (ctx) => ({ slug: ctx.params.slug }), + * }); + * ``` + */ + query(options: QueryRouteOptions): void { + super.get(options.path, async (ctx: RouterContext) => { + try { + const data = await options.extractData(ctx); + + const query = { + specversion: '1.0' as const, + id: ulid(), + correlationid: ctx.state.correlationId ?? ulid(), + time: new Date().toISOString(), + source: ctx.request.url.origin, + type: options.messageType, + data, + datacontenttype: 'application/json' as const, + ...(options.dataschema && { dataschema: options.dataschema }), + }; + + const result = await options.router.route(query); + + ctx.response.status = 200; + ctx.response.body = result as any; + } catch (error: any) { + handleOakError(error, ctx, options.onError); + } + }); + } + + /** + * Register a POST route that maps to a command message. + * + * Automatically constructs a Command CloudEvents message from the HTTP request + * and routes it through the provided MessageRouter. 
* - * @param {string} path - Oak request path - * @param {MessageRouter} router - The Nimbus MessageRouter instance - * @param {Function} onError - Optional function to customize error handling + * By default, extracts data from the request body as JSON. + * + * @param options - Route configuration * * @example * ```ts - * const commandRouter = new MessageRouter('command'); - * commandRouter.register('at.overlap.nimbus.add-recipe', addRecipeHandler); + * // Using default body extraction + * router.command({ + * path: '/recipes', + * messageType: 'at.overlap.nimbus.add-recipe', + * router: commandRouter, + * }); * - * oakRouter.command({ - * path: '/command', - * router: commandRouter, + * // Custom data extraction + * router.command({ + * path: '/recipes/:slug', + * messageType: 'at.overlap.nimbus.update-recipe', + * router: commandRouter, + * extractData: async (ctx) => ({ + * slug: ctx.params.slug, + * ...await ctx.request.body.json() + * }), * }); * ``` */ - command({ - path, - router, - onError, - }: { - path: string; - router: MessageRouter; - onError?: (error: any, ctx: Context) => void; - }) { - super.post(path, createOakMessageHandler(router, { onError })); + command(options: CommandRouteOptions): void { + const extractData = options.extractData ?? + (async (ctx: RouterContext) => await ctx.request.body.json()); + + super.post(options.path, async (ctx: RouterContext) => { + try { + const data = await extractData(ctx); + + const command = { + specversion: '1.0' as const, + id: ulid(), + correlationid: ctx.state.correlationId ?? ulid(), + time: new Date().toISOString(), + source: ctx.request.url.origin, + type: options.messageType, + data, + datacontenttype: 'application/json' as const, + ...(options.dataschema && { dataschema: options.dataschema }), + }; + + const result = await options.router.route(command); + + ctx.response.status = 201; + ctx.response.body = result as any; + } catch (error: any) { + handleOakError(error, ctx, options.onError); + } + }); } /** - * Routes POST requests to a Nimbus query router. + * Register a PUT route that maps to a command message. * - * @param {string} path - Oak request path - * @param {MessageRouter} router - The Nimbus MessageRouter instance - * @param {Function} onError - Optional function to customize error handling + * Similar to command() but uses PUT method (for updates/replacements). + * + * @param options - Route configuration * * @example * ```ts - * const queryRouter = new MessageRouter('query'); - * queryRouter.register('at.overlap.nimbus.get-recipe', getRecipeHandler); + * router.commandPut({ + * path: '/recipes/:slug', + * messageType: 'at.overlap.nimbus.update-recipe', + * router: commandRouter, + * extractData: async (ctx) => ({ + * slug: ctx.params.slug, + * ...await ctx.request.body.json() + * }), + * }); + * ``` + */ + commandPut(options: CommandRouteOptions): void { + const extractData = options.extractData ?? + (async (ctx: RouterContext) => await ctx.request.body.json()); + + super.put(options.path, async (ctx: RouterContext) => { + try { + const data = await extractData(ctx); + + const command = { + specversion: '1.0' as const, + id: ulid(), + correlationid: ctx.state.correlationId ?? 
ulid(), + time: new Date().toISOString(), + source: ctx.request.url.origin, + type: options.messageType, + data, + datacontenttype: 'application/json' as const, + ...(options.dataschema && { dataschema: options.dataschema }), + }; + + const result = await options.router.route(command); + + ctx.response.status = 200; + ctx.response.body = result as any; + } catch (error: any) { + handleOakError(error, ctx, options.onError); + } + }); + } + + /** + * Register a DELETE route that maps to a command message. * - * oakRouter.query({ - * path: '/query', - * router: queryRouter, + * @param options - Route configuration + * + * @example + * ```ts + * router.commandDelete({ + * path: '/recipes/:slug', + * messageType: 'at.overlap.nimbus.delete-recipe', + * router: commandRouter, + * extractData: (ctx) => ({ slug: ctx.params.slug }), * }); * ``` */ - query({ - path, - router, - onError, - }: { - path: string; - router: MessageRouter; - onError?: (error: any, ctx: Context) => void; - }) { - super.post(path, createOakMessageHandler(router, { onError })); + commandDelete(options: CommandRouteOptions): void { + const extractData = options.extractData ?? + ((ctx: RouterContext) => ({ id: ctx.params.id })); + + super.delete(options.path, async (ctx: RouterContext) => { + try { + const data = await extractData(ctx); + + const command = { + specversion: '1.0' as const, + id: ulid(), + correlationid: ctx.state.correlationId ?? ulid(), + time: new Date().toISOString(), + source: ctx.request.url.origin, + type: options.messageType, + data, + datacontenttype: 'application/json' as const, + ...(options.dataschema && { dataschema: options.dataschema }), + }; + + const result = await options.router.route(command); + + ctx.response.status = 204; + ctx.response.body = result as any; + } catch (error: any) { + handleOakError(error, ctx, options.onError); + } + }); } } From 617b27f425b0ffab154016801a631a22d6e9aa7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Sat, 4 Oct 2025 21:27:37 +0200 Subject: [PATCH 09/37] implements more if the eventsourcing parts --- .claude/settings.local.json | 4 +- deno.json | 4 +- .../recipe/core/commands/addRecipe.ts | 48 ++--- .../recipe/core/commands/deleteRecipe.ts | 66 +++++++ .../recipe/core/commands/updateRecipe.ts | 77 ++++++++ .../contexts/recipe/core/domain/ingredient.ts | 2 +- .../recipe/core/domain/recipeAggregate.ts | 85 ++++++++ .../src/contexts/recipe/core/errors/error.md | 3 - .../recipe/core/events/recipeDeleted.ts | 27 +++ .../recipe/core/events/recipeUpdated.ts | 29 +++ .../recipe/core/ports/recipeRepository.ts | 2 +- .../recipe/core/queries/listRecipes.ts | 58 ++++++ .../http/handler/addRecipe.handler.ts | 4 +- .../repository/recipeMemoryRepository.ts | 9 +- examples/gustav/src/main.ts | 10 +- .../adapters/eventSourcingDBEventStore.ts | 133 ------------- .../src/shared/{types => core/domain}/unit.ts | 0 .../src/shared/infrastructure/eventStore.ts | 15 ++ .../{ => infrastructure}/http/router.ts | 2 +- .../{ => infrastructure}/http/schemas.ts | 4 +- .../shared/{ => infrastructure}/mongodb.ts | 0 packages/eventsourcing/README.md | 10 + packages/eventsourcing/deno.json | 43 ++++ packages/eventsourcing/src/index.ts | 2 + packages/eventsourcing/src/lib/aggregate.ts | 162 +++++++++++++++ .../eventsourcing/src/lib}/eventStore.ts | 7 +- packages/eventsourcingdb/README.md | 10 + packages/eventsourcingdb/deno.json | 43 ++++ packages/eventsourcingdb/src/index.ts | 1 + .../src/lib/eventSourcingDb.ts | 184 ++++++++++++++++++ 30 files changed, 867 
insertions(+), 177 deletions(-) create mode 100644 examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts create mode 100644 examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts create mode 100644 examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/errors/error.md create mode 100644 examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts create mode 100644 examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts create mode 100644 examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts delete mode 100644 examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts rename examples/gustav/src/shared/{types => core/domain}/unit.ts (100%) create mode 100644 examples/gustav/src/shared/infrastructure/eventStore.ts rename examples/gustav/src/shared/{ => infrastructure}/http/router.ts (93%) rename examples/gustav/src/shared/{ => infrastructure}/http/schemas.ts (68%) rename examples/gustav/src/shared/{ => infrastructure}/mongodb.ts (100%) create mode 100644 packages/eventsourcing/README.md create mode 100644 packages/eventsourcing/deno.json create mode 100644 packages/eventsourcing/src/index.ts create mode 100644 packages/eventsourcing/src/lib/aggregate.ts rename {examples/gustav/src/shared/ports => packages/eventsourcing/src/lib}/eventStore.ts (89%) create mode 100644 packages/eventsourcingdb/README.md create mode 100644 packages/eventsourcingdb/deno.json create mode 100644 packages/eventsourcingdb/src/index.ts create mode 100644 packages/eventsourcingdb/src/lib/eventSourcingDb.ts diff --git a/.claude/settings.local.json b/.claude/settings.local.json index 65f3579..3e1cf0c 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -6,7 +6,9 @@ "Bash(deno task:*)", "Bash(deno check:*)", "Bash(find:*)", - "Bash(deno doc:*)" + "Bash(deno doc:*)", + "WebFetch(domain:docs.eventsourcingdb.io)", + "Bash(cat:*)" ], "deny": [], "ask": [] diff --git a/deno.json b/deno.json index a886b64..34258ae 100644 --- a/deno.json +++ b/deno.json @@ -5,6 +5,8 @@ "workspace": [ "./packages/core", "./packages/client", + "./packages/eventsourcing", + "./packages/eventsourcingdb", "./packages/mongodb", "./packages/oak", "./packages/utils", @@ -12,4 +14,4 @@ "./examples/the-expense" ], "nodeModulesDir": "none" -} +} \ No newline at end of file diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts index e1aaabb..649e951 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts @@ -1,8 +1,9 @@ import { Command, InvalidInputException } from '@nimbus/core'; +import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; import { getEnv } from '@nimbus/utils'; import { ulid } from '@std/ulid'; -import { EventStore } from '../../../../shared/ports/eventStore.ts'; import { Recipe } from '../domain/recipe.ts'; +import { recipeReducer, recipeSubject } from '../domain/recipeAggregate.ts'; import { RecipeAddedCommandType, RecipeAddedEvent, @@ -19,9 +20,29 @@ export const addRecipe = async ( eventStore: EventStore, ): Promise => { const { EVENT_SOURCE } = getEnv({ - variables: ['EVENT_SOURCE', 'EVENT_TYPE_PREFIX'], + variables: ['EVENT_SOURCE'], }); + const subject = recipeSubject(command.data.slug); + + // Load current aggregate state by replaying events + const snapshot = await loadAggregate( + eventStore, + 
subject, + null, + recipeReducer, + ); + + // Validate recipe doesn't already exist + if (snapshot.state !== null) { + throw new InvalidInputException('Recipe already exists', { + errorCode: 'DUPLICATE_RECIPE', + reason: + 'A recipe with this slug already exists. The slug for each recipe must be unique, please choose a different slug.', + }); + } + + // Create event const recipeAddedEvent: RecipeAddedEvent = { specversion: '1.0', id: ulid(), @@ -29,25 +50,13 @@ export const addRecipe = async ( time: new Date().toISOString(), source: EVENT_SOURCE, type: RecipeAddedCommandType, - subject: `/recipes/${command.data.slug}`, + subject, data: command.data, datacontenttype: 'application/json', - // TODO: add dataschema }; - const replayedEvents = await eventStore.readEvents( - recipeAddedEvent.subject, - ); - - if (replayedEvents.length > 0) { - throw new InvalidInputException('Recipe already exists', { - errorCode: 'DUPLICATE_RECIPE', - reason: - 'A recipe with this slug already exists. The slug for each recipe must be unique, please choose a different slug.', - }); - } - - const writtenEvents = await eventStore.writeEvents([ + // Write event + await eventStore.writeEvents([ { source: recipeAddedEvent.source, subject: recipeAddedEvent.subject, @@ -56,10 +65,5 @@ export const addRecipe = async ( }, ]); - console.log('writtenEvents', writtenEvents); - - // TODO: Next work on the readModels and the projectors which update the readModels based on the events. - // On application startup we need to replay all events to rebuild the readModels. - return command.data; }; diff --git a/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts new file mode 100644 index 0000000..047aa88 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts @@ -0,0 +1,66 @@ +import { Command } from '@nimbus/core'; +import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; +import { getEnv } from '@nimbus/utils'; +import { ulid } from '@std/ulid'; +import { + recipeReducer, + recipeSubject, + requireRecipe, +} from '../domain/recipeAggregate.ts'; +import { + RecipeDeletedEvent, + RecipeDeletedEventType, +} from '../events/recipeDeleted.ts'; + +export const DeleteRecipeCommandType = 'at.overlap.nimbus.delete-recipe' as const; + +export type DeleteRecipeCommand = Command<{ + slug: string; +}> & { + type: typeof DeleteRecipeCommandType; +}; + +export const deleteRecipe = async ( + command: DeleteRecipeCommand, + eventStore: EventStore, +): Promise => { + const { EVENT_SOURCE } = getEnv({ + variables: ['EVENT_SOURCE'], + }); + + const subject = recipeSubject(command.data.slug); + + // Load current aggregate state by replaying events + const snapshot = await loadAggregate( + eventStore, + subject, + null, + recipeReducer, + ); + + // Validate recipe exists + requireRecipe(snapshot.state); + + // Create event + const recipeDeletedEvent: RecipeDeletedEvent = { + specversion: '1.0', + id: ulid(), + correlationid: command.correlationid, + time: new Date().toISOString(), + source: EVENT_SOURCE, + type: RecipeDeletedEventType, + subject, + data: { slug: command.data.slug }, + datacontenttype: 'application/json', + }; + + // Write event + await eventStore.writeEvents([ + { + source: recipeDeletedEvent.source, + subject: recipeDeletedEvent.subject, + type: recipeDeletedEvent.type, + data: recipeDeletedEvent.data, + }, + ]); +}; diff --git a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts 
b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts new file mode 100644 index 0000000..5022416 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts @@ -0,0 +1,77 @@ +import { Command } from '@nimbus/core'; +import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; +import { getEnv } from '@nimbus/utils'; +import { ulid } from '@std/ulid'; +import { Recipe } from '../domain/recipe.ts'; +import { + recipeReducer, + recipeSubject, + requireRecipe, +} from '../domain/recipeAggregate.ts'; +import { + RecipeUpdatedEvent, + RecipeUpdatedEventType, +} from '../events/recipeUpdated.ts'; + +export const UpdateRecipeCommandType = 'at.overlap.nimbus.update-recipe' as const; + +export type UpdateRecipeCommand = Command<{ + slug: string; + updates: Partial; +}> & { + type: typeof UpdateRecipeCommandType; +}; + +export const updateRecipe = async ( + command: UpdateRecipeCommand, + eventStore: EventStore, +): Promise => { + const { EVENT_SOURCE } = getEnv({ + variables: ['EVENT_SOURCE'], + }); + + const subject = recipeSubject(command.data.slug); + + // Load current aggregate state by replaying events + const snapshot = await loadAggregate( + eventStore, + subject, + null, + recipeReducer, + ); + + // Validate recipe exists + const currentRecipe = requireRecipe(snapshot.state); + + // Apply updates to get new state + const updatedRecipe = { + ...currentRecipe, + ...command.data.updates, + slug: currentRecipe.slug, // Prevent slug changes + }; + + // Create event + const recipeUpdatedEvent: RecipeUpdatedEvent = { + specversion: '1.0', + id: ulid(), + correlationid: command.correlationid, + time: new Date().toISOString(), + source: EVENT_SOURCE, + type: RecipeUpdatedEventType, + subject, + data: command.data.updates, + datacontenttype: 'application/json', + }; + + // Write event + await eventStore.writeEvents([ + { + source: recipeUpdatedEvent.source, + subject: recipeUpdatedEvent.subject, + type: recipeUpdatedEvent.type, + data: recipeUpdatedEvent.data, + }, + ]); + + return updatedRecipe; +}; diff --git a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts index 277141c..e1d258f 100644 --- a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts +++ b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts @@ -1,4 +1,4 @@ -import { Unit } from '../../../../shared/types/unit.ts'; +import { Unit } from '../../../../shared/core/domain/unit.ts'; export type Ingredient = { name: string; diff --git a/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts b/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts new file mode 100644 index 0000000..2148a2c --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts @@ -0,0 +1,85 @@ +import { Event } from '@nimbus/core'; +import { type EventReducer } from '@nimbus/eventsourcing'; +import { Recipe } from './recipe.ts'; + +/** + * Recipe aggregate state. + * + * Null represents a recipe that doesn't exist or has been deleted. + */ +export type RecipeState = Recipe | null; + +/** + * Recipe event reducer. + * + * Applies events to recipe state to reconstruct the aggregate. + * This is the core of event sourcing - replaying events to rebuild state. 
+ * + * @param state - Current recipe state + * @param event - Event to apply + * @returns New recipe state after applying the event + * + * @example + * ```ts + * const events = await eventStore.readEvents('/recipes/carbonara'); + * const currentState = events.reduce(recipeReducer, null); + * ``` + */ +export const recipeReducer: EventReducer = ( + state: RecipeState, + event: Event, +): RecipeState => { + switch (event.type) { + case 'at.overlap.nimbus.recipe-added': { + // Create new recipe from event data + return event.data as Recipe; + } + + case 'at.overlap.nimbus.recipe-updated': { + if (!state) { + // Cannot update a recipe that doesn't exist + // In a real system, you might want to log this as a warning + return state; + } + + // Merge update data into existing recipe + return { + ...state, + ...(event.data as Partial), + }; + } + + case 'at.overlap.nimbus.recipe-deleted': { + // Mark recipe as deleted + return null; + } + + default: + // Unknown event type - return state unchanged + return state; + } +}; + +/** + * Helper to get subject for a recipe. + * + * @param slug - Recipe slug + * @returns Subject path for the recipe + */ +export function recipeSubject(slug: string): string { + return `/recipes/${slug}`; +} + +/** + * Helper to validate recipe state. + * + * @param state - Recipe state to validate + * @throws Error if recipe doesn't exist + * @returns The recipe (for chaining) + */ +export function requireRecipe(state: RecipeState): Recipe { + if (!state) { + throw new Error('Recipe does not exist'); + } + return state; +} diff --git a/examples/gustav/src/contexts/recipe/core/errors/error.md b/examples/gustav/src/contexts/recipe/core/errors/error.md deleted file mode 100644 index e43d0d3..0000000 --- a/examples/gustav/src/contexts/recipe/core/errors/error.md +++ /dev/null @@ -1,3 +0,0 @@ -./src/contexts//core/errors/.ts - -We define domain specific errors that can occur during runtime here. 
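The reducer above is a pure function, so a recipe's current state can be rebuilt anywhere by folding its stored events. A minimal sketch of that replay, assuming two made-up events for a single recipe subject (only the fields the reducer actually inspects are filled in; a real Event also carries id, source, time and so on):

```ts
import type { Event } from '@nimbus/core';
import { recipeReducer, type RecipeState } from './recipeAggregate.ts';

// Two illustrative events for one subject, e.g. '/recipes/carbonara'.
// The type strings match the cases handled by recipeReducer.
const events = [
    {
        type: 'at.overlap.nimbus.recipe-added',
        data: { slug: 'carbonara', name: 'Carbonara' },
    },
    {
        type: 'at.overlap.nimbus.recipe-updated',
        data: { name: 'Spaghetti Carbonara' },
    },
] as unknown as Event[];

// Replay: fold the events into the current state, starting from null
// (a recipe does not exist before its first event).
const state = events.reduce<RecipeState>(recipeReducer, null);

console.log(state?.name); // 'Spaghetti Carbonara'
```

A recipe-deleted event would fold the state back to null, which is why RecipeState includes null rather than a separate deleted flag.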
diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts b/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts new file mode 100644 index 0000000..8fc0c54 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts @@ -0,0 +1,27 @@ +import { type Event, getLogger } from '@nimbus/core'; +import { RecipeRepository } from '../ports/recipeRepository.ts'; + +export const RecipeDeletedEventType = 'at.overlap.nimbus.recipe-deleted' as const; + +export type RecipeDeletedEvent = Event<{ slug: string }> & { + type: typeof RecipeDeletedEventType; +}; + +export const recipeDeleted = async ( + event: RecipeDeletedEvent, + repository: RecipeRepository, +) => { + getLogger().info({ + message: 'recipeDeleted Handler', + data: event.data, + }); + + // In event sourcing, this would update the read model + // The event itself is already stored in the event store + const slug = event.subject?.split('/').pop(); + if (!slug) { + throw new Error('Recipe slug not found in event subject'); + } + + await repository.delete(slug); +}; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts b/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts new file mode 100644 index 0000000..cdc0f98 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts @@ -0,0 +1,29 @@ +import { type Event, getLogger } from '@nimbus/core'; +import { Recipe } from '../domain/recipe.ts'; +import { RecipeRepository } from '../ports/recipeRepository.ts'; + +export const RecipeUpdatedEventType = 'at.overlap.nimbus.recipe-updated' as const; + +export type RecipeUpdatedEvent = Event> & { + type: typeof RecipeUpdatedEventType; +}; + +export const recipeUpdated = async ( + event: RecipeUpdatedEvent, + repository: RecipeRepository, +) => { + getLogger().info({ + message: 'recipeUpdated Handler', + data: event.data, + }); + + // In event sourcing, this would update the read model + // The event itself is already stored in the event store + const slug = event.subject?.split('/').pop(); + if (!slug) { + throw new Error('Recipe slug not found in event subject'); + } + + const recipe = await repository.update(slug, event.data); + return recipe; +}; diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts index 85f85de..f34bb67 100644 --- a/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts +++ b/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts @@ -5,7 +5,7 @@ export interface RecipeRepository { insert: (recipe: Recipe) => Promise; - update: (recipe: Recipe) => Promise; + update: (slug: string, updates: Partial) => Promise; delete: (id: string) => Promise; diff --git a/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts b/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts new file mode 100644 index 0000000..bcb0255 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts @@ -0,0 +1,58 @@ +import { Query } from '@nimbus/core'; +import { type EventStore, loadAggregates } from '@nimbus/eventsourcing'; +import { Recipe } from '../domain/recipe.ts'; +import { recipeReducer } from '../domain/recipeAggregate.ts'; + +export const ListRecipesQueryType = 'at.overlap.nimbus.list-recipes' as const; + +export type ListRecipesQuery = Query<{ + limit?: number; + offset?: number; +}> & { + type: typeof ListRecipesQueryType; +}; + +/** + * List all recipes using dynamic aggregate 
boundaries. + * + * This demonstrates reading events recursively across multiple aggregates + * and reconstructing each one's state independently. + * + * Instead of querying a read model, we rebuild state from events in real-time. + * This is powerful for: + * - Avoiding eventual consistency issues (always current) + * - Querying across aggregate boundaries + * - Temporal queries (add time bounds to see state at any point) + * + * @param query - List query with pagination + * @param eventStore - Event store to read from + * @returns Array of reconstructed recipes + */ +export const listRecipes = async ( + query: ListRecipesQuery, + eventStore: EventStore, +): Promise => { + // Load all recipe aggregates by reading events recursively + // from the parent subject '/recipes' + const aggregates = await loadAggregates( + eventStore, + '/recipes', + null, + recipeReducer, + (event) => event.subject, // Group events by their subject + ); + + // Extract non-null recipe states (filter out deleted recipes) + const recipes: Recipe[] = []; + for (const [_subject, snapshot] of aggregates) { + if (snapshot.state !== null) { + recipes.push(snapshot.state); + } + } + + // Apply pagination (in a real app, you might do this at the event store level) + const offset = query.data.offset ?? 0; + const limit = query.data.limit ?? 10; + + return recipes.slice(offset, offset + limit); +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts index 062d25a..4d9a68e 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts @@ -1,5 +1,5 @@ import { MessageHandler } from '@nimbus/core'; -import { eventSourcingDBEventStore } from '../../../../../shared/adapters/eventSourcingDBEventStore.ts'; +import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; import { addRecipe, AddRecipeCommand, @@ -12,6 +12,6 @@ export const addRecipeHandler: MessageHandler< > = async (command) => { return addRecipe( command, - eventSourcingDBEventStore, + eventStore, ); }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts index c16d789..aae62c9 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts @@ -18,8 +18,8 @@ const makeMemoryRepository = (): RecipeRepository => { }, // deno-lint-ignore require-await - update: async (recipe) => { - const existingRecipe = store.get(recipe.slug); + update: async (slug, updates) => { + const existingRecipe = store.get(slug); if (!existingRecipe) { throw new NotFoundException('Recipe not found', { @@ -27,8 +27,9 @@ const makeMemoryRepository = (): RecipeRepository => { }); } - store.set(recipe.slug, recipe); - return recipe; + const updatedRecipe = { ...existingRecipe, ...updates }; + store.set(slug, updatedRecipe); + return updatedRecipe; }, // deno-lint-ignore require-await diff --git a/examples/gustav/src/main.ts b/examples/gustav/src/main.ts index 111a6c6..4c7888e 100644 --- a/examples/gustav/src/main.ts +++ b/examples/gustav/src/main.ts @@ -9,9 +9,10 @@ import '@std/dotenv/load'; import { oakCors } from '@tajpouria/cors'; import process from 
'node:process'; import { exampleAuthMiddleware } from './contexts/iam/infrastructure/http/auth.middleware.ts'; -import { router } from './shared/http/router.ts'; -import { registerSchemas } from './shared/http/schemas.ts'; -import { initMongoConnectionManager } from './shared/mongodb.ts'; +import { initEventStore } from './shared/infrastructure/eventStore.ts'; +import { router } from './shared/infrastructure/http/router.ts'; +import { registerSchemas } from './shared/infrastructure/http/schemas.ts'; +import { initMongoConnectionManager } from './shared/infrastructure/mongodb.ts'; // // Setup logging with basic options provided by Nimbus @@ -29,6 +30,9 @@ setupLogger({ // Register JSON schemas for validation registerSchemas(); +// Initialize EventStore +initEventStore(); + // Initialize MongoDB Manager initMongoConnectionManager(); diff --git a/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts b/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts deleted file mode 100644 index aa0717e..0000000 --- a/examples/gustav/src/shared/adapters/eventSourcingDBEventStore.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { GenericException } from '@nimbus/core'; -import { getEnv } from '@nimbus/utils'; -import { EventStore, EventStoreReadOptions } from '../ports/eventStore.ts'; - -// TODO: this implementation should be moved to @nimbus/eventsourcingdb - -const defaultReadOptions: EventStoreReadOptions = { - recursive: false, -}; - -const makeEventSourcingDBEventStore = (): EventStore => { - const { EVENTSOURCINGDB_API, EVENTSOURCINGDB_SECRET } = getEnv({ - variables: ['EVENTSOURCINGDB_API', 'EVENTSOURCINGDB_SECRET'], - }); - - return { - writeEvents: async (events) => { - const payload = JSON.stringify({ - events: events, - }); - - const response = await fetch( - `${EVENTSOURCINGDB_API}/write-events`, - { - method: 'POST', - headers: { - 'authorization': `Bearer ${EVENTSOURCINGDB_SECRET}`, - 'content-type': 'application/json', - }, - body: payload, - }, - ); - - const body = await response.text(); - - if (!response.ok) { - throw new GenericException('Failed to write events', { - status: response.status, - statusText: response.statusText, - url: response.url, - body, - }); - } - - if (body.startsWith('[')) { - let items: any[]; - - try { - items = JSON.parse(body); - } catch (error: any) { - throw new GenericException('Failed to parse events', { - reason: error.message, - }); - } - - return items; - } else { - throw new GenericException('Failed to parse events', { - reason: 'Response was not an array of events', - }); - } - }, - - readEvents: async (subject, options = defaultReadOptions) => { - console.log('readEvents', subject, options); - - const response = await fetch( - `${EVENTSOURCINGDB_API}/read-events`, - { - method: 'POST', - headers: { - 'authorization': `Bearer ${EVENTSOURCINGDB_SECRET}`, - 'content-type': 'application/json', - }, - body: JSON.stringify({ - subject, - options: { - recursive: options.recursive, - ...(options.order && { order: options.order }), - ...(options.lowerBound && { - lowerBound: options.lowerBound, - }), - ...(options.upperBound && { - upperBound: options.upperBound, - }), - ...(options.fromLatestEvent && { - fromLatestEvent: options.fromLatestEvent, - }), - }, - }), - }, - ); - - const body = await response.text(); - - if (!response.ok) { - throw new GenericException('Failed to read events', { - status: response.status, - statusText: response.statusText, - url: response.url, - body, - }); - } - - let items: any[] = []; - - try { - // We return an 
empty array if there are no events at all. - if (body.length === 0) { - return []; - } - - // Otherwise we turn the NDJSON response into an array of items. - // https://docs.eventsourcingdb.io/getting-started/reading-events - items = body - .split('\n') - .filter((item) => item.startsWith('{')) - .map((item) => JSON.parse(item)); - } catch (error: any) { - throw new GenericException('Failed to parse events', { - reason: error.message, - }); - } - - // Parse all items to Nimbus objects and ensure type safety. - const events = items.map((item) => item.payload); - - return events; - }, - }; -}; - -export const eventSourcingDBEventStore = makeEventSourcingDBEventStore(); diff --git a/examples/gustav/src/shared/types/unit.ts b/examples/gustav/src/shared/core/domain/unit.ts similarity index 100% rename from examples/gustav/src/shared/types/unit.ts rename to examples/gustav/src/shared/core/domain/unit.ts diff --git a/examples/gustav/src/shared/infrastructure/eventStore.ts b/examples/gustav/src/shared/infrastructure/eventStore.ts new file mode 100644 index 0000000..9b5799e --- /dev/null +++ b/examples/gustav/src/shared/infrastructure/eventStore.ts @@ -0,0 +1,15 @@ +import { EventSourcingDBStore } from '@nimbus/eventsourcingdb'; +import { getEnv } from '@nimbus/utils'; + +export let eventStore: EventSourcingDBStore; + +export const initEventStore = () => { + const { EVENTSOURCINGDB_API, EVENTSOURCINGDB_SECRET } = getEnv({ + variables: ['EVENTSOURCINGDB_API', 'EVENTSOURCINGDB_SECRET'], + }); + + eventStore = new EventSourcingDBStore({ + apiUrl: EVENTSOURCINGDB_API, + secret: EVENTSOURCINGDB_SECRET, + }); +}; diff --git a/examples/gustav/src/shared/http/router.ts b/examples/gustav/src/shared/infrastructure/http/router.ts similarity index 93% rename from examples/gustav/src/shared/http/router.ts rename to examples/gustav/src/shared/infrastructure/http/router.ts index 9a9a065..382c357 100644 --- a/examples/gustav/src/shared/http/router.ts +++ b/examples/gustav/src/shared/infrastructure/http/router.ts @@ -1,6 +1,6 @@ import { getLogger } from '@nimbus/core'; import { NimbusOakRouter } from '@nimbus/oak'; -import { recipeRouter } from '../../contexts/recipe/infrastructure/http/router.ts'; +import { recipeRouter } from '../../../contexts/recipe/infrastructure/http/router.ts'; import { mongoManager } from '../mongodb.ts'; export const router = new NimbusOakRouter(); diff --git a/examples/gustav/src/shared/http/schemas.ts b/examples/gustav/src/shared/infrastructure/http/schemas.ts similarity index 68% rename from examples/gustav/src/shared/http/schemas.ts rename to examples/gustav/src/shared/infrastructure/http/schemas.ts index f5255fe..ae43ff8 100644 --- a/examples/gustav/src/shared/http/schemas.ts +++ b/examples/gustav/src/shared/infrastructure/http/schemas.ts @@ -1,6 +1,6 @@ import { getValidator } from '@nimbus/core'; -import { addRecipeCommandSchema } from '../../contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts'; -import { getRecipeQuerySchema } from '../../contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts'; +import { addRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts'; +import { getRecipeQuerySchema } from '../../../contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts'; /** * Register all JSON schemas with the Nimbus validator. 
diff --git a/examples/gustav/src/shared/mongodb.ts b/examples/gustav/src/shared/infrastructure/mongodb.ts similarity index 100% rename from examples/gustav/src/shared/mongodb.ts rename to examples/gustav/src/shared/infrastructure/mongodb.ts diff --git a/packages/eventsourcing/README.md b/packages/eventsourcing/README.md new file mode 100644 index 0000000..8db202e --- /dev/null +++ b/packages/eventsourcing/README.md @@ -0,0 +1,10 @@ +Nimbus + +# Nimbus Event Sourcing + +The event sourcing package of the Nimbus framework. + +Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. diff --git a/packages/eventsourcing/deno.json b/packages/eventsourcing/deno.json new file mode 100644 index 0000000..ec3ee0f --- /dev/null +++ b/packages/eventsourcing/deno.json @@ -0,0 +1,43 @@ +{ + "name": "@nimbus/eventsourcing", + "version": "0.17.0", + "license": "MIT", + "author": "Daniel Gördes (https://overlap.at)", + "repository": { + "type": "git", + "url": "https://github.com/overlap-dev/Nimbus" + }, + "bugs": { + "url": "https://github.com/overlap-dev/Nimbus/issues" + }, + "homepage": "https://nimbus.overlap.at", + "exports": "./src/index.ts", + "fmt": { + "include": [ + "src/" + ], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "lint": { + "include": [ + "src/" + ], + "rules": { + "exclude": [ + "no-explicit-any", + "no-slow-types" + ] + } + }, + "test": { + "include": [ + "src/" + ] + }, + "imports": {} +} \ No newline at end of file diff --git a/packages/eventsourcing/src/index.ts b/packages/eventsourcing/src/index.ts new file mode 100644 index 0000000..31b1de8 --- /dev/null +++ b/packages/eventsourcing/src/index.ts @@ -0,0 +1,2 @@ +export * from './lib/aggregate.ts'; +export * from './lib/eventStore.ts'; diff --git a/packages/eventsourcing/src/lib/aggregate.ts b/packages/eventsourcing/src/lib/aggregate.ts new file mode 100644 index 0000000..f8c537e --- /dev/null +++ b/packages/eventsourcing/src/lib/aggregate.ts @@ -0,0 +1,162 @@ +import type { Event } from '@nimbus/core'; +import type { EventStore, EventStoreReadOptions } from './eventStore.ts'; + +/** + * Reducer function that applies an event to aggregate state. + * + * @template TState - The type of the aggregate state + */ +export type EventReducer<TState> = ( + state: TState, + event: Event, +) => TState; + +/** + * Options for loading an aggregate. + */ +export type LoadAggregateOptions = EventStoreReadOptions; + +/** + * Result of loading an aggregate from the event store. + * + * @template TState - The type of the aggregate state + */ +export type AggregateSnapshot<TState> = { + state: TState; + events: Event[]; + version: number; +}; + +/** + * Load an aggregate from the event store by replaying events. + * + * This function reads all events for a given subject and applies them + * sequentially using the provided reducer to reconstruct the current state. + * + * @param eventStore - The event store to read from + * @param subject - The subject (aggregate ID) to load + * @param initialState - The initial state before any events + * @param reducer - Function to apply events to state + * @param options - Optional read options (recursive, bounds, etc.)
+ * + * @returns The reconstructed aggregate state with metadata + * + * @example + * ```ts + * const snapshot = await loadAggregate( + * eventStore, + * '/recipes/carbonara', + * null, + * recipeReducer, + * { order: 'chronological' } + * ); + * + * console.log(snapshot.state); // Current recipe state + * console.log(snapshot.version); // Number of events applied + * ``` + */ +export async function loadAggregate<TState>( + eventStore: EventStore, + subject: string, + initialState: TState, + reducer: EventReducer<TState>, + options?: LoadAggregateOptions, +): Promise<AggregateSnapshot<TState>> { + const events = await eventStore.readEvents(subject, { + ...options, + order: 'chronological', // Always apply events in order + }); + + const state = events.reduce(reducer, initialState); + + return { + state, + events, + version: events.length, + }; +} + +/** + * Check if an aggregate exists (has any events). + * + * @param eventStore - The event store to check + * @param subject - The subject (aggregate ID) to check + * + * @returns True if the aggregate has events, false otherwise + */ +export async function aggregateExists( + eventStore: EventStore, + subject: string, +): Promise<boolean> { + const events = await eventStore.readEvents(subject, { + recursive: false, + }); + + return events.length > 0; +} + +/** + * Load multiple aggregates by reading events recursively. + * + * This enables dynamic aggregate boundaries - you can read events for + * a parent subject and reconstruct multiple child aggregates. + * + * @param eventStore - The event store to read from + * @param parentSubject - The parent subject to read recursively + * @param initialState - The initial state for each aggregate + * @param reducer - Function to apply events to state + * @param groupBy - Function to extract subject from event + * + * @returns Map of subject to aggregate snapshot + * + * @example + * ```ts + * // Load all recipes + * const recipes = await loadAggregates( + * eventStore, + * '/recipes', + * null, + * recipeReducer, + * (event) => event.subject + * ); + * + * for (const [subject, snapshot] of recipes) { + * console.log(`${subject}: ${snapshot.state?.name}`); + * } + * ``` + */ +export async function loadAggregates<TState>( + eventStore: EventStore, + parentSubject: string, + initialState: TState, + reducer: EventReducer<TState>, + groupBy: (event: Event) => string, +): Promise<Map<string, AggregateSnapshot<TState>>> { + const events = await eventStore.readEvents(parentSubject, { + recursive: true, + order: 'chronological', + }); + + // Group events by subject + const eventsBySubject = new Map<string, Event[]>(); + for (const event of events) { + const subject = groupBy(event); + if (!eventsBySubject.has(subject)) { + eventsBySubject.set(subject, []); + } + eventsBySubject.get(subject)!.push(event); + } + + // Reduce each subject's events to build aggregates + const aggregates = new Map<string, AggregateSnapshot<TState>>(); + for (const [subject, subjectEvents] of eventsBySubject) { + const state = subjectEvents.reduce(reducer, initialState); + aggregates.set(subject, { + state, + events: subjectEvents, + version: subjectEvents.length, + }); + } + + return aggregates; +} diff --git a/examples/gustav/src/shared/ports/eventStore.ts b/packages/eventsourcing/src/lib/eventStore.ts similarity index 89% rename from examples/gustav/src/shared/ports/eventStore.ts rename to packages/eventsourcing/src/lib/eventStore.ts index 709c440..ab744b0 100644 --- a/examples/gustav/src/shared/ports/eventStore.ts +++ b/packages/eventsourcing/src/lib/eventStore.ts @@ -1,6 +1,4 @@ -import { Event } from '@nimbus/core'; - -// TODO: this interface should be moved to @nimbus/core
+import type { Event } from '@nimbus/core'; export type EventStoreWriteEvent = { source: string; @@ -28,6 +26,9 @@ export type EventStoreMarker = { ifEventIsMissing: 'read-everything' | 'read-nothing'; }; +/** + * Event store interface. + */ export interface EventStore { writeEvents: ( events: EventStoreWriteEvent[], diff --git a/packages/eventsourcingdb/README.md b/packages/eventsourcingdb/README.md new file mode 100644 index 0000000..fb12e25 --- /dev/null +++ b/packages/eventsourcingdb/README.md @@ -0,0 +1,10 @@ +Nimbus + +# Nimbus EventSourcingDB + +The EventSourcingDB package of the Nimbus framework. + +Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. diff --git a/packages/eventsourcingdb/deno.json b/packages/eventsourcingdb/deno.json new file mode 100644 index 0000000..5be4a9a --- /dev/null +++ b/packages/eventsourcingdb/deno.json @@ -0,0 +1,43 @@ +{ + "name": "@nimbus/eventsourcingdb", + "version": "0.17.0", + "license": "MIT", + "author": "Daniel Gördes (https://overlap.at)", + "repository": { + "type": "git", + "url": "https://github.com/overlap-dev/Nimbus" + }, + "bugs": { + "url": "https://github.com/overlap-dev/Nimbus/issues" + }, + "homepage": "https://nimbus.overlap.at", + "exports": "./src/index.ts", + "fmt": { + "include": [ + "src/" + ], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "lint": { + "include": [ + "src/" + ], + "rules": { + "exclude": [ + "no-explicit-any", + "no-slow-types" + ] + } + }, + "test": { + "include": [ + "src/" + ] + }, + "imports": {} +} \ No newline at end of file diff --git a/packages/eventsourcingdb/src/index.ts b/packages/eventsourcingdb/src/index.ts new file mode 100644 index 0000000..fa745f3 --- /dev/null +++ b/packages/eventsourcingdb/src/index.ts @@ -0,0 +1 @@ +export * from './lib/eventSourcingDb.ts'; diff --git a/packages/eventsourcingdb/src/lib/eventSourcingDb.ts b/packages/eventsourcingdb/src/lib/eventSourcingDb.ts new file mode 100644 index 0000000..37ba172 --- /dev/null +++ b/packages/eventsourcingdb/src/lib/eventSourcingDb.ts @@ -0,0 +1,184 @@ +import { type Event, GenericException, getLogger } from '@nimbus/core'; +import type { + EventStore, + EventStoreReadOptions, + EventStoreWriteEvent, +} from '@nimbus/eventsourcing'; + +/** + * Options for EventSourcingDBStore. + */ +export type EventSourcingDBStoreOptions = { + apiUrl: string; + secret: string; +}; + +const defaultReadOptions: EventStoreReadOptions = { + recursive: false, +}; + +/** + * EventSourcingDB adapter for the EventStore interface. + * + * Connects to EventSourcingDB API to write and read events. + * + * @example + * ```ts + * const eventStore = new EventSourcingDBStore({ + * apiUrl: process.env.EVENTSOURCINGDB_API, + * secret: process.env.EVENTSOURCINGDB_SECRET, + * }); + * + * await eventStore.writeEvents([{ + * source: 'my-app', + * subject: '/recipes/carbonara', + * type: 'recipe-added', + * data: { title: 'Carbonara' }, + * }]); + * + * const events = await eventStore.readEvents('/recipes/carbonara'); + * ``` + */ +export class EventSourcingDBStore implements EventStore { + private readonly _apiUrl: string; + private readonly _secret: string; + + constructor(options: EventSourcingDBStoreOptions) { + this._apiUrl = options.apiUrl; + this._secret = options.secret; + } + + /** + * Write events to EventSourcingDB. 
+ * + * @param events - Events to write + * @returns The written events with metadata + */ + async writeEvents(events: EventStoreWriteEvent[]): Promise { + const payload = JSON.stringify({ + events: events, + }); + + const response = await fetch(`${this._apiUrl}/write-events`, { + method: 'POST', + headers: { + 'authorization': `Bearer ${this._secret}`, + 'content-type': 'application/json', + }, + body: payload, + }); + + const body = await response.text(); + + if (!response.ok) { + throw new GenericException('Failed to write events', { + status: response.status, + statusText: response.statusText, + url: response.url, + body, + }); + } + + if (body.startsWith('[')) { + let items: any[]; + + try { + items = JSON.parse(body); + } catch (error: any) { + throw new GenericException('Failed to parse events', { + reason: error.message, + }); + } + + getLogger().debug({ + category: 'Nimbus', + message: 'EventSourcingDBStore :: Events written', + data: { count: items.length }, + }); + + return items; + } else { + throw new GenericException('Failed to parse events', { + reason: 'Response was not an array of events', + }); + } + } + + /** + * Read events from EventSourcingDB. + * + * @param subject - The subject to read events for + * @param options - Read options (recursive, order, bounds, etc.) + * @returns Array of events matching the criteria + */ + async readEvents( + subject: string, + options: EventStoreReadOptions = defaultReadOptions, + ): Promise { + const response = await fetch(`${this._apiUrl}/read-events`, { + method: 'POST', + headers: { + 'authorization': `Bearer ${this._secret}`, + 'content-type': 'application/json', + }, + body: JSON.stringify({ + subject, + options: { + recursive: options.recursive ?? false, + ...(options.order && { order: options.order }), + ...(options.lowerBound && { + lowerBound: options.lowerBound, + }), + ...(options.upperBound && { + upperBound: options.upperBound, + }), + ...(options.fromLatestEvent && { + fromLatestEvent: options.fromLatestEvent, + }), + }, + }), + }); + + const body = await response.text(); + + if (!response.ok) { + throw new GenericException('Failed to read events', { + status: response.status, + statusText: response.statusText, + url: response.url, + body, + }); + } + + let items: any[] = []; + + try { + // We return an empty array if there are no events at all. + if (body.length === 0) { + return []; + } + + // Otherwise we turn the NDJSON response into an array of items. + // https://docs.eventsourcingdb.io/getting-started/reading-events + items = body + .split('\n') + .filter((item) => item.startsWith('{')) + .map((item) => JSON.parse(item)); + } catch (error: any) { + throw new GenericException('Failed to parse events', { + reason: error.message, + }); + } + + // Parse all items to Nimbus objects and ensure type safety. 
+ const events = items.map((item) => item.payload); + + getLogger().debug({ + category: 'Nimbus', + message: 'EventSourcingDBStore :: Events read', + data: { subject, count: events.length }, + }); + + return events; + } +} From ca05435212bf15d594207e9c940e6c50dc8d046f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 6 Oct 2025 14:47:15 +0200 Subject: [PATCH 10/37] adds concurrency control mechanisms and better event mapping --- examples/gustav/concurrency-tester.sh | 57 +++++++++ .../recipe/core/commands/addRecipe.ts | 63 +++++----- .../recipe/core/commands/deleteRecipe.ts | 40 ++++-- .../recipe/core/commands/updateRecipe.ts | 52 +++++--- .../recipe/core/domain/recipeAggregate.ts | 7 +- .../infrastructure/http/commandRouter.ts | 7 ++ .../http/handler/updateRecipe.handler.ts | 17 +++ .../recipe/infrastructure/http/router.ts | 29 ++++- .../http/schemas/updateRecipeCommandSchema.ts | 66 ++++++++++ .../src/shared/infrastructure/http/schemas.ts | 2 + packages/core/src/index.ts | 1 + .../src/lib/exception/concurrencyException.ts | 18 +++ packages/eventsourcing/src/lib/aggregate.ts | 29 +++-- packages/eventsourcing/src/lib/eventStore.ts | 109 ++++++++++++++-- .../src/lib/eventSourcingDb.ts | 116 ++++++++++++++++-- 15 files changed, 522 insertions(+), 91 deletions(-) create mode 100644 examples/gustav/concurrency-tester.sh create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts create mode 100644 packages/core/src/lib/exception/concurrencyException.ts diff --git a/examples/gustav/concurrency-tester.sh b/examples/gustav/concurrency-tester.sh new file mode 100644 index 0000000..00d079f --- /dev/null +++ b/examples/gustav/concurrency-tester.sh @@ -0,0 +1,57 @@ +#!/bin/bash + +# SETUP for testing the concurrency +# +# Go into the file examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts +# and add the following code right before eventStore.writeEvents. +# +# await new Promise((resolve) => +# setTimeout(resolve, Math.floor(Math.random() * (1000 - 200 + 1)) + 200) +# ); +# +# This adds a random delay between 200ms and 1000ms between the read and the write +# on the event store. This way we can easier shake up the order on multiple commands +# to provoke some concurrency issues. + +# Number of concurrent requests +CONCURRENCY=5 + +# Replace :slug with your actual recipe slug +SLUG="test" + +echo "Starting $CONCURRENCY concurrent requests at $(date +%T.%3N)" +echo "==========================================" + +for i in $(seq 1 $CONCURRENCY); do + ( + echo "[Request $i] Starting at $(date +%T.%3N)" + + RESPONSE=$(curl -s -w "\nHTTP_STATUS:%{http_code}\nTIME_TOTAL:%{time_total}" \ + --request PUT \ + --url "http://localhost:3100/recipes/$SLUG" \ + --header 'Authorization: Bearer very-special-secret' \ + --header 'Content-Type: application/json' \ + --header 'User-Agent: insomnium/0.2.3-a' \ + --data "{ + \"name\": \"Test Rezept Change $i\", + \"instructions\": [ + \"1. 
Wasser kochen\" + ] + }") + + HTTP_STATUS=$(echo "$RESPONSE" | grep "HTTP_STATUS:" | cut -d: -f2) + TIME_TOTAL=$(echo "$RESPONSE" | grep "TIME_TOTAL:" | cut -d: -f2) + BODY=$(echo "$RESPONSE" | sed '/HTTP_STATUS:/,$d') + + echo "[Request $i] Completed at $(date +%T.%3N)" + echo "[Request $i] Status: $HTTP_STATUS" + echo "[Request $i] Time: ${TIME_TOTAL}s" + echo "[Request $i] Response Body:" + echo "$BODY" | sed 's/^/ /' + echo "==========================================" + ) & +done + +wait + +echo "All requests completed at $(date +%T.%3N)" diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts index 649e951..94003c2 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts @@ -1,9 +1,13 @@ -import { Command, InvalidInputException } from '@nimbus/core'; -import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; +import { + Command, + ConcurrencyException, + InvalidInputException, +} from '@nimbus/core'; +import { type EventStore } from '@nimbus/eventsourcing'; import { getEnv } from '@nimbus/utils'; import { ulid } from '@std/ulid'; import { Recipe } from '../domain/recipe.ts'; -import { recipeReducer, recipeSubject } from '../domain/recipeAggregate.ts'; +import { recipeSubject } from '../domain/recipeAggregate.ts'; import { RecipeAddedCommandType, RecipeAddedEvent, @@ -25,23 +29,6 @@ export const addRecipe = async ( const subject = recipeSubject(command.data.slug); - // Load current aggregate state by replaying events - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - - // Validate recipe doesn't already exist - if (snapshot.state !== null) { - throw new InvalidInputException('Recipe already exists', { - errorCode: 'DUPLICATE_RECIPE', - reason: - 'A recipe with this slug already exists. The slug for each recipe must be unique, please choose a different slug.', - }); - } - // Create event const recipeAddedEvent: RecipeAddedEvent = { specversion: '1.0', @@ -55,15 +42,33 @@ export const addRecipe = async ( datacontenttype: 'application/json', }; - // Write event - await eventStore.writeEvents([ - { - source: recipeAddedEvent.source, - subject: recipeAddedEvent.subject, - type: recipeAddedEvent.type, - data: recipeAddedEvent.data, - }, - ]); + // Write event with optimistic concurrency control + // Use isSubjectPristine to ensure this is the first event for this subject + try { + await eventStore.writeEvents( + [ + recipeAddedEvent, + ], + { + preconditions: [ + { + type: 'isSubjectPristine', + payload: { subject }, + }, + ], + }, + ); + } catch (error) { + console.log('###error', error); + // Handle concurrency conflict for duplicate recipe + if (error instanceof ConcurrencyException) { + throw new InvalidInputException('Recipe already exists', { + errorCode: 'DUPLICATE_RECIPE', + reason: + 'A recipe with this slug already exists. 
The slug for each recipe must be unique, please choose a different slug.', + }); + } + } return command.data; }; diff --git a/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts index 047aa88..5fabefb 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts @@ -12,13 +12,16 @@ import { RecipeDeletedEventType, } from '../events/recipeDeleted.ts'; -export const DeleteRecipeCommandType = 'at.overlap.nimbus.delete-recipe' as const; +export const DeleteRecipeCommandType = + 'at.overlap.nimbus.delete-recipe' as const; -export type DeleteRecipeCommand = Command<{ - slug: string; -}> & { - type: typeof DeleteRecipeCommandType; -}; +export type DeleteRecipeCommand = + & Command<{ + slug: string; + }> + & { + type: typeof DeleteRecipeCommandType; + }; export const deleteRecipe = async ( command: DeleteRecipeCommand, @@ -54,13 +57,24 @@ export const deleteRecipe = async ( datacontenttype: 'application/json', }; - // Write event - await eventStore.writeEvents([ + // Write event with optimistic concurrency control + // Use isSubjectOnEventId to ensure no other updates happened since we read + await eventStore.writeEvents( + [ + recipeDeletedEvent, + ], { - source: recipeDeletedEvent.source, - subject: recipeDeletedEvent.subject, - type: recipeDeletedEvent.type, - data: recipeDeletedEvent.data, + preconditions: snapshot.lastEventId + ? [ + { + type: 'isSubjectOnEventId', + payload: { + subject, + eventId: snapshot.lastEventId, + }, + }, + ] + : undefined, }, - ]); + ); }; diff --git a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts index 5022416..f2de4b6 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts @@ -13,15 +13,19 @@ import { RecipeUpdatedEventType, } from '../events/recipeUpdated.ts'; -export const UpdateRecipeCommandType = 'at.overlap.nimbus.update-recipe' as const; +export const UpdateRecipeCommandType = + 'at.overlap.nimbus.update-recipe' as const; -export type UpdateRecipeCommand = Command<{ +export type UpdateRecipeData = Partial & { slug: string; - updates: Partial; -}> & { - type: typeof UpdateRecipeCommandType; }; +export type UpdateRecipeCommand = + & Command + & { + type: typeof UpdateRecipeCommandType; + }; + export const updateRecipe = async ( command: UpdateRecipeCommand, eventStore: EventStore, @@ -43,11 +47,18 @@ export const updateRecipe = async ( // Validate recipe exists const currentRecipe = requireRecipe(snapshot.state); + // Prevent slug changes + const updates: Partial = { + ...command.data, + }; + if (updates.slug) { + delete updates.slug; + } + // Apply updates to get new state const updatedRecipe = { ...currentRecipe, - ...command.data.updates, - slug: currentRecipe.slug, // Prevent slug changes + ...updates, }; // Create event @@ -59,19 +70,30 @@ export const updateRecipe = async ( source: EVENT_SOURCE, type: RecipeUpdatedEventType, subject, - data: command.data.updates, + data: updates, datacontenttype: 'application/json', }; - // Write event - await eventStore.writeEvents([ + // Write event with optimistic concurrency control + // Use isSubjectOnEventId to ensure no other updates happened since we read + await eventStore.writeEvents( + [ + recipeUpdatedEvent, + ], { - source: recipeUpdatedEvent.source, - subject: 
recipeUpdatedEvent.subject, - type: recipeUpdatedEvent.type, - data: recipeUpdatedEvent.data, + preconditions: snapshot.lastEventId + ? [ + { + type: 'isSubjectOnEventId', + payload: { + subject, + eventId: snapshot.lastEventId, + }, + }, + ] + : undefined, }, - ]); + ); return updatedRecipe; }; diff --git a/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts b/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts index 2148a2c..1b3974e 100644 --- a/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts +++ b/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts @@ -1,4 +1,4 @@ -import { Event } from '@nimbus/core'; +import { Event, NotFoundException } from '@nimbus/core'; import { type EventReducer } from '@nimbus/eventsourcing'; import { Recipe } from './recipe.ts'; @@ -79,7 +79,10 @@ export function recipeSubject(slug: string): string { */ export function requireRecipe(state: RecipeState): Recipe { if (!state) { - throw new Error('Recipe does not exist'); + throw new NotFoundException('Recipe not found', { + errorCode: 'RECIPE_NOT_FOUND', + reason: 'The recipe with the provided slug was not found', + }); } return state; } diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts index bdc7b10..0417bb9 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts @@ -1,6 +1,8 @@ import { getLogger, MessageRouter } from '@nimbus/core'; import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; +import { UpdateRecipeCommandType } from '../../core/commands/updateRecipe.ts'; import { addRecipeHandler } from './handler/addRecipe.handler.ts'; +import { updateRecipeHandler } from './handler/updateRecipe.handler.ts'; export const commandRouter = new MessageRouter('command', { logInput: (input: any) => { @@ -19,3 +21,8 @@ commandRouter.register( AddRecipeCommandType, addRecipeHandler, ); + +commandRouter.register( + UpdateRecipeCommandType, + updateRecipeHandler, +); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts new file mode 100644 index 0000000..d9ed5ce --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts @@ -0,0 +1,17 @@ +import { MessageHandler } from '@nimbus/core'; +import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; +import { + updateRecipe, + UpdateRecipeCommand, +} from '../../../core/commands/updateRecipe.ts'; +import { Recipe } from '../../../core/domain/recipe.ts'; + +export const updateRecipeHandler: MessageHandler< + UpdateRecipeCommand, + Recipe +> = async (command) => { + return updateRecipe( + command, + eventStore, + ); +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts index 0eee69a..db24b84 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts @@ -1,14 +1,18 @@ import { NimbusOakRouter } from '@nimbus/oak'; import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; +import { UpdateRecipeCommandType } from '../../core/commands/updateRecipe.ts'; import { GetRecipeQueryType } from 
'../../core/queries/getRecipe.ts'; import { commandRouter } from './commandRouter.ts'; import { queryRouter } from './queryRouter.ts'; import { AddRecipeCommandSchemaUrl } from './schemas/addRecipeCommandSchema.ts'; import { GetRecipeQuerySchemaUrl } from './schemas/getRecipeQuerySchema.ts'; +import { UpdateRecipeCommandSchemaUrl } from './schemas/updateRecipeCommandSchema.ts'; export const recipeRouter = new NimbusOakRouter(); -// POST /recipes - Add a new recipe +// Add a new recipe +// +// POST /recipes recipeRouter.command({ path: '/', messageType: AddRecipeCommandType, @@ -17,13 +21,32 @@ recipeRouter.command({ // extractData defaults to reading JSON body }); -// GET /recipes/:slug - Get a recipe by slug +// Update a recipe +// +// PUT /recipes/:slug +recipeRouter.commandPut({ + path: '/:slug', + messageType: UpdateRecipeCommandType, + router: commandRouter, + dataschema: UpdateRecipeCommandSchemaUrl, + extractData: async (ctx) => { + const body = await ctx.request.body.json(); + return { + ...body, + slug: ctx.params.slug, + }; + }, +}); + +// Get a recipe by slug +// +// GET /recipes/:slug recipeRouter.query({ path: '/:slug', messageType: GetRecipeQueryType, router: queryRouter, + dataschema: GetRecipeQuerySchemaUrl, extractData: (ctx) => ({ slug: ctx.params.slug, }), - dataschema: GetRecipeQuerySchemaUrl, }); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts new file mode 100644 index 0000000..d5e43d0 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts @@ -0,0 +1,66 @@ +import { commandSchema } from '@nimbus/core'; +import type { SchemaObject } from 'ajv'; + +export const UpdateRecipeCommandSchemaUrl = + 'https://api.gustav.app/schemas/commands/update-recipe/v1' as const; + +/** + * JSON Schema for UpdateRecipeCommand + * + * This schema validates the data payload of the update-recipe command. + * It will be registered with the validator and referenced via dataschema URL. 
+ */ +export const updateRecipeCommandSchema: SchemaObject = { + ...commandSchema, + $id: UpdateRecipeCommandSchemaUrl, + properties: { + ...commandSchema.properties, + data: { + type: 'object', + required: ['slug'], + properties: { + slug: { + type: 'string', + pattern: '^[a-z0-9-]+$', + minLength: 1, + maxLength: 100, + }, + name: { + type: 'string', + minLength: 1, + maxLength: 200, + }, + ingredients: { + type: 'array', + items: { + type: 'object', + required: ['name', 'amount', 'unit'], + properties: { + name: { type: 'string', minLength: 1 }, + amount: { type: 'number', minimum: 1 }, + unit: { type: 'string', minLength: 1 }, + productId: { type: 'string', minLength: 1 }, + }, + }, + }, + instructions: { + type: 'array', + items: { + type: 'string', + minLength: 1, + }, + }, + tags: { + type: 'array', + items: { + type: 'string', + minLength: 1, + }, + }, + }, + }, + dataschema: { + const: UpdateRecipeCommandSchemaUrl, + }, + }, +}; diff --git a/examples/gustav/src/shared/infrastructure/http/schemas.ts b/examples/gustav/src/shared/infrastructure/http/schemas.ts index ae43ff8..50fcfbb 100644 --- a/examples/gustav/src/shared/infrastructure/http/schemas.ts +++ b/examples/gustav/src/shared/infrastructure/http/schemas.ts @@ -1,6 +1,7 @@ import { getValidator } from '@nimbus/core'; import { addRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts'; import { getRecipeQuerySchema } from '../../../contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts'; +import { updateRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts'; /** * Register all JSON schemas with the Nimbus validator. @@ -12,6 +13,7 @@ export function registerSchemas(): void { const validator = getValidator(); validator.addSchema(addRecipeCommandSchema); + validator.addSchema(updateRecipeCommandSchema); validator.addSchema(getRecipeQuerySchema); // TODO: Add more schemas as you develop more commands/queries diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 00b2a84..62af76b 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,5 +1,6 @@ export * from './lib/auth/authContext.ts'; export * from './lib/event/eventBus.ts'; +export * from './lib/exception/concurrencyException.ts'; export * from './lib/exception/exception.ts'; export * from './lib/exception/forbiddenException.ts'; export * from './lib/exception/genericException.ts'; diff --git a/packages/core/src/lib/exception/concurrencyException.ts b/packages/core/src/lib/exception/concurrencyException.ts new file mode 100644 index 0000000..a33a438 --- /dev/null +++ b/packages/core/src/lib/exception/concurrencyException.ts @@ -0,0 +1,18 @@ +import { Exception } from './exception.ts'; + +/** + * Exception thrown when a concurrency conflict occurs. + * + * This typically happens in event sourcing when using optimistic concurrency control + * and another process has modified the aggregate between reading and writing. + */ +export class ConcurrencyException extends Exception { + constructor(message?: string, details?: Record) { + super( + 'CONCURRENCY_EXCEPTION', + message ?? 
'Concurrency conflict detected', + details, + 409, + ); + } +} diff --git a/packages/eventsourcing/src/lib/aggregate.ts b/packages/eventsourcing/src/lib/aggregate.ts index f8c537e..38e593b 100644 --- a/packages/eventsourcing/src/lib/aggregate.ts +++ b/packages/eventsourcing/src/lib/aggregate.ts @@ -1,5 +1,9 @@ import type { Event } from '@nimbus/core'; -import type { EventStore, EventStoreReadOptions } from './eventStore.ts'; +import type { + EventStore, + EventStoreReadOptions, + EventWithMetadata, +} from './eventStore.ts'; /** * Reducer function that applies an event to aggregate state. @@ -23,8 +27,8 @@ export type LoadAggregateOptions = EventStoreReadOptions; */ export type AggregateSnapshot = { state: TState; - events: Event[]; - version: number; + events: EventWithMetadata[]; + lastEventId?: string; }; /** @@ -67,12 +71,12 @@ export async function loadAggregate( order: 'chronological', // Always apply events in order }); - const state = events.reduce(reducer, initialState); - return { - state, + state: events.reduce(reducer, initialState), events, - version: events.length, + lastEventId: events.length > 0 + ? events[events.length - 1].eventstoremetadata.id + : undefined, }; } @@ -130,7 +134,7 @@ export async function loadAggregates( parentSubject: string, initialState: TState, reducer: EventReducer, - groupBy: (event: Event) => string, + groupBy: (event: EventWithMetadata) => string, ): Promise>> { const events = await eventStore.readEvents(parentSubject, { recursive: true, @@ -138,7 +142,7 @@ export async function loadAggregates( }); // Group events by subject - const eventsBySubject = new Map(); + const eventsBySubject = new Map(); for (const event of events) { const subject = groupBy(event); if (!eventsBySubject.has(subject)) { @@ -150,11 +154,12 @@ export async function loadAggregates( // Reduce each subject's events to build aggregates const aggregates = new Map>(); for (const [subject, subjectEvents] of eventsBySubject) { - const state = subjectEvents.reduce(reducer, initialState); aggregates.set(subject, { - state, + state: subjectEvents.reduce(reducer, initialState), events: subjectEvents, - version: subjectEvents.length, + lastEventId: subjectEvents.length > 0 + ? subjectEvents[subjectEvents.length - 1].eventstoremetadata.id + : undefined, }); } diff --git a/packages/eventsourcing/src/lib/eventStore.ts b/packages/eventsourcing/src/lib/eventStore.ts index ab744b0..7515421 100644 --- a/packages/eventsourcing/src/lib/eventStore.ts +++ b/packages/eventsourcing/src/lib/eventStore.ts @@ -1,10 +1,84 @@ import type { Event } from '@nimbus/core'; -export type EventStoreWriteEvent = { - source: string; - subject: string; - type: string; - data: any; +/** + * Precondition to ensure a subject is on a specific event ID. + * Enables optimistic concurrency control. + * + * @example + * ```ts + * { + * type: 'isSubjectOnEventId', + * payload: { + * subject: '/recipes/carbonara', + * eventId: '42' // Last known event ID + * } + * } + * ``` + */ +export type IsSubjectOnEventIdPrecondition = { + type: 'isSubjectOnEventId'; + payload: { + subject: string; + eventId: string; + }; +}; + +/** + * Precondition to ensure a subject has no existing events. + * Used when creating a new aggregate to prevent duplicates. 
+ * + * @example + * ```ts + * { + * type: 'isSubjectPristine', + * payload: { + * subject: '/recipes/carbonara' + * } + * } + * ``` + */ +export type IsSubjectPristinePrecondition = { + type: 'isSubjectPristine'; + payload: { + subject: string; + }; +}; + +/** + * Sometimes, you want to ensure that an event is only written if a more complex condition holds + * for example, if no similar event has ever been recorded before. + * The isEventQlQueryTrue precondition lets you define such conditions using EventQL. + * + * @example + * ```ts + * { + * type: 'isEventQlQueryTrue', + * payload: { + * query: 'FROM e IN events WHERE e.data.title == "2001 – A Space Odyssey" PROJECT INTO COUNT() == 0' + * } + * ``` + */ +export type IsEventQlQueryTruePrecondition = { + type: 'isEventQlQueryTrue'; + payload: { + query: string; + }; +}; + +/** + * Preconditions for writing events. + * Used for optimistic concurrency control and validation. + */ +export type EventStorePrecondition = + | IsSubjectOnEventIdPrecondition + | IsSubjectPristinePrecondition + | IsEventQlQueryTruePrecondition; + +/** + * Options for writing events. + */ +export type EventStoreWriteOptions = { + preconditions?: EventStorePrecondition[]; }; export type EventStoreReadOptions = { @@ -26,16 +100,35 @@ export type EventStoreMarker = { ifEventIsMissing: 'read-everything' | 'read-nothing'; }; +/** + * Metadata from the event store. + * Includes the database-assigned event ID for concurrency control. + */ +export type EventStoreMetadata = { + id: string; + hash?: string; + predecessorhash?: string; + signature?: string | null; +}; + +/** + * Event with metadata from the event store. + */ +export type EventWithMetadata = Event & { + eventstoremetadata: EventStoreMetadata; +}; + /** * Event store interface. */ export interface EventStore { writeEvents: ( - events: EventStoreWriteEvent[], - ) => Promise; + events: Event[], + options?: EventStoreWriteOptions, + ) => Promise; readEvents: ( subject: string, options?: EventStoreReadOptions, - ) => Promise; + ) => Promise; } diff --git a/packages/eventsourcingdb/src/lib/eventSourcingDb.ts b/packages/eventsourcingdb/src/lib/eventSourcingDb.ts index 37ba172..4eab21d 100644 --- a/packages/eventsourcingdb/src/lib/eventSourcingDb.ts +++ b/packages/eventsourcingdb/src/lib/eventSourcingDb.ts @@ -1,10 +1,46 @@ -import { type Event, GenericException, getLogger } from '@nimbus/core'; +import { + ConcurrencyException, + type Event, + GenericException, + getLogger, +} from '@nimbus/core'; import type { EventStore, EventStoreReadOptions, - EventStoreWriteEvent, + EventStoreWriteOptions, + EventWithMetadata, } from '@nimbus/eventsourcing'; +export type MappingEnvelope = { + nimbusData: { + id: string; + correlationid: string; + dataschema?: string; + }; + data: any; +}; + +export type EventSourcingDbInput = { + source: string; + subject: string; + type: string; + data: MappingEnvelope; +}; + +export type EventSourcingDbEvent = { + source: string; + subject: string; + type: string; + specversion: '1.0'; + id: string; + time: string; + datacontenttype: string; + data: any; + hash: string; + predecessorhash: string; + signature: string | null; +}; + /** * Options for EventSourcingDBStore. */ @@ -52,11 +88,19 @@ export class EventSourcingDBStore implements EventStore { * Write events to EventSourcingDB. 
* * @param events - Events to write + * @param options - Write options including preconditions for optimistic concurrency * @returns The written events with metadata + * @throws ConcurrencyException if preconditions fail (409 status) */ - async writeEvents(events: EventStoreWriteEvent[]): Promise { + async writeEvents( + events: Event[], + options?: EventStoreWriteOptions, + ): Promise { const payload = JSON.stringify({ - events: events, + events: events.map(this._mapNimbusEventToEventSourcingDbInput), + ...(options?.preconditions && { + preconditions: options.preconditions, + }), }); const response = await fetch(`${this._apiUrl}/write-events`, { @@ -70,6 +114,17 @@ export class EventSourcingDBStore implements EventStore { const body = await response.text(); + if (response.status === 409) { + throw new ConcurrencyException( + `Concurrency conflict. At least one precondition failed.`, + { + ...(options?.preconditions && + { preconditions: options.preconditions }), + response: body || 'Precondition check failed', + }, + ); + } + if (!response.ok) { throw new GenericException('Failed to write events', { status: response.status, @@ -96,7 +151,7 @@ export class EventSourcingDBStore implements EventStore { data: { count: items.length }, }); - return items; + return items.map(this._mapEventSourcingDbEventToNimbusEvent); } else { throw new GenericException('Failed to parse events', { reason: 'Response was not an array of events', @@ -109,12 +164,12 @@ export class EventSourcingDBStore implements EventStore { * * @param subject - The subject to read events for * @param options - Read options (recursive, order, bounds, etc.) - * @returns Array of events matching the criteria + * @returns Array of events matching the criteria with metadata */ async readEvents( subject: string, options: EventStoreReadOptions = defaultReadOptions, - ): Promise { + ): Promise { const response = await fetch(`${this._apiUrl}/read-events`, { method: 'POST', headers: { @@ -170,8 +225,9 @@ export class EventSourcingDBStore implements EventStore { }); } - // Parse all items to Nimbus objects and ensure type safety. 
- const events = items.map((item) => item.payload); + const events: EventWithMetadata[] = items.map((item) => + this._mapEventSourcingDbEventToNimbusEvent(item.payload) + ); getLogger().debug({ category: 'Nimbus', message: 'EventSourcingDBStore :: Events read', data: { subject, count: events.length }, }); return events; } + + private _mapNimbusEventToEventSourcingDbInput( + event: Event, + ): EventSourcingDbInput { + return { + source: event.source, + subject: event.subject, + type: event.type, + data: { + nimbusData: { + id: event.id, + correlationid: event.correlationid, + ...(event.dataschema && { dataschema: event.dataschema }), + }, + data: event.data, + }, + }; + } + + private _mapEventSourcingDbEventToNimbusEvent( + dbEvent: EventSourcingDbEvent, + ): EventWithMetadata { + return { + specversion: '1.0', + id: dbEvent.data.nimbusData.id, + correlationid: dbEvent.data.nimbusData.correlationid, + time: dbEvent.time, + source: dbEvent.source, + type: dbEvent.type, + subject: dbEvent.subject, + data: dbEvent.data.data, + datacontenttype: dbEvent.datacontenttype, + ...(dbEvent.data.nimbusData.dataschema && + { dataschema: dbEvent.data.nimbusData.dataschema }), + eventstoremetadata: { + id: dbEvent.id, + hash: dbEvent.hash, + predecessorhash: dbEvent.predecessorhash, + signature: dbEvent.signature, + }, + }; + } } From 371dd49a4ec7f9510f29598f6ddee57e6b2e544e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 6 Oct 2025 17:05:23 +0200 Subject: [PATCH 11/37] fix(oak): fixes commandDelete method to not return data As it conflicts with status code 204, we do not respond with any data anymore --- packages/oak/src/lib/router.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/oak/src/lib/router.ts b/packages/oak/src/lib/router.ts index 3583014..2e67601 100644 --- a/packages/oak/src/lib/router.ts +++ b/packages/oak/src/lib/router.ts @@ -314,10 +314,9 @@ export class NimbusOakRouter extends OakRouter { ...(options.dataschema && { dataschema: options.dataschema }), }; - const result = await options.router.route(command); + await options.router.route(command); ctx.response.status = 204; - ctx.response.body = result as any; } catch (error: any) { handleOakError(error, ctx, options.onError); } From d8a8b8d9fcb87b6a0042aeccc78647f4e7acef35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 6 Oct 2025 17:08:51 +0200 Subject: [PATCH 12/37] implements event store observe method to observe and replay written events and then update our read models --- .../recipe/core/commands/addRecipe.ts | 58 +++--- .../recipe/core/commands/updateRecipe.ts | 5 +- .../recipe/core/events/recipeDeleted.ts | 12 +- .../recipe/core/events/recipeUpdated.ts | 24 ++- .../recipe/core/queries/listRecipes.ts | 62 ++---- .../infrastructure/http/commandRouter.ts | 7 + .../http/handler/addRecipe.handler.ts | 4 +- .../http/handler/deleteRecipe.handler.ts | 16 ++ .../http/handler/getRecipe.handler.ts | 4 +- .../http/handler/listRecipes.handler.ts | 16 ++ .../http/handler/recipeAdded.handler.ts | 4 +- .../http/handler/updateRecipe.handler.ts | 4 +- .../recipe/infrastructure/http/queryRouter.ts | 7 + .../recipe/infrastructure/http/router.ts | 37 ++++ .../http/schemas/deleteRecipeCommandSchema.ts | 28 +++ .../http/schemas/listRecipesQuerySchema.ts | 31 +++ examples/gustav/src/main.ts | 4 +- .../src/shared/infrastructure/eventStore.ts | 108 ++++++++++- .../src/shared/infrastructure/http/schemas.ts | 4 + packages/eventsourcing/src/lib/eventStore.ts | 124 ++++++++++++
.../src/lib/eventSourcingDb.ts | 27 +++ .../src/lib/eventSourcingDbSubscription.ts | 180 ++++++++++++++++++ 22 files changed, 655 insertions(+), 111 deletions(-) create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts create mode 100644 packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts index 94003c2..0cedd65 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts @@ -1,13 +1,9 @@ -import { - Command, - ConcurrencyException, - InvalidInputException, -} from '@nimbus/core'; -import { type EventStore } from '@nimbus/eventsourcing'; +import { Command, InvalidInputException } from '@nimbus/core'; +import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; import { getEnv } from '@nimbus/utils'; import { ulid } from '@std/ulid'; import { Recipe } from '../domain/recipe.ts'; -import { recipeSubject } from '../domain/recipeAggregate.ts'; +import { recipeReducer, recipeSubject } from '../domain/recipeAggregate.ts'; import { RecipeAddedCommandType, RecipeAddedEvent, @@ -29,6 +25,22 @@ export const addRecipe = async ( const subject = recipeSubject(command.data.slug); + // Load current aggregate state by replaying events + const snapshot = await loadAggregate( + eventStore, + subject, + null, + recipeReducer, + ); + + if (snapshot.state !== null) { + throw new InvalidInputException('Recipe already exists', { + errorCode: 'DUPLICATE_RECIPE', + reason: + 'A recipe with this slug already exists. The slug for each recipe must be unique, please choose a different slug.', + }); + } + // Create event const recipeAddedEvent: RecipeAddedEvent = { specversion: '1.0', @@ -42,33 +54,11 @@ export const addRecipe = async ( datacontenttype: 'application/json', }; - // Write event with optimistic concurrency control - // Use isSubjectPristine to ensure this is the first event for this subject - try { - await eventStore.writeEvents( - [ - recipeAddedEvent, - ], - { - preconditions: [ - { - type: 'isSubjectPristine', - payload: { subject }, - }, - ], - }, - ); - } catch (error) { - console.log('###error', error); - // Handle concurrency conflict for duplicate recipe - if (error instanceof ConcurrencyException) { - throw new InvalidInputException('Recipe already exists', { - errorCode: 'DUPLICATE_RECIPE', - reason: - 'A recipe with this slug already exists. 
The slug for each recipe must be unique, please choose a different slug.', - }); - } - } + await eventStore.writeEvents( + [ + recipeAddedEvent, + ], + ); return command.data; }; diff --git a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts index f2de4b6..2ed3f82 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts @@ -70,7 +70,10 @@ export const updateRecipe = async ( source: EVENT_SOURCE, type: RecipeUpdatedEventType, subject, - data: updates, + data: { + slug: command.data.slug, + updates, + }, datacontenttype: 'application/json', }; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts b/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts index 8fc0c54..4bc3b66 100644 --- a/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts +++ b/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts @@ -1,7 +1,8 @@ import { type Event, getLogger } from '@nimbus/core'; import { RecipeRepository } from '../ports/recipeRepository.ts'; -export const RecipeDeletedEventType = 'at.overlap.nimbus.recipe-deleted' as const; +export const RecipeDeletedEventType = + 'at.overlap.nimbus.recipe-deleted' as const; export type RecipeDeletedEvent = Event<{ slug: string }> & { type: typeof RecipeDeletedEventType; @@ -16,12 +17,5 @@ export const recipeDeleted = async ( data: event.data, }); - // In event sourcing, this would update the read model - // The event itself is already stored in the event store - const slug = event.subject?.split('/').pop(); - if (!slug) { - throw new Error('Recipe slug not found in event subject'); - } - - await repository.delete(slug); + await repository.delete(event.data.slug); }; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts b/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts index cdc0f98..cedc39c 100644 --- a/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts +++ b/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts @@ -2,11 +2,17 @@ import { type Event, getLogger } from '@nimbus/core'; import { Recipe } from '../domain/recipe.ts'; import { RecipeRepository } from '../ports/recipeRepository.ts'; -export const RecipeUpdatedEventType = 'at.overlap.nimbus.recipe-updated' as const; +export const RecipeUpdatedEventType = + 'at.overlap.nimbus.recipe-updated' as const; -export type RecipeUpdatedEvent = Event> & { - type: typeof RecipeUpdatedEventType; -}; +export type RecipeUpdatedEvent = + & Event<{ + slug: string; + updates: Partial; + }> + & { + type: typeof RecipeUpdatedEventType; + }; export const recipeUpdated = async ( event: RecipeUpdatedEvent, @@ -17,13 +23,5 @@ export const recipeUpdated = async ( data: event.data, }); - // In event sourcing, this would update the read model - // The event itself is already stored in the event store - const slug = event.subject?.split('/').pop(); - if (!slug) { - throw new Error('Recipe slug not found in event subject'); - } - - const recipe = await repository.update(slug, event.data); - return recipe; + await repository.update(event.data.slug, event.data.updates); }; diff --git a/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts b/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts index bcb0255..eedfc7d 100644 --- a/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts +++ 
b/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts @@ -1,58 +1,26 @@ import { Query } from '@nimbus/core'; -import { type EventStore, loadAggregates } from '@nimbus/eventsourcing'; import { Recipe } from '../domain/recipe.ts'; -import { recipeReducer } from '../domain/recipeAggregate.ts'; +import { RecipeRepository } from '../ports/recipeRepository.ts'; export const ListRecipesQueryType = 'at.overlap.nimbus.list-recipes' as const; -export type ListRecipesQuery = Query<{ - limit?: number; - offset?: number; -}> & { - type: typeof ListRecipesQueryType; -}; +export type ListRecipesQuery = + & Query<{ + limit?: number; + offset?: number; + }> + & { + type: typeof ListRecipesQueryType; + }; -/** - * List all recipes using dynamic aggregate boundaries. - * - * This demonstrates reading events recursively across multiple aggregates - * and reconstructing each one's state independently. - * - * Instead of querying a read model, we rebuild state from events in real-time. - * This is powerful for: - * - Avoiding eventual consistency issues (always current) - * - Querying across aggregate boundaries - * - Temporal queries (add time bounds to see state at any point) - * - * @param query - List query with pagination - * @param eventStore - Event store to read from - * @returns Array of reconstructed recipes - */ export const listRecipes = async ( query: ListRecipesQuery, - eventStore: EventStore, + repository: RecipeRepository, ): Promise => { - // Load all recipe aggregates by reading events recursively - // from the parent subject '/recipes' - const aggregates = await loadAggregates( - eventStore, - '/recipes', - null, - recipeReducer, - (event) => event.subject, // Group events by their subject - ); - - // Extract non-null recipe states (filter out deleted recipes) - const recipes: Recipe[] = []; - for (const [_subject, snapshot] of aggregates) { - if (snapshot.state !== null) { - recipes.push(snapshot.state); - } - } - - // Apply pagination (in a real app, you might do this at the event store level) - const offset = query.data.offset ?? 0; - const limit = query.data.limit ?? 
10; + const recipes = await repository.list({ + limit: query.data.limit, + offset: query.data.offset, + }); - return recipes.slice(offset, offset + limit); + return recipes; }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts index 0417bb9..ad454f5 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts @@ -1,7 +1,9 @@ import { getLogger, MessageRouter } from '@nimbus/core'; import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; +import { DeleteRecipeCommandType } from '../../core/commands/deleteRecipe.ts'; import { UpdateRecipeCommandType } from '../../core/commands/updateRecipe.ts'; import { addRecipeHandler } from './handler/addRecipe.handler.ts'; +import { deleteRecipeHandler } from './handler/deleteRecipe.handler.ts'; import { updateRecipeHandler } from './handler/updateRecipe.handler.ts'; export const commandRouter = new MessageRouter('command', { @@ -26,3 +28,8 @@ commandRouter.register( UpdateRecipeCommandType, updateRecipeHandler, ); + +commandRouter.register( + DeleteRecipeCommandType, + deleteRecipeHandler, +); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts index 4d9a68e..5377ea6 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts @@ -10,8 +10,10 @@ export const addRecipeHandler: MessageHandler< AddRecipeCommand, Recipe > = async (command) => { - return addRecipe( + const recipe = await addRecipe( command, eventStore, ); + + return recipe; }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts new file mode 100644 index 0000000..ba35039 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts @@ -0,0 +1,16 @@ +import { MessageHandler } from '@nimbus/core'; +import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; +import { + deleteRecipe, + DeleteRecipeCommand, +} from '../../../core/commands/deleteRecipe.ts'; + +export const deleteRecipeHandler: MessageHandler< + DeleteRecipeCommand, + void +> = async (command) => { + await deleteRecipe( + command, + eventStore, + ); +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts index 9ec788e..6dc486a 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts @@ -7,5 +7,7 @@ export const getRecipeHandler: MessageHandler< GetRecipeQuery, Recipe > = async (query) => { - return getRecipe(query, recipeMemoryRepository); + const recipe = await getRecipe(query, recipeMemoryRepository); + + return recipe; }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts new file mode 100644 index 0000000..5ed5db5 --- /dev/null +++ 
b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts @@ -0,0 +1,16 @@ +import { MessageHandler } from '@nimbus/core'; +import { Recipe } from '../../../core/domain/recipe.ts'; +import { + listRecipes, + ListRecipesQuery, +} from '../../../core/queries/listRecipes.ts'; +import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; + +export const listRecipesHandler: MessageHandler< + ListRecipesQuery, + Recipe[] +> = async (query) => { + const recipes = await listRecipes(query, recipeMemoryRepository); + + return recipes; +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts index 14a9a27..3133934 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts @@ -10,5 +10,7 @@ export const recipeAddedHandler: MessageHandler< RecipeAddedEvent, Recipe > = async (event) => { - return recipeAdded(event, recipeMemoryRepository); + const recipe = await recipeAdded(event, recipeMemoryRepository); + + return recipe; }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts index d9ed5ce..2bd12b4 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts @@ -10,8 +10,10 @@ export const updateRecipeHandler: MessageHandler< UpdateRecipeCommand, Recipe > = async (command) => { - return updateRecipe( + const recipe = await updateRecipe( command, eventStore, ); + + return recipe; }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts index 85c6316..38d240a 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts @@ -1,6 +1,8 @@ import { getLogger, MessageRouter } from '@nimbus/core'; import { GetRecipeQueryType } from '../../core/queries/getRecipe.ts'; +import { ListRecipesQueryType } from '../../core/queries/listRecipes.ts'; import { getRecipeHandler } from './handler/getRecipe.handler.ts'; +import { listRecipesHandler } from './handler/listRecipes.handler.ts'; export const queryRouter = new MessageRouter('query', { logInput: (input: any) => { @@ -19,3 +21,8 @@ queryRouter.register( GetRecipeQueryType, getRecipeHandler, ); + +queryRouter.register( + ListRecipesQueryType, + listRecipesHandler, +); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts index db24b84..d301ffa 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts @@ -1,11 +1,15 @@ import { NimbusOakRouter } from '@nimbus/oak'; import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; +import { DeleteRecipeCommandType } from '../../core/commands/deleteRecipe.ts'; import { UpdateRecipeCommandType } from '../../core/commands/updateRecipe.ts'; import { GetRecipeQueryType } from '../../core/queries/getRecipe.ts'; +import { ListRecipesQueryType } from 
'../../core/queries/listRecipes.ts'; import { commandRouter } from './commandRouter.ts'; import { queryRouter } from './queryRouter.ts'; import { AddRecipeCommandSchemaUrl } from './schemas/addRecipeCommandSchema.ts'; +import { DeleteRecipeCommandSchemaUrl } from './schemas/deleteRecipeCommandSchema.ts'; import { GetRecipeQuerySchemaUrl } from './schemas/getRecipeQuerySchema.ts'; +import { ListRecipesQuerySchemaUrl } from './schemas/listRecipesQuerySchema.ts'; import { UpdateRecipeCommandSchemaUrl } from './schemas/updateRecipeCommandSchema.ts'; export const recipeRouter = new NimbusOakRouter(); @@ -38,6 +42,19 @@ recipeRouter.commandPut({ }, }); +// Delete a recipe +// +// DELETE /recipes/:slug +recipeRouter.commandDelete({ + path: '/:slug', + messageType: DeleteRecipeCommandType, + router: commandRouter, + dataschema: DeleteRecipeCommandSchemaUrl, + extractData: (ctx) => ({ + slug: ctx.params.slug, + }), +}); + // Get a recipe by slug // // GET /recipes/:slug @@ -50,3 +67,23 @@ recipeRouter.query({ slug: ctx.params.slug, }), }); + +// List recipes +// +// GET /recipes +recipeRouter.query({ + path: '/', + messageType: ListRecipesQueryType, + router: queryRouter, + dataschema: ListRecipesQuerySchemaUrl, + extractData: (ctx) => { + const { limit, offset } = Object.fromEntries( + ctx.request.url.searchParams, + ); + + return { + limit: limit ? Number(limit) : undefined, + offset: offset ? Number(offset) : undefined, + }; + }, +}); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts new file mode 100644 index 0000000..05a1a83 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts @@ -0,0 +1,28 @@ +import { querySchema } from '@nimbus/core'; +import type { SchemaObject } from 'ajv'; + +export const DeleteRecipeCommandSchemaUrl = + 'https://api.gustav.app/schemas/commands/delete-recipe/v1' as const; + +export const deleteRecipeCommandSchema: SchemaObject = { + ...querySchema, + $id: DeleteRecipeCommandSchemaUrl, + properties: { + ...querySchema.properties, + data: { + type: 'object', + required: ['slug'], + properties: { + slug: { + type: 'string', + pattern: '^[a-z0-9-]+$', + minLength: 1, + maxLength: 100, + }, + }, + }, + dataschema: { + const: DeleteRecipeCommandSchemaUrl, + }, + }, +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts new file mode 100644 index 0000000..495d8d4 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts @@ -0,0 +1,31 @@ +import { querySchema } from '@nimbus/core'; +import type { SchemaObject } from 'ajv'; + +export const ListRecipesQuerySchemaUrl = + 'https://api.gustav.app/schemas/queries/list-recipes/v1' as const; + +export const listRecipesQuerySchema: SchemaObject = { + ...querySchema, + $id: ListRecipesQuerySchemaUrl, + properties: { + ...querySchema.properties, + data: { + type: 'object', + required: [], + properties: { + limit: { + type: 'number', + minimum: 1, + maximum: 100, + }, + offset: { + type: 'number', + minimum: 0, + }, + }, + }, + dataschema: { + const: ListRecipesQuerySchemaUrl, + }, + }, +}; diff --git a/examples/gustav/src/main.ts b/examples/gustav/src/main.ts index 4c7888e..cdd3ccb 100644 --- a/examples/gustav/src/main.ts +++ 
b/examples/gustav/src/main.ts @@ -30,8 +30,8 @@ setupLogger({ // Register JSON schemas for validation registerSchemas(); -// Initialize EventStore -initEventStore(); +// Initialize EventStore (includes event observer for read models) +await initEventStore(); // Initialize MongoDB Manager initMongoConnectionManager(); diff --git a/examples/gustav/src/shared/infrastructure/eventStore.ts b/examples/gustav/src/shared/infrastructure/eventStore.ts index 9b5799e..fd8c161 100644 --- a/examples/gustav/src/shared/infrastructure/eventStore.ts +++ b/examples/gustav/src/shared/infrastructure/eventStore.ts @@ -1,9 +1,37 @@ +import { getLogger } from '@nimbus/core'; +import type { EventStoreSubscription } from '@nimbus/eventsourcing'; import { EventSourcingDBStore } from '@nimbus/eventsourcingdb'; import { getEnv } from '@nimbus/utils'; +import { + recipeAdded, + RecipeAddedCommandType, +} from '../../contexts/recipe/core/events/recipeAdded.ts'; +import { + recipeDeleted, + RecipeDeletedEventType, +} from '../../contexts/recipe/core/events/recipeDeleted.ts'; +import { + recipeUpdated, + RecipeUpdatedEventType, +} from '../../contexts/recipe/core/events/recipeUpdated.ts'; +import { recipeMemoryRepository } from '../../contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts'; export let eventStore: EventSourcingDBStore; +let subscription: EventStoreSubscription | undefined; -export const initEventStore = () => { +/** + * Initialize the event store and start observing events. + * + * This sets up: + * 1. Connection to EventSourcingDB + * 2. Event observer that keeps read models in sync + * + * Pattern: EventStore (source of truth) → Observer → Read Model Updates + */ +export const initEventStore = async () => { + const logger = getLogger(); + + // 1. Initialize EventStore connection const { EVENTSOURCINGDB_API, EVENTSOURCINGDB_SECRET } = getEnv({ variables: ['EVENTSOURCINGDB_API', 'EVENTSOURCINGDB_SECRET'], }); @@ -12,4 +40,82 @@ export const initEventStore = () => { apiUrl: EVENTSOURCINGDB_API, secret: EVENTSOURCINGDB_SECRET, }); + + logger.info({ + category: 'Gustav', + message: 'EventStore initialized', + }); + + // 2. 
Start observing events to keep read models in sync + logger.info({ + category: 'Gustav', + message: 'Starting event observer for read model synchronization', + }); + + subscription = await eventStore.observe({ + subject: '/', + recursive: true, + // sinceEventId: '123', // Optional: Resume from checkpoint after restart + handler: async (event) => { + logger.debug({ + category: 'Gustav', + message: `Processing event: ${event.type}`, + data: { + eventId: event.eventstoremetadata.id, + subject: event.subject, + }, + }); + + // Route events to appropriate read model handlers + switch (event.type) { + case RecipeAddedCommandType: + await recipeAdded(event as any, recipeMemoryRepository); + break; + + case RecipeUpdatedEventType: + await recipeUpdated(event as any, recipeMemoryRepository); + break; + + case RecipeDeletedEventType: + await recipeDeleted(event as any, recipeMemoryRepository); + break; + + default: + logger.debug({ + category: 'Gustav', + message: + `No handler registered for event type: ${event.type}`, + }); + } + }, + onError: (error, event) => { + logger.error({ + category: 'Gustav', + message: 'Failed to process event', + error, + data: { + eventType: event?.type, + eventId: event?.eventstoremetadata?.id, + }, + }); + }, + }); + + logger.info({ + category: 'Gustav', + message: 'Event observer started successfully', + }); +}; + +/** + * Cleanup event store observer on shutdown. + */ +export const stopEventStore = async () => { + if (subscription) { + await subscription.unsubscribe(); + getLogger().info({ + category: 'Gustav', + message: 'Event observer stopped', + }); + } }; diff --git a/examples/gustav/src/shared/infrastructure/http/schemas.ts b/examples/gustav/src/shared/infrastructure/http/schemas.ts index 50fcfbb..acf2835 100644 --- a/examples/gustav/src/shared/infrastructure/http/schemas.ts +++ b/examples/gustav/src/shared/infrastructure/http/schemas.ts @@ -1,6 +1,8 @@ import { getValidator } from '@nimbus/core'; import { addRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts'; +import { deleteRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts'; import { getRecipeQuerySchema } from '../../../contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts'; +import { listRecipesQuerySchema } from '../../../contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts'; import { updateRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts'; /** @@ -14,7 +16,9 @@ export function registerSchemas(): void { validator.addSchema(addRecipeCommandSchema); validator.addSchema(updateRecipeCommandSchema); + validator.addSchema(deleteRecipeCommandSchema); validator.addSchema(getRecipeQuerySchema); + validator.addSchema(listRecipesQuerySchema); // TODO: Add more schemas as you develop more commands/queries } diff --git a/packages/eventsourcing/src/lib/eventStore.ts b/packages/eventsourcing/src/lib/eventStore.ts index 7515421..94c3690 100644 --- a/packages/eventsourcing/src/lib/eventStore.ts +++ b/packages/eventsourcing/src/lib/eventStore.ts @@ -118,17 +118,141 @@ export type EventWithMetadata = Event & { eventstoremetadata: EventStoreMetadata; }; +/** + * Configuration for observing events from the event store. 
+ * + * @example + * ```ts + * { + * subject: '/recipes', + * recursive: true, + * sinceEventId: '42', + * handler: async (event) => { + * console.log('New event:', event); + * }, + * onError: (error, event) => { + * console.error('Failed to process event:', error); + * } + * } + * ``` + */ +export type EventStoreObserveConfig = { + /** + * The subject to observe events for. + */ + subject: string; + + /** + * Whether to observe events recursively for all child subjects. + * @default false + */ + recursive?: boolean; + + /** + * Resume observing from a specific event ID. + * Useful for checkpointing and recovery after restarts. + */ + sinceEventId?: string; + + /** + * Handler called for each new event. + */ + handler: (event: EventWithMetadata) => Promise; + + /** + * Error handler called when event processing fails. + * If not provided, errors will be thrown. + */ + onError?: (error: Error, event?: EventWithMetadata) => void; +}; + +/** + * Status of an event store subscription. + */ +export type EventStoreSubscriptionStatus = + | 'active' + | 'paused' + | 'closed' + | 'error'; + +/** + * Represents an active subscription to events from the event store. + * Returned by EventStore.observe(). + */ +export interface EventStoreSubscription { + /** + * Unsubscribe from the event stream and clean up resources. + */ + unsubscribe(): Promise; + + /** + * Get the current status of the subscription. + */ + getStatus(): EventStoreSubscriptionStatus; + + /** + * Get the last processed event ID. + * Useful for checkpointing. + */ + getLastEventId(): string | undefined; +} + /** * Event store interface. */ export interface EventStore { + /** + * Write events to the event store. + * + * @param events - Events to write + * @param options - Write options including preconditions + * @returns The written events with metadata + */ writeEvents: ( events: Event[], options?: EventStoreWriteOptions, ) => Promise; + /** + * Read events from the event store. + * + * @param subject - The subject to read events for + * @param options - Read options (recursive, order, bounds, etc.) + * @returns Array of events matching the criteria with metadata + */ readEvents: ( subject: string, options?: EventStoreReadOptions, ) => Promise; + + /** + * Observe events from the event store in real-time. + * + * Creates a subscription that calls the handler for each new event. + * The implementation may use SSE, WebSockets, polling, or other mechanisms. 
+ * + * @param config - Configuration for observing events + * @returns A subscription object that can be used to unsubscribe + * + * @example + * ```ts + * const subscription = await eventStore.observe({ + * subject: '/recipes', + * recursive: true, + * handler: async (event) => { + * // Update read model + * await updateRecipeProjection(event); + * }, + * onError: (error, event) => { + * console.error('Failed to process event:', error); + * } + * }); + * + * // Later, when shutting down: + * await subscription.unsubscribe(); + * ``` + */ + observe: ( + config: EventStoreObserveConfig, + ) => Promise; } diff --git a/packages/eventsourcingdb/src/lib/eventSourcingDb.ts b/packages/eventsourcingdb/src/lib/eventSourcingDb.ts index 4eab21d..502cd5b 100644 --- a/packages/eventsourcingdb/src/lib/eventSourcingDb.ts +++ b/packages/eventsourcingdb/src/lib/eventSourcingDb.ts @@ -6,10 +6,13 @@ import { } from '@nimbus/core'; import type { EventStore, + EventStoreObserveConfig, EventStoreReadOptions, + EventStoreSubscription, EventStoreWriteOptions, EventWithMetadata, } from '@nimbus/eventsourcing'; +import { EventSourcingDBSubscription } from './eventSourcingDbSubscription.ts'; export type MappingEnvelope = { nimbusData: { @@ -238,6 +241,30 @@ export class EventSourcingDBStore implements EventStore { return events; } + /** + * Observe events from EventSourcingDB in real-time. + * + * Creates a streaming connection to EventSourcingDB and calls the handler + * for each new event. Supports resuming from a specific event ID. + * + * @param config - Configuration for observing events + * @returns A subscription object that can be used to unsubscribe + */ + async observe( + config: EventStoreObserveConfig, + ): Promise { + const subscription = new EventSourcingDBSubscription( + this._apiUrl, + this._secret, + config, + this._mapEventSourcingDbEventToNimbusEvent.bind(this), + ); + + await subscription.start(); + + return subscription; + } + private _mapNimbusEventToEventSourcingDbInput( event: Event, ): EventSourcingDbInput { diff --git a/packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts b/packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts new file mode 100644 index 0000000..6c436a4 --- /dev/null +++ b/packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts @@ -0,0 +1,180 @@ +import { GenericException, getLogger } from '@nimbus/core'; +import type { + EventStoreObserveConfig, + EventStoreSubscription, + EventStoreSubscriptionStatus, + EventWithMetadata, +} from '@nimbus/eventsourcing'; +import type { EventSourcingDbEvent } from './eventSourcingDb.ts'; + +/** + * Subscription implementation for EventSourcingDB. + * Manages the streaming connection and processes incoming events. 
+ */ +export class EventSourcingDBSubscription implements EventStoreSubscription { + private _status: EventStoreSubscriptionStatus = 'active'; + private _lastEventId?: string; + private _abortController?: AbortController; + private readonly _apiUrl: string; + private readonly _secret: string; + private readonly _config: EventStoreObserveConfig; + private readonly _mapper: ( + dbEvent: EventSourcingDbEvent, + ) => EventWithMetadata; + + constructor( + apiUrl: string, + secret: string, + config: EventStoreObserveConfig, + mapper: (dbEvent: EventSourcingDbEvent) => EventWithMetadata, + ) { + this._apiUrl = apiUrl; + this._secret = secret; + this._config = config; + this._mapper = mapper; + this._lastEventId = config.sinceEventId; + } + + start(): Promise { + this._abortController = new AbortController(); + this._status = 'active'; + + // Start streaming in background + this._stream().catch((error) => { + this._status = 'error'; + if (this._config.onError) { + this._config.onError(error); + } else { + getLogger().error({ + category: 'Nimbus', + message: 'EventSourcingDB observation stream error', + error, + }); + } + }); + + return Promise.resolve(); + } + + unsubscribe(): Promise { + if (this._abortController) { + this._abortController.abort(); + } + this._status = 'closed'; + + getLogger().debug({ + category: 'Nimbus', + message: 'EventSourcingDB subscription closed', + data: { + subject: this._config.subject, + lastEventId: this._lastEventId, + }, + }); + + return Promise.resolve(); + } + + getStatus(): EventStoreSubscriptionStatus { + return this._status; + } + + getLastEventId(): string | undefined { + return this._lastEventId; + } + + private async _stream(): Promise { + const body = JSON.stringify({ + subject: this._config.subject, + options: { + recursive: this._config.recursive ?? 
false, + ...(this._lastEventId && { + lowerBound: { + id: parseInt(this._lastEventId, 10), + type: 'exclusive', + }, + }), + }, + }); + + const response = await fetch(`${this._apiUrl}/observe-events`, { + method: 'POST', + headers: { + 'authorization': `Bearer ${this._secret}`, + 'content-type': 'application/json', + }, + body, + signal: this._abortController?.signal, + }); + + if (!response.ok) { + throw new GenericException('Failed to start observing events', { + status: response.status, + statusText: response.statusText, + url: response.url, + }); + } + + if (!response.body) { + throw new GenericException('Response body is null'); + } + + getLogger().info({ + category: 'Nimbus', + message: 'EventSourcingDB observation started', + data: { + subject: this._config.subject, + recursive: this._config.recursive, + sinceEventId: this._lastEventId, + }, + }); + + // Process NDJSON stream + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ''; + + try { + while (true) { + const { done, value } = await reader.read(); + + if (done) { + break; + } + + buffer += decoder.decode(value, { stream: true }); + + // Process complete lines + const lines = buffer.split('\n'); + buffer = lines.pop() || ''; // Keep incomplete line in buffer + + for (const line of lines) { + if (line.trim() === '') continue; + + try { + const item = JSON.parse(line); + + // Skip heartbeat messages + if (item.type === 'heartbeat') { + continue; + } + + const dbEvent = item.payload as EventSourcingDbEvent; + const nimbusEvent = this._mapper(dbEvent); + + this._lastEventId = nimbusEvent.eventstoremetadata.id; + + await this._config.handler(nimbusEvent); + } catch (error: any) { + if (this._config.onError) { + this._config.onError(error); + } else { + throw error; + } + } + } + } + } finally { + reader.releaseLock(); + } + } +} From 6993803c5d8534530161664f115ef6badc0adc62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Tue, 7 Oct 2025 11:38:20 +0200 Subject: [PATCH 13/37] rework to have pure core in example app and starts on docs --- docs/.vitepress/config.mjs | 36 +- docs/guide/eventsourcing/aggregate.md | 1 + docs/guide/eventsourcing/event-store.md | 1 + docs/guide/eventsourcing/index.md | 24 + .../guide/eventsourcing/why-event-sourcing.md | 7 + docs/guide/eventsourcingdb/event-store.md | 1 + docs/guide/eventsourcingdb/index.md | 23 + docs/guide/{oak => http}/index.md | 0 docs/guide/{oak => http}/router.md | 0 docs/guide/oak/middleware.md | 1 - docs/guide/quickstart.md | 11 +- docs/guide/what-is-nimbus.md | 262 ++++- docs/package-lock.json | 1017 +++++++++-------- docs/package.json | 4 +- .../gustav/src/contexts/meal-planner/.gitkeep | 0 .../gustav/src/contexts/notification/.gitkeep | 0 .../recipe/core/commands/addRecipe.ts | 38 +- .../recipe/core/commands/deleteRecipe.ts | 50 +- .../recipe/core/commands/updateRecipe.ts | 55 +- .../recipe/core/events/recipeAdded.ts | 8 +- .../recipe/core/events/recipeDeleted.ts | 6 +- .../recipe/core/events/recipeUpdated.ts | 6 +- .../eventHandler/recipeAdded.handler.ts | 18 + .../eventHandler/recipeDeleted.handler.ts | 15 + .../eventHandler/recipeUpdated.handler.ts | 21 + .../http/handler/addRecipe.handler.ts | 45 +- .../http/handler/deleteRecipe.handler.ts | 35 +- .../http/handler/recipeAdded.handler.ts | 16 - .../http/handler/updateRecipe.handler.ts | 37 +- .../src/shared/infrastructure/eventStore.ts | 16 +- 30 files changed, 1061 insertions(+), 693 deletions(-) create mode 100644 docs/guide/eventsourcing/aggregate.md create 
mode 100644 docs/guide/eventsourcing/event-store.md create mode 100644 docs/guide/eventsourcing/index.md create mode 100644 docs/guide/eventsourcing/why-event-sourcing.md create mode 100644 docs/guide/eventsourcingdb/event-store.md create mode 100644 docs/guide/eventsourcingdb/index.md rename docs/guide/{oak => http}/index.md (100%) rename docs/guide/{oak => http}/router.md (100%) delete mode 100644 docs/guide/oak/middleware.md delete mode 100644 examples/gustav/src/contexts/meal-planner/.gitkeep delete mode 100644 examples/gustav/src/contexts/notification/.gitkeep create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts create mode 100644 examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts diff --git a/docs/.vitepress/config.mjs b/docs/.vitepress/config.mjs index a640471..d2d0639 100644 --- a/docs/.vitepress/config.mjs +++ b/docs/.vitepress/config.mjs @@ -73,16 +73,42 @@ export default defineConfig({ }, { - text: "Oak (HTTP)", - link: "/guide/oak", + text: "HTTP (Oak)", + link: "/guide/http", items: [ { text: "Router", - link: "/guide/oak/router", + link: "/guide/http/router", }, + ], + }, + + { + text: "Event Sourcing", + link: "/guide/eventsourcing", + items: [ + { + text: "Why Event Sourcing?", + link: "/guide/eventsourcing/why-event-sourcing", + }, + { + text: "Event Store", + link: "/guide/eventsourcing/event-store", + }, + { + text: "Aggregate", + link: "/guide/eventsourcing/aggregate", + }, + ], + }, + + { + text: "EventSourcingDB", + link: "/guide/eventsourcingdb", + items: [ { - text: "Middleware", - link: "/guide/oak/middleware", + text: "EventSourcingDB Event Store", + link: "/guide/eventsourcingdb/event-store", }, ], }, diff --git a/docs/guide/eventsourcing/aggregate.md b/docs/guide/eventsourcing/aggregate.md new file mode 100644 index 0000000..80890aa --- /dev/null +++ b/docs/guide/eventsourcing/aggregate.md @@ -0,0 +1 @@ +# Aggregate diff --git a/docs/guide/eventsourcing/event-store.md b/docs/guide/eventsourcing/event-store.md new file mode 100644 index 0000000..528759a --- /dev/null +++ b/docs/guide/eventsourcing/event-store.md @@ -0,0 +1 @@ +# Event Store diff --git a/docs/guide/eventsourcing/index.md b/docs/guide/eventsourcing/index.md new file mode 100644 index 0000000..5e7658e --- /dev/null +++ b/docs/guide/eventsourcing/index.md @@ -0,0 +1,24 @@ +# Event Sourcing + +The eventsourcing package provides a simple set of abstractions for working with event sourcing. +Like event store and aggregate utilities. + +[https://jsr.io/@nimbus/eventsourcing](https://jsr.io/@nimbus/eventsourcing) + +### Deno + +```bash +deno add jsr:@nimbus/eventsourcing +``` + +### NPM + +```bash +npx jsr add @nimbus/eventsourcing +``` + +### Bun + +```bash +bunx jsr add @nimbus/eventsourcing +``` diff --git a/docs/guide/eventsourcing/why-event-sourcing.md b/docs/guide/eventsourcing/why-event-sourcing.md new file mode 100644 index 0000000..760e091 --- /dev/null +++ b/docs/guide/eventsourcing/why-event-sourcing.md @@ -0,0 +1,7 @@ +# Why Event Sourcing? + +Data quality and completeness is the backbone of all data analytics and AI applications. + +All the hype about AI is cool but in reality it is not the latest hyped AI Model that is the most important. 
It is the data that a model can use to learn, predict, enhance, or recommend something. + +Event Sourcing is the most powerful pattern to achieve this. diff --git a/docs/guide/eventsourcingdb/event-store.md b/docs/guide/eventsourcingdb/event-store.md new file mode 100644 index 0000000..1c230b3 --- /dev/null +++ b/docs/guide/eventsourcingdb/event-store.md @@ -0,0 +1 @@ +# EventSourcingDB Event Store diff --git a/docs/guide/eventsourcingdb/index.md b/docs/guide/eventsourcingdb/index.md new file mode 100644 index 0000000..1b615e2 --- /dev/null +++ b/docs/guide/eventsourcingdb/index.md @@ -0,0 +1,23 @@ +# EventSourcingDB + +The eventsourcingdb package provides an adapter for the EventStore interface to work with [EventSourcingDB](https://eventsourcingdb.io/). + +[https://jsr.io/@nimbus/eventsourcingdb](https://jsr.io/@nimbus/eventsourcingdb) + +### Deno + +```bash +deno add jsr:@nimbus/eventsourcingdb +``` + +### NPM + +```bash +npx jsr add @nimbus/eventsourcingdb +``` + +### Bun + +```bash +bunx jsr add @nimbus/eventsourcingdb +``` diff --git a/docs/guide/oak/index.md b/docs/guide/http/index.md similarity index 100% rename from docs/guide/oak/index.md rename to docs/guide/http/index.md diff --git a/docs/guide/oak/router.md b/docs/guide/http/router.md similarity index 100% rename from docs/guide/oak/router.md rename to docs/guide/http/router.md diff --git a/docs/guide/oak/middleware.md b/docs/guide/oak/middleware.md deleted file mode 100644 index 29caa56..0000000 --- a/docs/guide/oak/middleware.md +++ /dev/null @@ -1 +0,0 @@ -# Nimbus Oak - Middleware diff --git a/docs/guide/quickstart.md b/docs/guide/quickstart.md index 2f8e22c..612f1b1 100644 --- a/docs/guide/quickstart.md +++ b/docs/guide/quickstart.md @@ -4,9 +4,12 @@ To get started with Nimbus you need to install the [@nimbus/core](https://jsr.io ## Dependencies -Nimbus tries to keep dependencies as low as possible, but there are some packages that are necessary to run Nimbus. +Nimbus tries to keep dependencies as low as possible. +These are the dependencies Nimbus relies on. -For type safety at runtime Nimbus relies on [Zod](https://zod.dev/). +- [Deno Standard Library](https://docs.deno.com/runtime/fundamentals/standard_library/) ([@std](https://jsr.io/@std)) +- [ajv](https://ajv.js.org/) +- [ajv-formats](https://ajv.js.org/packages/ajv-formats.html) ## Installation @@ -15,19 +18,17 @@ Depending on your runtime you can install Nimbus with the following commands. ### Deno ```bash -deno add jsr:@nimbus/core npm:zod +deno add jsr:@nimbus/core ``` ### NPM ```bash -npm install zod npx jsr add @nimbus/core ``` ### Bun ```bash -bun add zod bunx jsr add @nimbus/core ``` diff --git a/docs/guide/what-is-nimbus.md b/docs/guide/what-is-nimbus.md index 515a787..7d691e6 100644 --- a/docs/guide/what-is-nimbus.md +++ b/docs/guide/what-is-nimbus.md @@ -1,64 +1,266 @@ # What is Nimbus? -:::tip The Goal: Keep it simple all the way! +Nimbus is a TypeScript framework for building message-driven applications with a focus on Domain-Driven Design (DDD) and clean architecture principles. It provides the building blocks for implementing Commands, Queries, and Events while keeping business logic pure and testable. -No complex object-oriented patterns, no clunky abstractions, no magic. -Just easily understandable and type-safe code. +## Philosophy + +Nimbus aims to keep things simple and avoid complex OOP or FP principles. +No complex inheritance hierarchies, no dependency injection, no decorators, no magic. +Just code that is easy to understand.
+ +:::tip Simplicity first! +Keep it as simple as possible with the fewest external dependencies. ::: -Nimbus aims to be a simple framework for building event-driven applications in Typescript. +There are already great frameworks like [NestJS](https://nestjs.com/) and [Effect](https://effect.website/) out there for building TypeScript applications. So the question is: why build another one? -## Pure Core - Imperative Shell +While those frameworks heavily emphasize either object-oriented or functional programming patterns, this comes at the cost of a steep learning curve. Nimbus aims to keep the learning curve as flat as possible. + +:::tip Start with Nimbus +Our recommendation is to start with Nimbus and see where it takes you. +In case you really need specific features or want to add more complexity, you can always add it later. +::: + +## Architecture Philosophy + +Nimbus is built around the idea of a **Pure Core** and an **Imperative Shell**, aligning well with Hexagonal Architecture (Ports & Adapters) and supporting modern patterns like CQRS and Event Sourcing. ![Illustration of the pure core imperative shell architecture](/nimbus-pure-core-imperative-shell.svg) ### The Pure Core -As our business logic - the things that make our application unique - is the most valuable part of our code, we should be able to focus on it without worrying about outside dependencies or side effects interfering with it. +The business logic represents the most valuable part of any application. It should be focused, testable, and free from external dependencies. + +The pure core contains domain logic that: -So the main goal is to keep all the business logic inside the pure core of our application. +- Accepts type-safe inputs and returns type-safe outputs +- Has no side effects (no I/O operations) +- Can be tested by running functions with different inputs and comparing outputs - no mocking needed! +- Represents the unique value proposition of the application -The core only accepts type safe inputs and returns type safe outputs. It is side-effect free and can be tested easily by running functions with different inputs and comparing the outputs. +Example from a recipe management system: + +```typescript +// Pure domain logic - no I/O, completely side-effect free +export const addRecipe = ( + command: AddRecipeCommand, + state: RecipeState, +): { + newState: Recipe; + events: RecipeAddedEvent[]; +} => { + if (state !== null) { + throw new InvalidInputException('Recipe already exists', { + errorCode: 'DUPLICATE_RECIPE', + }); + } + + const subject = recipeSubject(command.data.slug); + const event = createRecipeAddedEvent(command, subject); + + return { + newState: command.data, + events: [event], + }; +}; +``` ### The Imperative Shell -As we for sure need to interact with the outside world, we need to have a place that is responsible for all the I/O operations like HTTP calls, database interactions, or filesystem operations. +The shell handles all interactions with the outside world - HTTP requests, database operations, file system access, and other I/O operations. It orchestrates the pure core by providing it with data and persisting the results. + +The shell's responsibilities include: + +- Receiving external input (HTTP requests, messages, etc.)
+- Fetching data from external sources +- Calling pure core functions +- Persisting results +- Sending responses + +Example handler in the shell: + +```typescript +// Shell layer - handles all I/O and orchestrates core logic +export const addRecipeHandler: MessageHandler = + async (command) => { + const subject = recipeSubject(command.data.slug); -This place is in the shell of our application. It is responsible for the side effects and connects all external interactions with the pure core. + // Shell loads current state from event store + const snapshot = await loadAggregate( + eventStore, + subject, + null, + recipeReducer, + ); -Nimbus goal is to reduce the shell overhead in the first place and do the heavy lifting for you if necessary. + // Shell calls pure core logic (no I/O happens here) + const { newState, events } = addRecipe(command, snapshot.state); -### Shell to Core to Shell + // Shell persists events with optimistic concurrency control + await eventStore.writeEvents(events, { + preconditions: snapshot.lastEventId !== undefined + ? [{ type: 'isSubjectOnEventId', payload: { subject, eventId: snapshot.lastEventId } }] + : [{ type: 'isSubjectPristine', payload: { subject } }], + }); -The flow of information always goes from the shell to the core and back to the shell. -This means the shell can call the core at any time but the core will never call the shell. + return newState; + }; +``` + +### Flow of Information + +Information flows in one direction: **Shell → Core → Shell** + +The shell can call the core at any time, but the core never calls the shell. This unidirectional flow ensures that business logic remains pure and testable. ![Illustration of the flow of information](/nimbus-flow-of-information.svg) -When we look at an example of an HTTP API, the shell handles the incoming HTTP request, reads from the Database, calls the core, writes changes to the Database, and sends the response back to the client. +In an HTTP API scenario: -Sometimes it is necessary to run business logic with some information from the database before executing another database query based on the logic's result. In this case the core functions can be split into multiple parts so the shell can call them in the right order. +1. Shell receives HTTP request +2. Shell fetches necessary data from database +3. Shell calls core business logic +4. Shell persists results to database +5. Shell sends HTTP response -### Thoughts? +For complex scenarios requiring multiple database queries with business logic in between, core functions can be composed and called sequentially by the shell. -:::info Isn't it called Functional Core, Imperative Shell? -As stated above Nimbus goal is to keep it simple and therefore avoid overly complex OOP (Object-Oriented-Programming) principles. The same goes for overly complex FP (Functional Programming) principles. +## Message-Driven Architecture -That is why Nimbus prefers the term **Pure Core** as it can, but not have to follow FP patterns. -::: +Nimbus uses a message-driven approach with three core message types: -:::info Isn't it called a Hexagonal Architecture? -Nimbus can fit nicely into an App with Hexagonal Architecture. -But if you do not want to follow this pattern, you are still able to use Nimbus without defining ports and adapters for everything. -::: +### Commands + +Commands represent write operations - intentions to change system state. They are processed by command handlers that execute business logic and persist changes. 
+ +```typescript +export type AddRecipeCommand = Command & { + type: "at.overlap.nimbus.app-recipe"; +}; +``` + +### Queries + +Queries represent read operations - requests for information without changing state. Query handlers fetch and return data. + +```typescript +export type GetRecipeQuery = Query & { + type: "at.overlap.nimbus.get-recipe"; +}; +``` + +### Events + +Events represent facts - things that have already happened in the system. Event handlers react to these facts to update read models, trigger notifications, or coordinate between different parts of the system. + +```typescript +export type RecipeAddedEvent = Event & { + type: "at.overlap.nimbus.recipe-added"; + subject: string; +}; +``` -## Event Driven +## Domain-Driven Design & Hexagonal Architecture -If the real world is asynchronous, why should your application be synchronous? +Nimbus encourages organizing code around business domains using DDD principles and hexagonal architecture: + +``` +src/ +├── contexts/ # Bounded contexts +│ └── recipe/ +│ ├── core/ # Pure domain logic +│ │ ├── domain/ # Entities, value objects, aggregates +│ │ ├── commands/ # Command logic +│ │ ├── queries/ # Query logic +│ │ ├── events/ # Event definitions +│ │ └── ports/ # Interfaces for external dependencies +│ └── infrastructure/ # Adapters & implementation details +│ ├── http/ # HTTP handlers +│ └── repository/ # Data access +└── shared/ # Shared kernel +``` + +While Nimbus works well with this structure, it remains agnostic and does not enforce any specific organizational pattern. Applications can be structured as needed. + +## CQRS & Event Sourcing + +Command Query Responsibility Segregation (CQRS) and Event Sourcing are highly recommended patterns when using Nimbus, especially as historical data becomes increasingly valuable for AI and data analytics. + +### Why CQRS? + +CQRS separates write operations (commands) from read operations (queries), allowing each to be optimized independently: + +- **Commands** change state and emit events +- **Queries** read from optimized read models +- **Events** synchronize write and read models + +### Why Event Sourcing? + +Event sourcing stores every state change as an immutable event, providing: + +- Complete audit trail of all changes +- Ability to reconstruct state at any point in time +- Historical data for analytics and AI training +- Natural fit with event-driven architectures + +Example of an event reducer reconstructing aggregate state: + +```typescript +export const recipeReducer: EventReducer = ( + state: RecipeState, + event: Event +): RecipeState => { + switch (event.type) { + case "at.overlap.nimbus.recipe-added": + return event.data as Recipe; + case "at.overlap.nimbus.recipe-updated": + return { ...state, ...(event.data as Partial) }; + case "at.overlap.nimbus.recipe-deleted": + return null; + default: + return state; + } +}; +``` + +Loading current state by replaying events: + +```typescript +const snapshot = await loadAggregate(eventStore, subject, null, recipeReducer); +``` + +While these patterns are recommended, Nimbus does not force their use. Applications can start simple and adopt these patterns as requirements evolve. 
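To make the replay idea concrete, here is a minimal, self-contained sketch of folding an event history with a reducer. It mirrors the reducer shown above, but the `DemoEvent` shape, the sample event types, and the recipe fields are simplified assumptions for illustration only, not the actual @nimbus/core or example-app definitions:

```typescript
// Minimal sketch: replaying an ordered event history with a reducer.
// Types and sample data are illustrative assumptions, not framework APIs.
type DemoEvent = { type: string; data: unknown };
type Recipe = { slug: string; title: string };
type RecipeState = Recipe | null;

const demoRecipeReducer = (
    state: RecipeState,
    event: DemoEvent,
): RecipeState => {
    switch (event.type) {
        case 'recipe-added':
            return event.data as Recipe;
        case 'recipe-updated':
            return state
                ? { ...state, ...(event.data as Partial<Recipe>) }
                : state;
        case 'recipe-deleted':
            return null;
        default:
            return state;
    }
};

// Replay is simply a left fold over the events in the order they were written.
const history: DemoEvent[] = [
    { type: 'recipe-added', data: { slug: 'carbonara', title: 'Pasta' } },
    { type: 'recipe-updated', data: { title: 'Pasta Carbonara' } },
];

const current = history.reduce(demoRecipeReducer, null as RecipeState);
// current -> { slug: 'carbonara', title: 'Pasta Carbonara' }
```

Because each event describes a single change, the reducer stays small, and a utility such as `loadAggregate` can rebuild an aggregate's current state by reading its events and applying exactly this kind of fold.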
+ +## Package Ecosystem + +Nimbus provides a modular package ecosystem: + +- **[@nimbus/core](https://jsr.io/@nimbus/core)** - Core message types, routing, validation, and logging +- **[@nimbus/eventsourcing](https://jsr.io/@nimbus/eventsourcing)** - Event sourcing abstractions and aggregate utilities +- **[@nimbus/eventsourcingdb](https://jsr.io/@nimbus/eventsourcingdb)** - EventSourcingDB integration for event storage +- **[@nimbus/oak](https://jsr.io/@nimbus/oak)** - Oak HTTP framework integration +- **[@nimbus/mongodb](https://jsr.io/@nimbus/mongodb)** - MongoDB integration for read models + +Each package can be used independently or combined as needed. ## Deno & JSR -**"Keep it simple all the way."** -That is why Nimbus is built with [Deno](https://deno.com) and published on [jsr.io](https://jsr.io/packages?search=@nimbus) +Following the principle of "keep it simple all the way," Nimbus is built with [Deno](https://deno.com) and published on [jsr.io/@nimbus](https://jsr.io/packages?search=@nimbus). + +Nimbus is a TypeScript framework compatible with any Node.js runtime, though Deno is recommended for the best development experience. + +## Roadmap -Nimbus is a Typescript framework and runs with every Node.js compatible runtime. But it is recommended to try it with Deno. +Future development focuses on: + +- AsyncAPI and OpenAPI specification support for schema-driven development +- CLI tool for project initialization and code generation +- Schema registry for contract management + +## FAQ + +:::info Isn't it called Functional Core, Imperative Shell? +Nimbus aims to keep things simple and avoid overly complex Object-Oriented Programming (OOP) principles. The same applies to overly complex Functional Programming (FP) principles. + +The term **Pure Core** is preferred as it can follow FP patterns but does not require them. 
+::: diff --git a/docs/package-lock.json b/docs/package-lock.json index f6dfb17..0a67793 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -6,7 +6,22 @@ "": { "name": "@nimbus/docs", "dependencies": { - "vitepress": "^1.5.0" + "vitepress": "1.6.4" + } + }, + "node_modules/@algolia/abtesting": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.5.0.tgz", + "integrity": "sha512-W/ohRkbKQsqDWALJg28X15KF7Tcyg53L1MfdOkLgvkcCcofdzGHSimHHeNG05ojjFw9HK8+VPhe/Vwq4MozIJg==", + "license": "MIT", + "dependencies": { + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" + }, + "engines": { + "node": ">= 14.0.0" } }, "node_modules/@algolia/autocomplete-core": { @@ -55,210 +70,210 @@ } }, "node_modules/@algolia/client-abtesting": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.15.0.tgz", - "integrity": "sha512-FaEM40iuiv1mAipYyiptP4EyxkJ8qHfowCpEeusdHUC4C7spATJYArD2rX3AxkVeREkDIgYEOuXcwKUbDCr7Nw==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.39.0.tgz", + "integrity": "sha512-Vf0ZVe+qo3sHDrCinouJqlg8VoxM4Qo/KxNIqMYybkuctutfnp3kIY9OmESplOQ/9NGBthU9EG+4d5fBibWK/A==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-analytics": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.15.0.tgz", - "integrity": "sha512-lho0gTFsQDIdCwyUKTtMuf9nCLwq9jOGlLGIeQGKDxXF7HbiAysFIu5QW/iQr1LzMgDyM9NH7K98KY+BiIFriQ==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.39.0.tgz", + "integrity": "sha512-V16ITZxYIwcv1arNce65JZmn94Ft6vKlBZ//gXw8AvIH32glJz1KcbaVAUr9p7PYlGZ/XVHP6LxDgrpNdtwgcA==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-common": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", - "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.39.0.tgz", + "integrity": "sha512-UCJTuwySEQeiKPWV3wruhuI/wHbDYenHzgL9pYsvh6r/u5Z+g61ip1iwdAlFp02CnywzI9O7+AQPh2ManYyHmQ==", "license": "MIT", "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-insights": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.15.0.tgz", - "integrity": 
"sha512-bDDEQGfFidDi0UQUCbxXOCdphbVAgbVmxvaV75cypBTQkJ+ABx/Npw7LkFGw1FsoVrttlrrQbwjvUB6mLVKs/w==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.39.0.tgz", + "integrity": "sha512-s0ia8M/ZZR+iO2uLNTBrlQdEb6ZMAMcKMHckp5mcoglxrf8gHifL4LmdhGKdAxAn3UIagtqIP0RCnIymHUbm7A==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-personalization": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.15.0.tgz", - "integrity": "sha512-LfaZqLUWxdYFq44QrasCDED5bSYOswpQjSiIL7Q5fYlefAAUO95PzBPKCfUhSwhb4rKxigHfDkd81AvEicIEoA==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.39.0.tgz", + "integrity": "sha512-vZPIt7Lw+toNsHZUiPhNIc1Z3vUjDp7nzn6AMOaPC73gEuTq2iLPNvM06CSB6aHePo5eMeJIP5YEKBUQUA/PJA==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-query-suggestions": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.15.0.tgz", - "integrity": "sha512-wu8GVluiZ5+il8WIRsGKu8VxMK9dAlr225h878GGtpTL6VBvwyJvAyLdZsfFIpY0iN++jiNb31q2C1PlPL+n/A==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.39.0.tgz", + "integrity": "sha512-jcPQr3iKTWNVli2NYHPv02aNLwixDjPCpOgMp9CZTvEiPI6Ec4jHX+oFr3LDZagOFY9e1xJhc/JrgMGGW1sHnw==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/client-search": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.15.0.tgz", - "integrity": "sha512-Z32gEMrRRpEta5UqVQA612sLdoqY3AovvUPClDfMxYrbdDAebmGDVPtSogUba1FZ4pP5dx20D3OV3reogLKsRA==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.39.0.tgz", + "integrity": "sha512-/IYpF10BpthGZEJQZMhMqV4AqWr5avcWfZm/SIKK1RvUDmzGqLoW/+xeJVX9C8ZnNkIC8hivbIQFaNaRw0BFZQ==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, 
"engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/ingestion": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.15.0.tgz", - "integrity": "sha512-MkqkAxBQxtQ5if/EX2IPqFA7LothghVyvPoRNA/meS2AW2qkHwcxjuiBxv4H6mnAVEPfJlhu9rkdVz9LgCBgJg==", + "version": "1.39.0", + "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.39.0.tgz", + "integrity": "sha512-IgSHKUiuecqLfBlXiuCSdRTdsO3/yvpmXrMFz8fAJ8M4QmDtHkOuD769dmybRYqsbYMHivw+lir4BgbRGMtOIQ==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/monitoring": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.15.0.tgz", - "integrity": "sha512-QPrFnnGLMMdRa8t/4bs7XilPYnoUXDY8PMQJ1sf9ZFwhUysYYhQNX34/enoO0LBjpoOY6rLpha39YQEFbzgKyQ==", + "version": "1.39.0", + "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.39.0.tgz", + "integrity": "sha512-8Xnd4+609SKC/hqVsuFc4evFBmvA2765/4NcH+Dpr756SKPbL1BY0X8kVxlmM3YBLNqnduSQxHxpDJUK58imCA==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/recommend": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.15.0.tgz", - "integrity": "sha512-5eupMwSqMLDObgSMF0XG958zR6GJP3f7jHDQ3/WlzCM9/YIJiWIUoJFGsko9GYsA5xbLDHE/PhWtq4chcCdaGQ==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.39.0.tgz", + "integrity": "sha512-D7Ye2Ss/5xqUkQUxKm/VqEJLt5kARd9IMmjdzlxaKhGgNlOemTay0lwBmOVFuJRp7UODjp5c9+K+B8g0ORObIw==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "@algolia/client-common": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/requester-browser-xhr": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", - "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.39.0.tgz", + "integrity": "sha512-mgPte1ZJqpk9dkVs44J3wKAbHATvHZNlSpzhMdjMLIg/3qTycSZyDiomLiSlxE8CLsxyBAOJWnyKRHfom+Z1rg==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0" + "@algolia/client-common": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/requester-fetch": { - "version": "5.15.0", - "resolved": 
"https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.15.0.tgz", - "integrity": "sha512-rOZ+c0P7ajmccAvpeeNrUmEKoliYFL8aOR5qGW5pFq3oj3Iept7Y5mEtEsOBYsRt6qLnaXn4zUKf+N8nvJpcIw==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.39.0.tgz", + "integrity": "sha512-LIrCkrxu1WnO3ev1+w6NnZ12JZL/o+2H9w6oWnZAjQZIlA/Ym6M9QHkt+OQ/SwkuoiNkW3DAo+Pi4A2V9FPtqg==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0" + "@algolia/client-common": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@algolia/requester-node-http": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", - "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.39.0.tgz", + "integrity": "sha512-6beG+egPwXmvhAg+m0STCj+ZssDcjrLzf4L05aKm2nGglMXSSPz0cH/rM+kVD9krNfldiMctURd4wjojW1fV0w==", "license": "MIT", "dependencies": { - "@algolia/client-common": "5.15.0" + "@algolia/client-common": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.26.3", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.3.tgz", - "integrity": "sha512-WJ/CvmY8Mea8iDXo6a7RK2wbmJITT5fN3BEkRuFlxVyNx8jOKIIhmC4fSkTcPcf8JyavbBwIe6OpiCOBXt/IcA==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", "license": "MIT", "dependencies": { - "@babel/types": "^7.26.3" + "@babel/types": "^7.28.4" }, "bin": { "parser": "bin/babel-parser.js" @@ -268,44 +283,44 @@ } }, "node_modules/@babel/types": { - "version": "7.26.3", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.3.tgz", - "integrity": "sha512-vN5p+1kl59GVKMvTHt55NzzmYVxprfJD+ql7U9NFIfKCBkYE55LYtS+WtPlaYOyzydrKI8Nezd+aZextrd+FMA==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": 
"sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@docsearch/css": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.8.0.tgz", - "integrity": "sha512-pieeipSOW4sQ0+bE5UFC51AOZp9NGxg89wAlZ1BAQFaiRAGK1IKUaPQ0UGZeNctJXyqZ1UvBtOQh2HH+U5GtmA==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.8.2.tgz", + "integrity": "sha512-y05ayQFyUmCXze79+56v/4HpycYF3uFqB78pLPrSV5ZKAlDuIAAJNhaRi8tTdRNXh05yxX/TyNnzD6LwSM89vQ==", "license": "MIT" }, "node_modules/@docsearch/js": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.8.0.tgz", - "integrity": "sha512-PVuV629f5UcYRtBWqK7ID6vNL5647+2ADJypwTjfeBIrJfwPuHtzLy39hMGMfFK+0xgRyhTR0FZ83EkdEraBlg==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.8.2.tgz", + "integrity": "sha512-Q5wY66qHn0SwA7Taa0aDbHiJvaFJLOJyHmooQ7y8hlwwQLQ/5WwCcoX0g7ii04Qi2DJlHsd0XXzJ8Ypw9+9YmQ==", "license": "MIT", "dependencies": { - "@docsearch/react": "3.8.0", + "@docsearch/react": "3.8.2", "preact": "^10.0.0" } }, "node_modules/@docsearch/react": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.8.0.tgz", - "integrity": "sha512-WnFK720+iwTVt94CxY3u+FgX6exb3BfN5kE9xUY6uuAH/9W/UFboBZFLlrw/zxFRHoHZCOXRtOylsXF+6LHI+Q==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.8.2.tgz", + "integrity": "sha512-xCRrJQlTt8N9GU0DG4ptwHRkfnSnD/YpdeaXe02iKfqs97TkZJv60yE+1eq/tjPcVnTW8dP5qLP7itifFVV5eg==", "license": "MIT", "dependencies": { "@algolia/autocomplete-core": "1.17.7", "@algolia/autocomplete-preset-algolia": "1.17.7", - "@docsearch/css": "3.8.0", - "algoliasearch": "^5.12.0" + "@docsearch/css": "3.8.2", + "algoliasearch": "^5.14.2" }, "peerDependencies": { "@types/react": ">= 16.8.0 < 19.0.0", @@ -697,9 +712,9 @@ } }, "node_modules/@iconify-json/simple-icons": { - "version": "1.2.14", - "resolved": "https://registry.npmjs.org/@iconify-json/simple-icons/-/simple-icons-1.2.14.tgz", - "integrity": "sha512-zLqb48pM1B5vegMBDouyv7FzrROV5HRIjDpl+/PKjY3P7AeSySaOeT6mzutF6hDZCJvn1J7qQ7lug3FOgegiiA==", + "version": "1.2.54", + "resolved": "https://registry.npmjs.org/@iconify-json/simple-icons/-/simple-icons-1.2.54.tgz", + "integrity": "sha512-OQQYl8yC5j3QklZOYnK31QYe5h47IhyCoxSLd53f0e0nA4dgi8VOZS30SgSAbsecQ+S0xlGJMjXIHTIqZ+ML3w==", "license": "CC0-1.0", "dependencies": { "@iconify/types": "*" @@ -712,15 +727,15 @@ "license": "MIT" }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "license": "MIT" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.28.0.tgz", - 
"integrity": "sha512-wLJuPLT6grGZsy34g4N1yRfYeouklTgPhH1gWXCYspenKYD0s3cR99ZevOGw5BexMNywkbV3UkjADisozBmpPQ==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.4.tgz", + "integrity": "sha512-BTm2qKNnWIQ5auf4deoetINJm2JzvihvGb9R6K/ETwKLql/Bb3Eg2H1FBp1gUb4YGbydMA3jcmQTR73q7J+GAA==", "cpu": [ "arm" ], @@ -731,9 +746,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.28.0.tgz", - "integrity": "sha512-eiNkznlo0dLmVG/6wf+Ifi/v78G4d4QxRhuUl+s8EWZpDewgk7PX3ZyECUXU0Zq/Ca+8nU8cQpNC4Xgn2gFNDA==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.4.tgz", + "integrity": "sha512-P9LDQiC5vpgGFgz7GSM6dKPCiqR3XYN1WwJKA4/BUVDjHpYsf3iBEmVz62uyq20NGYbiGPR5cNHI7T1HqxNs2w==", "cpu": [ "arm64" ], @@ -744,9 +759,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.28.0.tgz", - "integrity": "sha512-lmKx9yHsppblnLQZOGxdO66gT77bvdBtr/0P+TPOseowE7D9AJoBw8ZDULRasXRWf1Z86/gcOdpBrV6VDUY36Q==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.4.tgz", + "integrity": "sha512-QRWSW+bVccAvZF6cbNZBJwAehmvG9NwfWHwMy4GbWi/BQIA/laTIktebT2ipVjNncqE6GLPxOok5hsECgAxGZg==", "cpu": [ "arm64" ], @@ -757,9 +772,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.28.0.tgz", - "integrity": "sha512-8hxgfReVs7k9Js1uAIhS6zq3I+wKQETInnWQtgzt8JfGx51R1N6DRVy3F4o0lQwumbErRz52YqwjfvuwRxGv1w==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.4.tgz", + "integrity": "sha512-hZgP05pResAkRJxL1b+7yxCnXPGsXU0fG9Yfd6dUaoGk+FhdPKCJ5L1Sumyxn8kvw8Qi5PvQ8ulenUbRjzeCTw==", "cpu": [ "x64" ], @@ -770,9 +785,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.28.0.tgz", - "integrity": "sha512-lA1zZB3bFx5oxu9fYud4+g1mt+lYXCoch0M0V/xhqLoGatbzVse0wlSQ1UYOWKpuSu3gyN4qEc0Dxf/DII1bhQ==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.4.tgz", + "integrity": "sha512-xmc30VshuBNUd58Xk4TKAEcRZHaXlV+tCxIXELiE9sQuK3kG8ZFgSPi57UBJt8/ogfhAF5Oz4ZSUBN77weM+mQ==", "cpu": [ "arm64" ], @@ -783,9 +798,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.28.0.tgz", - "integrity": "sha512-aI2plavbUDjCQB/sRbeUZWX9qp12GfYkYSJOrdYTL/C5D53bsE2/nBPuoiJKoWp5SN78v2Vr8ZPnB+/VbQ2pFA==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.4.tgz", + "integrity": "sha512-WdSLpZFjOEqNZGmHflxyifolwAiZmDQzuOzIq9L27ButpCVpD7KzTRtEG1I0wMPFyiyUdOO+4t8GvrnBLQSwpw==", "cpu": [ "x64" ], @@ -796,9 +811,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.28.0.tgz", - "integrity": 
"sha512-WXveUPKtfqtaNvpf0iOb0M6xC64GzUX/OowbqfiCSXTdi/jLlOmH0Ba94/OkiY2yTGTwteo4/dsHRfh5bDCZ+w==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.4.tgz", + "integrity": "sha512-xRiOu9Of1FZ4SxVbB0iEDXc4ddIcjCv2aj03dmW8UrZIW7aIQ9jVJdLBIhxBI+MaTnGAKyvMwPwQnoOEvP7FgQ==", "cpu": [ "arm" ], @@ -809,9 +824,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.28.0.tgz", - "integrity": "sha512-yLc3O2NtOQR67lI79zsSc7lk31xjwcaocvdD1twL64PK1yNaIqCeWI9L5B4MFPAVGEVjH5k1oWSGuYX1Wutxpg==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.4.tgz", + "integrity": "sha512-FbhM2p9TJAmEIEhIgzR4soUcsW49e9veAQCziwbR+XWB2zqJ12b4i/+hel9yLiD8pLncDH4fKIPIbt5238341Q==", "cpu": [ "arm" ], @@ -822,9 +837,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.28.0.tgz", - "integrity": "sha512-+P9G9hjEpHucHRXqesY+3X9hD2wh0iNnJXX/QhS/J5vTdG6VhNYMxJ2rJkQOxRUd17u5mbMLHM7yWGZdAASfcg==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.4.tgz", + "integrity": "sha512-4n4gVwhPHR9q/g8lKCyz0yuaD0MvDf7dV4f9tHt0C73Mp8h38UCtSCSE6R9iBlTbXlmA8CjpsZoujhszefqueg==", "cpu": [ "arm64" ], @@ -835,9 +850,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.28.0.tgz", - "integrity": "sha512-1xsm2rCKSTpKzi5/ypT5wfc+4bOGa/9yI/eaOLW0oMs7qpC542APWhl4A37AENGZ6St6GBMWhCCMM6tXgTIplw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.4.tgz", + "integrity": "sha512-u0n17nGA0nvi/11gcZKsjkLj1QIpAuPFQbR48Subo7SmZJnGxDpspyw2kbpuoQnyK+9pwf3pAoEXerJs/8Mi9g==", "cpu": [ "arm64" ], @@ -847,10 +862,23 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.28.0.tgz", - "integrity": "sha512-zgWxMq8neVQeXL+ouSf6S7DoNeo6EPgi1eeqHXVKQxqPy1B2NvTbaOUWPn/7CfMKL7xvhV0/+fq/Z/J69g1WAQ==", + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.4.tgz", + "integrity": "sha512-0G2c2lpYtbTuXo8KEJkDkClE/+/2AFPdPAbmaHoE870foRFs4pBrDehilMcrSScrN/fB/1HTaWO4bqw+ewBzMQ==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.4.tgz", + "integrity": "sha512-teSACug1GyZHmPDv14VNbvZFX779UqWTsd7KtTM9JIZRDI5NUwYSIS30kzI8m06gOPB//jtpqlhmraQ68b5X2g==", "cpu": [ "ppc64" ], @@ -861,9 +889,22 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.28.0.tgz", - "integrity": 
"sha512-VEdVYacLniRxbRJLNtzwGt5vwS0ycYshofI7cWAfj7Vg5asqj+pt+Q6x4n+AONSZW/kVm+5nklde0qs2EUwU2g==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.4.tgz", + "integrity": "sha512-/MOEW3aHjjs1p4Pw1Xk4+3egRevx8Ji9N6HUIA1Ifh8Q+cg9dremvFCUbOX2Zebz80BwJIgCBUemjqhU5XI5Eg==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.4.tgz", + "integrity": "sha512-1HHmsRyh845QDpEWzOFtMCph5Ts+9+yllCrREuBR/vg2RogAQGGBRC8lDPrPOMnrdOJ+mt1WLMOC2Kao/UwcvA==", "cpu": [ "riscv64" ], @@ -874,9 +915,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.28.0.tgz", - "integrity": "sha512-LQlP5t2hcDJh8HV8RELD9/xlYtEzJkm/aWGsauvdO2ulfl3QYRjqrKW+mGAIWP5kdNCBheqqqYIGElSRCaXfpw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.4.tgz", + "integrity": "sha512-seoeZp4L/6D1MUyjWkOMRU6/iLmCU2EjbMTyAG4oIOs1/I82Y5lTeaxW0KBfkUdHAWN7j25bpkt0rjnOgAcQcA==", "cpu": [ "s390x" ], @@ -887,9 +928,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.28.0.tgz", - "integrity": "sha512-Nl4KIzteVEKE9BdAvYoTkW19pa7LR/RBrT6F1dJCV/3pbjwDcaOq+edkP0LXuJ9kflW/xOK414X78r+K84+msw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.4.tgz", + "integrity": "sha512-Wi6AXf0k0L7E2gteNsNHUs7UMwCIhsCTs6+tqQ5GPwVRWMaflqGec4Sd8n6+FNFDw9vGcReqk2KzBDhCa1DLYg==", "cpu": [ "x64" ], @@ -900,9 +941,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.28.0.tgz", - "integrity": "sha512-eKpJr4vBDOi4goT75MvW+0dXcNUqisK4jvibY9vDdlgLx+yekxSm55StsHbxUsRxSTt3JEQvlr3cGDkzcSP8bw==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.4.tgz", + "integrity": "sha512-dtBZYjDmCQ9hW+WgEkaffvRRCKm767wWhxsFW3Lw86VXz/uJRuD438/XvbZT//B96Vs8oTA8Q4A0AfHbrxP9zw==", "cpu": [ "x64" ], @@ -912,10 +953,23 @@ "linux" ] }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.4.tgz", + "integrity": "sha512-1ox+GqgRWqaB1RnyZXL8PD6E5f7YyRUJYnCqKpNzxzP0TkaUh112NDrR9Tt+C8rJ4x5G9Mk8PQR3o7Ku2RKqKA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.28.0.tgz", - "integrity": "sha512-Vi+WR62xWGsE/Oj+mD0FNAPY2MEox3cfyG0zLpotZdehPFXwz6lypkGs5y38Jd/NVSbOD02aVad6q6QYF7i8Bg==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.4.tgz", + "integrity": "sha512-8GKr640PdFNXwzIE0IrkMWUNUomILLkfeHjXBi/nUvFlpZP+FA8BKGKpacjW6OUUHaNI6sUURxR2U2g78FOHWQ==", "cpu": [ "arm64" ], @@ -926,9 
+980,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.28.0.tgz", - "integrity": "sha512-kN/Vpip8emMLn/eOza+4JwqDZBL6MPNpkdaEsgUtW1NYN3DZvZqSQrbKzJcTL6hd8YNmFTn7XGWMwccOcJBL0A==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.4.tgz", + "integrity": "sha512-AIy/jdJ7WtJ/F6EcfOb2GjR9UweO0n43jNObQMb6oGxkYTfLcnN7vYYpG+CN3lLxrQkzWnMOoNSHTW54pgbVxw==", "cpu": [ "ia32" ], @@ -938,10 +992,23 @@ "win32" ] }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.4.tgz", + "integrity": "sha512-UF9KfsH9yEam0UjTwAgdK0anlQ7c8/pWPU2yVjyWcF1I1thABt6WXE47cI71pGiZ8wGvxohBoLnxM04L/wj8mQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.28.0.tgz", - "integrity": "sha512-Bvno2/aZT6usSa7lRDL2+hMjVAGjuqaymF1ApZm31JXzniR/hvr14jpU+/z4X6Gt5BPlzosscyJZGUvguXIqeQ==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.4.tgz", + "integrity": "sha512-bf9PtUa0u8IXDVxzRToFQKsNCRz9qLYfR/MpECxl4mRoWYjAeFjgxj1XdZr2M/GNVpT05p+LgQOHopYDlUu6/w==", "cpu": [ "x64" ], @@ -952,69 +1019,88 @@ ] }, "node_modules/@shikijs/core": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.24.0.tgz", - "integrity": "sha512-6pvdH0KoahMzr6689yh0QJ3rCgF4j1XsXRHNEeEN6M4xJTfQ6QPWrmHzIddotg+xPJUPEPzYzYCKzpYyhTI6Gw==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-2.5.0.tgz", + "integrity": "sha512-uu/8RExTKtavlpH7XqnVYBrfBkUc20ngXiX9NSrBhOVZYv/7XQRKUyhtkeflY5QsxC0GbJThCerruZfsUaSldg==", "license": "MIT", "dependencies": { - "@shikijs/engine-javascript": "1.24.0", - "@shikijs/engine-oniguruma": "1.24.0", - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0", + "@shikijs/engine-javascript": "2.5.0", + "@shikijs/engine-oniguruma": "2.5.0", + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", - "hast-util-to-html": "^9.0.3" + "hast-util-to-html": "^9.0.4" } }, "node_modules/@shikijs/engine-javascript": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-1.24.0.tgz", - "integrity": "sha512-ZA6sCeSsF3Mnlxxr+4wGEJ9Tto4RHmfIS7ox8KIAbH0MTVUkw3roHPHZN+LlJMOHJJOVupe6tvuAzRpN8qK1vA==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-2.5.0.tgz", + "integrity": "sha512-VjnOpnQf8WuCEZtNUdjjwGUbtAVKuZkVQ/5cHy/tojVVRIRtlWMYVjyWhxOmIq05AlSOv72z7hRNRGVBgQOl0w==", "license": "MIT", "dependencies": { - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0", - "oniguruma-to-es": "0.7.0" + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2", + "oniguruma-to-es": "^3.1.0" } }, "node_modules/@shikijs/engine-oniguruma": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-1.24.0.tgz", - "integrity": "sha512-Eua0qNOL73Y82lGA4GF5P+G2+VXX9XnuUxkiUuwcxQPH4wom+tE39kZpBFXfUuwNYxHSkrSxpB1p4kyRW0moSg==", + "version": "2.5.0", + "resolved": 
"https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-2.5.0.tgz", + "integrity": "sha512-pGd1wRATzbo/uatrCIILlAdFVKdxImWJGQ5rFiB5VZi2ve5xj3Ax9jny8QvkaV93btQEwR/rSz5ERFpC5mKNIw==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2" + } + }, + "node_modules/@shikijs/langs": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-2.5.0.tgz", + "integrity": "sha512-Qfrrt5OsNH5R+5tJ/3uYBBZv3SuGmnRPejV9IlIbFH3HTGLDlkqgHymAlzklVmKBjAaVmkPkyikAV/sQ1wSL+w==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "2.5.0" + } + }, + "node_modules/@shikijs/themes": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-2.5.0.tgz", + "integrity": "sha512-wGrk+R8tJnO0VMzmUExHR+QdSaPUl/NKs+a4cQQRWyoc3YFbUzuLEi/KWK1hj+8BfHRKm2jNhhJck1dfstJpiw==", "license": "MIT", "dependencies": { - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0" + "@shikijs/types": "2.5.0" } }, "node_modules/@shikijs/transformers": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-1.24.0.tgz", - "integrity": "sha512-Qf/hby+PRPkoHncjYnJf5svK1aCsOUtQhuLzKPnmeXJtuUZCmbH0pTpdNtXe9tgln/RHlyRJnv7q46HHS1sO0Q==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-2.5.0.tgz", + "integrity": "sha512-SI494W5X60CaUwgi8u4q4m4s3YAFSxln3tzNjOSYqq54wlVgz0/NbbXEb3mdLbqMBztcmS7bVTaEd2w0qMmfeg==", "license": "MIT", "dependencies": { - "shiki": "1.24.0" + "@shikijs/core": "2.5.0", + "@shikijs/types": "2.5.0" } }, "node_modules/@shikijs/types": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-1.24.0.tgz", - "integrity": "sha512-aptbEuq1Pk88DMlCe+FzXNnBZ17LCiLIGWAeCWhoFDzia5Q5Krx3DgnULLiouSdd6+LUM39XwXGppqYE0Ghtug==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-2.5.0.tgz", + "integrity": "sha512-ygl5yhxki9ZLNuNpPitBWvcy9fsSKKaRuO4BAlMyagszQidxcpLAr0qiW/q43DtSIDxO6hEbtYLiFZNXO/hdGw==", "license": "MIT", "dependencies": { - "@shikijs/vscode-textmate": "^9.3.0", + "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "node_modules/@shikijs/vscode-textmate": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-9.3.0.tgz", - "integrity": "sha512-jn7/7ky30idSkd/O5yDBfAnVt+JJpepofP/POZ1iMOxK59cOfqIgg/Dj0eFsjOTMw+4ycJN0uhZH/Eb0bs/EUA==", + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", + "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", "license": "MIT" }, "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "license": "MIT" }, "node_modules/@types/hast": { @@ -1064,21 +1150,21 @@ "license": "MIT" }, "node_modules/@types/web-bluetooth": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.20.tgz", - "integrity": 
"sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==", + "version": "0.0.21", + "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.21.tgz", + "integrity": "sha512-oIQLCGWtcFZy2JW77j9k8nHzAOpqMHLQejDA48XXMWH6tjCQHz5RCFz1bzsmROyL6PUm+LLnUiI4BCn221inxA==", "license": "MIT" }, "node_modules/@ungap/structured-clone": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", - "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", "license": "ISC" }, "node_modules/@vitejs/plugin-vue": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.2.1.tgz", - "integrity": "sha512-cxh314tzaWwOLqVes2gnnCtvBDcM1UMdn+iFR+UjAn411dPT3tOmqrJjbMd7koZpMAmBM/GqeV4n9ge7JSiJJQ==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.2.4.tgz", + "integrity": "sha512-7Yx/SXSOcQq5HiiV3orevHUFn+pmMB4cgbEkDYgnkUWb0WfeQ/wa2yFv6D5ICiCQOVpjA7vYDXrC7AGO8yjDHA==", "license": "MIT", "engines": { "node": "^18.0.0 || >=20.0.0" @@ -1089,188 +1175,162 @@ } }, "node_modules/@vue/compiler-core": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.13.tgz", - "integrity": "sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.22.tgz", + "integrity": "sha512-jQ0pFPmZwTEiRNSb+i9Ow/I/cHv2tXYqsnHKKyCQ08irI2kdF5qmYedmF8si8mA7zepUFmJ2hqzS8CQmNOWOkQ==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.25.3", - "@vue/shared": "3.5.13", + "@babel/parser": "^7.28.4", + "@vue/shared": "3.5.22", "entities": "^4.5.0", "estree-walker": "^2.0.2", - "source-map-js": "^1.2.0" + "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-dom": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.13.tgz", - "integrity": "sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.22.tgz", + "integrity": "sha512-W8RknzUM1BLkypvdz10OVsGxnMAuSIZs9Wdx1vzA3mL5fNMN15rhrSCLiTm6blWeACwUwizzPVqGJgOGBEN/hA==", "license": "MIT", "dependencies": { - "@vue/compiler-core": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-core": "3.5.22", + "@vue/shared": "3.5.22" } }, "node_modules/@vue/compiler-sfc": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.13.tgz", - "integrity": "sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.22.tgz", + "integrity": "sha512-tbTR1zKGce4Lj+JLzFXDq36K4vcSZbJ1RBu8FxcDv1IGRz//Dh2EBqksyGVypz3kXpshIfWKGOCcqpSbyGWRJQ==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.25.3", - "@vue/compiler-core": "3.5.13", - "@vue/compiler-dom": "3.5.13", - "@vue/compiler-ssr": "3.5.13", - "@vue/shared": "3.5.13", + "@babel/parser": 
"^7.28.4", + "@vue/compiler-core": "3.5.22", + "@vue/compiler-dom": "3.5.22", + "@vue/compiler-ssr": "3.5.22", + "@vue/shared": "3.5.22", "estree-walker": "^2.0.2", - "magic-string": "^0.30.11", - "postcss": "^8.4.48", - "source-map-js": "^1.2.0" + "magic-string": "^0.30.19", + "postcss": "^8.5.6", + "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-ssr": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.13.tgz", - "integrity": "sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.22.tgz", + "integrity": "sha512-GdgyLvg4R+7T8Nk2Mlighx7XGxq/fJf9jaVofc3IL0EPesTE86cP/8DD1lT3h1JeZr2ySBvyqKQJgbS54IX1Ww==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-dom": "3.5.22", + "@vue/shared": "3.5.22" } }, "node_modules/@vue/devtools-api": { - "version": "7.6.7", - "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.6.7.tgz", - "integrity": "sha512-PV4I31WaV2rfA8RGauM+69uFEzWkqtP561RiLU2wK+Ce85u3zyKW3aoESlLCNzkc4y0JaJyskH6zAE3xWOP8+Q==", + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.7.7.tgz", + "integrity": "sha512-lwOnNBH2e7x1fIIbVT7yF5D+YWhqELm55/4ZKf45R9T8r9dE2AIOy8HKjfqzGsoTHFbWbr337O4E0A0QADnjBg==", "license": "MIT", "dependencies": { - "@vue/devtools-kit": "^7.6.7" + "@vue/devtools-kit": "^7.7.7" } }, "node_modules/@vue/devtools-kit": { - "version": "7.6.7", - "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.6.7.tgz", - "integrity": "sha512-V8/jrXY/swHgnblABG9U4QCbE60c6RuPasmv2d9FvVqc5d94t1vDiESuvRmdNJBdWz4/D3q6ffgyAfRVjwHYEw==", + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.7.7.tgz", + "integrity": "sha512-wgoZtxcTta65cnZ1Q6MbAfePVFxfM+gq0saaeytoph7nEa7yMXoi6sCPy4ufO111B9msnw0VOWjPEFCXuAKRHA==", "license": "MIT", "dependencies": { - "@vue/devtools-shared": "^7.6.7", - "birpc": "^0.2.19", + "@vue/devtools-shared": "^7.7.7", + "birpc": "^2.3.0", "hookable": "^5.5.3", "mitt": "^3.0.1", "perfect-debounce": "^1.0.0", "speakingurl": "^14.0.1", - "superjson": "^2.2.1" + "superjson": "^2.2.2" } }, "node_modules/@vue/devtools-shared": { - "version": "7.6.7", - "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.6.7.tgz", - "integrity": "sha512-QggO6SviAsolrePAXZ/sA1dSicSPt4TueZibCvydfhNDieL1lAuyMTgQDGst7TEvMGb4vgYv2I+1sDkO4jWNnw==", + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.7.7.tgz", + "integrity": "sha512-+udSj47aRl5aKb0memBvcUG9koarqnxNM5yjuREvqwK6T3ap4mn3Zqqc17QrBFTqSMjr3HK1cvStEZpMDpfdyw==", "license": "MIT", "dependencies": { "rfdc": "^1.4.1" } }, "node_modules/@vue/reactivity": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.13.tgz", - "integrity": "sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.22.tgz", + "integrity": "sha512-f2Wux4v/Z2pqc9+4SmgZC1p73Z53fyD90NFWXiX9AKVnVBEvLFOWCEgJD3GdGnlxPZt01PSlfmLqbLYzY/Fw4A==", "license": "MIT", "dependencies": { - "@vue/shared": "3.5.13" + "@vue/shared": "3.5.22" } }, "node_modules/@vue/runtime-core": { - "version": "3.5.13", - "resolved": 
"https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.13.tgz", - "integrity": "sha512-Fj4YRQ3Az0WTZw1sFe+QDb0aXCerigEpw418pw1HBUKFtnQHWzwojaukAs2X/c9DQz4MQ4bsXTGlcpGxU/RCIw==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.22.tgz", + "integrity": "sha512-EHo4W/eiYeAzRTN5PCextDUZ0dMs9I8mQ2Fy+OkzvRPUYQEyK9yAjbasrMCXbLNhF7P0OUyivLjIy0yc6VrLJQ==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/reactivity": "3.5.22", + "@vue/shared": "3.5.22" } }, "node_modules/@vue/runtime-dom": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.13.tgz", - "integrity": "sha512-dLaj94s93NYLqjLiyFzVs9X6dWhTdAlEAciC3Moq7gzAc13VJUdCnjjRurNM6uTLFATRHexHCTu/Xp3eW6yoog==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.22.tgz", + "integrity": "sha512-Av60jsryAkI023PlN7LsqrfPvwfxOd2yAwtReCjeuugTJTkgrksYJJstg1e12qle0NarkfhfFu1ox2D+cQotww==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.13", - "@vue/runtime-core": "3.5.13", - "@vue/shared": "3.5.13", + "@vue/reactivity": "3.5.22", + "@vue/runtime-core": "3.5.22", + "@vue/shared": "3.5.22", "csstype": "^3.1.3" } }, "node_modules/@vue/server-renderer": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.13.tgz", - "integrity": "sha512-wAi4IRJV/2SAW3htkTlB+dHeRmpTiVIK1OGLWV1yeStVSebSQQOwGwIq0D3ZIoBj2C2qpgz5+vX9iEBkTdk5YA==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.22.tgz", + "integrity": "sha512-gXjo+ao0oHYTSswF+a3KRHZ1WszxIqO7u6XwNHqcqb9JfyIL/pbWrrh/xLv7jeDqla9u+LK7yfZKHih1e1RKAQ==", "license": "MIT", "dependencies": { - "@vue/compiler-ssr": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-ssr": "3.5.22", + "@vue/shared": "3.5.22" }, "peerDependencies": { - "vue": "3.5.13" + "vue": "3.5.22" } }, "node_modules/@vue/shared": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.13.tgz", - "integrity": "sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.22.tgz", + "integrity": "sha512-F4yc6palwq3TT0u+FYf0Ns4Tfl9GRFURDN2gWG7L1ecIaS/4fCIuFOjMTnCyjsu/OK6vaDKLCrGAa+KvvH+h4w==", "license": "MIT" }, "node_modules/@vueuse/core": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-11.3.0.tgz", - "integrity": "sha512-7OC4Rl1f9G8IT6rUfi9JrKiXy4bfmHhZ5x2Ceojy0jnd3mHNEvV4JaRygH362ror6/NZ+Nl+n13LPzGiPN8cKA==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-12.8.2.tgz", + "integrity": "sha512-HbvCmZdzAu3VGi/pWYm5Ut+Kd9mn1ZHnn4L5G8kOQTPs/IwIAmJoBrmYk2ckLArgMXZj0AW3n5CAejLUO+PhdQ==", "license": "MIT", "dependencies": { - "@types/web-bluetooth": "^0.0.20", - "@vueuse/metadata": "11.3.0", - "@vueuse/shared": "11.3.0", - "vue-demi": ">=0.14.10" + "@types/web-bluetooth": "^0.0.21", + "@vueuse/metadata": "12.8.2", + "@vueuse/shared": "12.8.2", + "vue": "^3.5.13" }, "funding": { "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@vueuse/core/node_modules/vue-demi": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.10.tgz", - "integrity": 
"sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "vue-demi-fix": "bin/vue-demi-fix.js", - "vue-demi-switch": "bin/vue-demi-switch.js" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - }, - "peerDependencies": { - "@vue/composition-api": "^1.0.0-rc.1", - "vue": "^3.0.0-0 || ^2.6.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - } - } - }, "node_modules/@vueuse/integrations": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/integrations/-/integrations-11.3.0.tgz", - "integrity": "sha512-5fzRl0apQWrDezmobchoiGTkGw238VWESxZHazfhP3RM7pDSiyXy18QbfYkILoYNTd23HPAfQTJpkUc5QbkwTw==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/integrations/-/integrations-12.8.2.tgz", + "integrity": "sha512-fbGYivgK5uBTRt7p5F3zy6VrETlV9RtZjBqd1/HxGdjdckBgBM4ugP8LHpjolqTj14TXTxSK1ZfgPbHYyGuH7g==", "license": "MIT", "dependencies": { - "@vueuse/core": "11.3.0", - "@vueuse/shared": "11.3.0", - "vue-demi": ">=0.14.10" + "@vueuse/core": "12.8.2", + "@vueuse/shared": "12.8.2", + "vue": "^3.5.13" }, "funding": { "url": "https://github.com/sponsors/antfu" @@ -1328,107 +1388,56 @@ } } }, - "node_modules/@vueuse/integrations/node_modules/vue-demi": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.10.tgz", - "integrity": "sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "vue-demi-fix": "bin/vue-demi-fix.js", - "vue-demi-switch": "bin/vue-demi-switch.js" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - }, - "peerDependencies": { - "@vue/composition-api": "^1.0.0-rc.1", - "vue": "^3.0.0-0 || ^2.6.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - } - } - }, "node_modules/@vueuse/metadata": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-11.3.0.tgz", - "integrity": "sha512-pwDnDspTqtTo2HwfLw4Rp6yywuuBdYnPYDq+mO38ZYKGebCUQC/nVj/PXSiK9HX5otxLz8Fn7ECPbjiRz2CC3g==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-12.8.2.tgz", + "integrity": "sha512-rAyLGEuoBJ/Il5AmFHiziCPdQzRt88VxR+Y/A/QhJ1EWtWqPBBAxTAFaSkviwEuOEZNtW8pvkPgoCZQ+HxqW1A==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/antfu" } }, "node_modules/@vueuse/shared": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-11.3.0.tgz", - "integrity": "sha512-P8gSSWQeucH5821ek2mn/ciCk+MS/zoRKqdQIM3bHq6p7GXDAJLmnRRKmF5F65sAVJIfzQlwR3aDzwCn10s8hA==", + "version": "12.8.2", + "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-12.8.2.tgz", + "integrity": "sha512-dznP38YzxZoNloI0qpEfpkms8knDtaoQ6Y/sfS0L7Yki4zh40LFHEhur0odJC6xTHG5dxWVPiUWBXn+wCG2s5w==", "license": "MIT", "dependencies": { - "vue-demi": ">=0.14.10" + "vue": "^3.5.13" }, "funding": { "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@vueuse/shared/node_modules/vue-demi": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.10.tgz", - "integrity": "sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "vue-demi-fix": 
"bin/vue-demi-fix.js", - "vue-demi-switch": "bin/vue-demi-switch.js" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - }, - "peerDependencies": { - "@vue/composition-api": "^1.0.0-rc.1", - "vue": "^3.0.0-0 || ^2.6.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - } - } - }, "node_modules/algoliasearch": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.15.0.tgz", - "integrity": "sha512-Yf3Swz1s63hjvBVZ/9f2P1Uu48GjmjCN+Esxb6MAONMGtZB1fRX8/S1AhUTtsuTlcGovbYLxpHgc7wEzstDZBw==", - "license": "MIT", - "dependencies": { - "@algolia/client-abtesting": "5.15.0", - "@algolia/client-analytics": "5.15.0", - "@algolia/client-common": "5.15.0", - "@algolia/client-insights": "5.15.0", - "@algolia/client-personalization": "5.15.0", - "@algolia/client-query-suggestions": "5.15.0", - "@algolia/client-search": "5.15.0", - "@algolia/ingestion": "1.15.0", - "@algolia/monitoring": "1.15.0", - "@algolia/recommend": "5.15.0", - "@algolia/requester-browser-xhr": "5.15.0", - "@algolia/requester-fetch": "5.15.0", - "@algolia/requester-node-http": "5.15.0" + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.39.0.tgz", + "integrity": "sha512-DzTfhUxzg9QBNGzU/0kZkxEV72TeA4MmPJ7RVfLnQwHNhhliPo7ynglEWJS791rNlLFoTyrKvkapwr/P3EXV9A==", + "license": "MIT", + "dependencies": { + "@algolia/abtesting": "1.5.0", + "@algolia/client-abtesting": "5.39.0", + "@algolia/client-analytics": "5.39.0", + "@algolia/client-common": "5.39.0", + "@algolia/client-insights": "5.39.0", + "@algolia/client-personalization": "5.39.0", + "@algolia/client-query-suggestions": "5.39.0", + "@algolia/client-search": "5.39.0", + "@algolia/ingestion": "1.39.0", + "@algolia/monitoring": "1.39.0", + "@algolia/recommend": "5.39.0", + "@algolia/requester-browser-xhr": "5.39.0", + "@algolia/requester-fetch": "5.39.0", + "@algolia/requester-node-http": "5.39.0" }, "engines": { "node": ">= 14.0.0" } }, "node_modules/birpc": { - "version": "0.2.19", - "resolved": "https://registry.npmjs.org/birpc/-/birpc-0.2.19.tgz", - "integrity": "sha512-5WeXXAvTmitV1RqJFppT5QtUiz2p1mRSYU000Jkft5ZUCLJIk4uQriYNO50HknxKwM6jd8utNc66K1qGIwwWBQ==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.6.1.tgz", + "integrity": "sha512-LPnFhlDpdSH6FJhJyn4M0kFO7vtQ5iPw24FnG0y21q09xC7e8+1LeR31S1MAIrDAHp4m7aas4bEkTDTvMAtebQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/antfu" @@ -1580,9 +1589,9 @@ "license": "MIT" }, "node_modules/focus-trap": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.6.2.tgz", - "integrity": "sha512-9FhUxK1hVju2+AiQIDJ5Dd//9R2n2RAfJ0qfhF4IHGHgcoEUTMpbTeG/zbEuwaiYXfuAH6XE0/aCyxDdRM+W5w==", + "version": "7.6.5", + "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.6.5.tgz", + "integrity": "sha512-7Ke1jyybbbPZyZXFxEftUtxFGLMpE2n6A+z//m4CRDlj0hW+o3iYSmh8nFlYMurOiJVDmJRilUQtJr08KfIxlg==", "license": "MIT", "dependencies": { "tabbable": "^6.2.0" @@ -1603,9 +1612,9 @@ } }, "node_modules/hast-util-to-html": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.3.tgz", - "integrity": "sha512-M17uBDzMJ9RPCqLMO92gNNUDuBSq10a25SDBI08iCCxmorf4Yy6sYHK57n9WAbRAAaU+DuR4W6GN9K4DFZesYg==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", + "integrity": 
"sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", "license": "MIT", "dependencies": { "@types/hast": "^3.0.0", @@ -1615,7 +1624,7 @@ "hast-util-whitespace": "^3.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", - "property-information": "^6.0.0", + "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "stringify-entities": "^4.0.0", "zwitch": "^2.0.4" @@ -1667,12 +1676,12 @@ } }, "node_modules/magic-string": { - "version": "0.30.14", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.14.tgz", - "integrity": "sha512-5c99P1WKTed11ZC0HMJOj6CDIue6F8ySu+bJL+85q1zBEIY8IklrJ1eiKC2NDRh3Ct3FcvmJPyQHb9erXMTJNw==", + "version": "0.30.19", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", + "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" + "@jridgewell/sourcemap-codec": "^1.5.5" } }, "node_modules/mark.js": { @@ -1776,9 +1785,9 @@ "license": "MIT" }, "node_modules/micromark-util-types": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.1.tgz", - "integrity": "sha512-534m2WhVTddrcKVepwmVEVnUAmtrx9bfIjNoQHRqfnvdaHQiFytEhJoTgpWJvDEXCO5gLTQh3wYC1PgOJA4NSQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", "funding": [ { "type": "GitHub Sponsors", @@ -1792,9 +1801,9 @@ "license": "MIT" }, "node_modules/minisearch": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-7.1.1.tgz", - "integrity": "sha512-b3YZEYCEH4EdCAtYP7OlDyx7FdPwNzuNwLQ34SfJpM9dlbBZzeXndGavTrC+VCiRWomL21SWfMc6SCKO/U2ZNw==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-7.2.0.tgz", + "integrity": "sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==", "license": "MIT" }, "node_modules/mitt": { @@ -1804,9 +1813,9 @@ "license": "MIT" }, "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "funding": [ { "type": "github", @@ -1822,14 +1831,14 @@ } }, "node_modules/oniguruma-to-es": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-0.7.0.tgz", - "integrity": "sha512-HRaRh09cE0gRS3+wi2zxekB+I5L8C/gN60S+vb11eADHUaB/q4u8wGGOX3GvwvitG8ixaeycZfeoyruKQzUgNg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-3.1.1.tgz", + "integrity": "sha512-bUH8SDvPkH3ho3dvwJwfonjlQ4R80vjyvrU8YpxuROddv55vAEJrTuCuCVUhhsHbtlD9tGGbaNApGQckXhS8iQ==", "license": "MIT", "dependencies": { "emoji-regex-xs": "^1.0.0", - "regex": "^5.0.2", - "regex-recursion": "^4.3.0" + "regex": "^6.0.1", + "regex-recursion": "^6.0.2" } }, "node_modules/perfect-debounce": { @@ -1845,9 +1854,9 @@ "license": "ISC" }, "node_modules/postcss": { - "version": "8.4.49", - 
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", - "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "funding": [ { "type": "opencollective", @@ -1864,7 +1873,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.7", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -1873,9 +1882,9 @@ } }, "node_modules/preact": { - "version": "10.25.1", - "resolved": "https://registry.npmjs.org/preact/-/preact-10.25.1.tgz", - "integrity": "sha512-frxeZV2vhQSohQwJ7FvlqC40ze89+8friponWUFeVEkaCfhC6Eu4V0iND5C9CXz8JLndV07QRDeXzH1+Anz5Og==", + "version": "10.27.2", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.27.2.tgz", + "integrity": "sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg==", "license": "MIT", "funding": { "type": "opencollective", @@ -1883,9 +1892,9 @@ } }, "node_modules/property-information": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", - "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", "license": "MIT", "funding": { "type": "github", @@ -1893,18 +1902,18 @@ } }, "node_modules/regex": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/regex/-/regex-5.0.2.tgz", - "integrity": "sha512-/pczGbKIQgfTMRV0XjABvc5RzLqQmwqxLHdQao2RTXPk+pmTXB2P0IaUHYdYyk412YLwUIkaeMd5T+RzVgTqnQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/regex/-/regex-6.0.1.tgz", + "integrity": "sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==", "license": "MIT", "dependencies": { "regex-utilities": "^2.3.0" } }, "node_modules/regex-recursion": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-4.3.0.tgz", - "integrity": "sha512-5LcLnizwjcQ2ALfOj95MjcatxyqF5RPySx9yT+PaXu3Gox2vyAtLDjHB8NTJLtMGkvyau6nI3CfpwFCjPUIs/A==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-6.0.2.tgz", + "integrity": "sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==", "license": "MIT", "dependencies": { "regex-utilities": "^2.3.0" @@ -1923,12 +1932,12 @@ "license": "MIT" }, "node_modules/rollup": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.28.0.tgz", - "integrity": "sha512-G9GOrmgWHBma4YfCcX8PjH0qhXSdH8B4HDE2o4/jaxj93S4DPCIDoLcXz99eWMji4hB29UFCEd7B2gwGJDR9cQ==", + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.52.4.tgz", + "integrity": "sha512-CLEVl+MnPAiKh5pl4dEWSyMTpuflgNQiLGhMv8ezD5W/qP8AKvmYpCOKRRNOh7oRKnauBZ4SyeYkMS+1VSyKwQ==", "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -1938,24 +1947,28 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.28.0", - "@rollup/rollup-android-arm64": 
"4.28.0", - "@rollup/rollup-darwin-arm64": "4.28.0", - "@rollup/rollup-darwin-x64": "4.28.0", - "@rollup/rollup-freebsd-arm64": "4.28.0", - "@rollup/rollup-freebsd-x64": "4.28.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.28.0", - "@rollup/rollup-linux-arm-musleabihf": "4.28.0", - "@rollup/rollup-linux-arm64-gnu": "4.28.0", - "@rollup/rollup-linux-arm64-musl": "4.28.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.28.0", - "@rollup/rollup-linux-riscv64-gnu": "4.28.0", - "@rollup/rollup-linux-s390x-gnu": "4.28.0", - "@rollup/rollup-linux-x64-gnu": "4.28.0", - "@rollup/rollup-linux-x64-musl": "4.28.0", - "@rollup/rollup-win32-arm64-msvc": "4.28.0", - "@rollup/rollup-win32-ia32-msvc": "4.28.0", - "@rollup/rollup-win32-x64-msvc": "4.28.0", + "@rollup/rollup-android-arm-eabi": "4.52.4", + "@rollup/rollup-android-arm64": "4.52.4", + "@rollup/rollup-darwin-arm64": "4.52.4", + "@rollup/rollup-darwin-x64": "4.52.4", + "@rollup/rollup-freebsd-arm64": "4.52.4", + "@rollup/rollup-freebsd-x64": "4.52.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.52.4", + "@rollup/rollup-linux-arm-musleabihf": "4.52.4", + "@rollup/rollup-linux-arm64-gnu": "4.52.4", + "@rollup/rollup-linux-arm64-musl": "4.52.4", + "@rollup/rollup-linux-loong64-gnu": "4.52.4", + "@rollup/rollup-linux-ppc64-gnu": "4.52.4", + "@rollup/rollup-linux-riscv64-gnu": "4.52.4", + "@rollup/rollup-linux-riscv64-musl": "4.52.4", + "@rollup/rollup-linux-s390x-gnu": "4.52.4", + "@rollup/rollup-linux-x64-gnu": "4.52.4", + "@rollup/rollup-linux-x64-musl": "4.52.4", + "@rollup/rollup-openharmony-arm64": "4.52.4", + "@rollup/rollup-win32-arm64-msvc": "4.52.4", + "@rollup/rollup-win32-ia32-msvc": "4.52.4", + "@rollup/rollup-win32-x64-gnu": "4.52.4", + "@rollup/rollup-win32-x64-msvc": "4.52.4", "fsevents": "~2.3.2" } }, @@ -1967,16 +1980,18 @@ "peer": true }, "node_modules/shiki": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.24.0.tgz", - "integrity": "sha512-qIneep7QRwxRd5oiHb8jaRzH15V/S8F3saCXOdjwRLgozZJr5x2yeBhQtqkO3FSzQDwYEFAYuifg4oHjpDghrg==", - "license": "MIT", - "dependencies": { - "@shikijs/core": "1.24.0", - "@shikijs/engine-javascript": "1.24.0", - "@shikijs/engine-oniguruma": "1.24.0", - "@shikijs/types": "1.24.0", - "@shikijs/vscode-textmate": "^9.3.0", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-2.5.0.tgz", + "integrity": "sha512-mI//trrsaiCIPsja5CNfsyNOqgAZUb6VpJA+340toL42UpzQlXpwRV9nch69X6gaUxrr9kaOOa6e3y3uAkGFxQ==", + "license": "MIT", + "dependencies": { + "@shikijs/core": "2.5.0", + "@shikijs/engine-javascript": "2.5.0", + "@shikijs/engine-oniguruma": "2.5.0", + "@shikijs/langs": "2.5.0", + "@shikijs/themes": "2.5.0", + "@shikijs/types": "2.5.0", + "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, @@ -2023,9 +2038,9 @@ } }, "node_modules/superjson": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.1.tgz", - "integrity": "sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.2.tgz", + "integrity": "sha512-5JRxVqC8I8NuOUjzBbvVJAKNM8qoVuH0O77h4WInc/qC2q5IreqKxYwgkga3PfA22OayK2ikceb/B26dztPl+Q==", "license": "MIT", "dependencies": { "copy-anything": "^3.0.2" @@ -2133,9 +2148,9 @@ } }, "node_modules/vfile-message": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", - "integrity": 
"sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", "license": "MIT", "dependencies": { "@types/unist": "^3.0.0", @@ -2147,9 +2162,9 @@ } }, "node_modules/vite": { - "version": "5.4.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz", - "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==", + "version": "5.4.20", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.20.tgz", + "integrity": "sha512-j3lYzGC3P+B5Yfy/pfKNgVEg4+UtcIJcVRt2cDjIOmhLourAqPqf8P7acgxeiSgUB7E3p2P8/3gNIgDLpwzs4g==", "license": "MIT", "dependencies": { "esbuild": "^0.21.3", @@ -2206,29 +2221,29 @@ } }, "node_modules/vitepress": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/vitepress/-/vitepress-1.5.0.tgz", - "integrity": "sha512-q4Q/G2zjvynvizdB3/bupdYkCJe2umSAMv9Ju4d92E6/NXJ59z70xB0q5p/4lpRyAwflDsbwy1mLV9Q5+nlB+g==", + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/vitepress/-/vitepress-1.6.4.tgz", + "integrity": "sha512-+2ym1/+0VVrbhNyRoFFesVvBvHAVMZMK0rw60E3X/5349M1GuVdKeazuksqopEdvkKwKGs21Q729jX81/bkBJg==", "license": "MIT", "dependencies": { - "@docsearch/css": "^3.6.2", - "@docsearch/js": "^3.6.2", - "@iconify-json/simple-icons": "^1.2.10", - "@shikijs/core": "^1.22.2", - "@shikijs/transformers": "^1.22.2", - "@shikijs/types": "^1.22.2", + "@docsearch/css": "3.8.2", + "@docsearch/js": "3.8.2", + "@iconify-json/simple-icons": "^1.2.21", + "@shikijs/core": "^2.1.0", + "@shikijs/transformers": "^2.1.0", + "@shikijs/types": "^2.1.0", "@types/markdown-it": "^14.1.2", - "@vitejs/plugin-vue": "^5.1.4", - "@vue/devtools-api": "^7.5.4", - "@vue/shared": "^3.5.12", - "@vueuse/core": "^11.1.0", - "@vueuse/integrations": "^11.1.0", - "focus-trap": "^7.6.0", + "@vitejs/plugin-vue": "^5.2.1", + "@vue/devtools-api": "^7.7.0", + "@vue/shared": "^3.5.13", + "@vueuse/core": "^12.4.0", + "@vueuse/integrations": "^12.4.0", + "focus-trap": "^7.6.4", "mark.js": "8.11.1", - "minisearch": "^7.1.0", - "shiki": "^1.22.2", - "vite": "^5.4.10", - "vue": "^3.5.12" + "minisearch": "^7.1.1", + "shiki": "^2.1.0", + "vite": "^5.4.14", + "vue": "^3.5.13" }, "bin": { "vitepress": "bin/vitepress.js" @@ -2247,16 +2262,16 @@ } }, "node_modules/vue": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.13.tgz", - "integrity": "sha512-wmeiSMxkZCSc+PM2w2VRsOYAZC8GdipNFRTsLSfodVqI9mbejKeXEGr8SckuLnrQPGe3oJN5c3K0vpoU9q/wCQ==", + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.22.tgz", + "integrity": "sha512-toaZjQ3a/G/mYaLSbV+QsQhIdMo9x5rrqIpYRObsJ6T/J+RyCSFwN2LHNVH9v8uIcljDNa3QzPVdv3Y6b9hAJQ==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.13", - "@vue/compiler-sfc": "3.5.13", - "@vue/runtime-dom": "3.5.13", - "@vue/server-renderer": "3.5.13", - "@vue/shared": "3.5.13" + "@vue/compiler-dom": "3.5.22", + "@vue/compiler-sfc": "3.5.22", + "@vue/runtime-dom": "3.5.22", + "@vue/server-renderer": "3.5.22", + "@vue/shared": "3.5.22" }, "peerDependencies": { "typescript": "*" diff --git a/docs/package.json b/docs/package.json index c9348f5..83dc38c 100644 --- a/docs/package.json +++ b/docs/package.json @@ -6,6 +6,6 @@ "preview": "vitepress preview" }, "dependencies": { - "vitepress": "^1.5.0" + "vitepress": 
"1.6.4" } -} +} \ No newline at end of file diff --git a/examples/gustav/src/contexts/meal-planner/.gitkeep b/examples/gustav/src/contexts/meal-planner/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/examples/gustav/src/contexts/notification/.gitkeep b/examples/gustav/src/contexts/notification/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts index 0cedd65..86aea08 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts @@ -1,9 +1,8 @@ import { Command, InvalidInputException } from '@nimbus/core'; -import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; import { getEnv } from '@nimbus/utils'; import { ulid } from '@std/ulid'; import { Recipe } from '../domain/recipe.ts'; -import { recipeReducer, recipeSubject } from '../domain/recipeAggregate.ts'; +import { RecipeState, recipeSubject } from '../domain/recipeAggregate.ts'; import { RecipeAddedCommandType, RecipeAddedEvent, @@ -15,25 +14,18 @@ export type AddRecipeCommand = Command & { type: typeof AddRecipeCommandType; }; -export const addRecipe = async ( +export const addRecipe = ( command: AddRecipeCommand, - eventStore: EventStore, -): Promise => { + state: RecipeState, +): { + newState: Recipe; + events: RecipeAddedEvent[]; +} => { const { EVENT_SOURCE } = getEnv({ variables: ['EVENT_SOURCE'], }); - const subject = recipeSubject(command.data.slug); - - // Load current aggregate state by replaying events - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - - if (snapshot.state !== null) { + if (state !== null) { throw new InvalidInputException('Recipe already exists', { errorCode: 'DUPLICATE_RECIPE', reason: @@ -41,7 +33,8 @@ export const addRecipe = async ( }); } - // Create event + const subject = recipeSubject(command.data.slug); + const recipeAddedEvent: RecipeAddedEvent = { specversion: '1.0', id: ulid(), @@ -54,11 +47,8 @@ export const addRecipe = async ( datacontenttype: 'application/json', }; - await eventStore.writeEvents( - [ - recipeAddedEvent, - ], - ); - - return command.data; + return { + newState: command.data, + events: [recipeAddedEvent], + }; }; diff --git a/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts index 5fabefb..a118f9a 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts @@ -1,9 +1,8 @@ import { Command } from '@nimbus/core'; -import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; import { getEnv } from '@nimbus/utils'; import { ulid } from '@std/ulid'; import { - recipeReducer, + RecipeState, recipeSubject, requireRecipe, } from '../domain/recipeAggregate.ts'; @@ -23,26 +22,21 @@ export type DeleteRecipeCommand = type: typeof DeleteRecipeCommandType; }; -export const deleteRecipe = async ( +export const deleteRecipe = ( command: DeleteRecipeCommand, - eventStore: EventStore, -): Promise => { + state: RecipeState, +): { + newState: RecipeState; + events: RecipeDeletedEvent[]; +} => { const { EVENT_SOURCE } = getEnv({ variables: ['EVENT_SOURCE'], }); - const subject = recipeSubject(command.data.slug); - - // Load current aggregate state by replaying events - const snapshot = await loadAggregate( - 
eventStore, - subject, - null, - recipeReducer, - ); - // Validate recipe exists - requireRecipe(snapshot.state); + requireRecipe(state); + + const subject = recipeSubject(command.data.slug); // Create event const recipeDeletedEvent: RecipeDeletedEvent = { @@ -57,24 +51,8 @@ export const deleteRecipe = async ( datacontenttype: 'application/json', }; - // Write event with optimistic concurrency control - // Use isSubjectOnEventId to ensure no other updates happened since we read - await eventStore.writeEvents( - [ - recipeDeletedEvent, - ], - { - preconditions: snapshot.lastEventId - ? [ - { - type: 'isSubjectOnEventId', - payload: { - subject, - eventId: snapshot.lastEventId, - }, - }, - ] - : undefined, - }, - ); + return { + newState: null, + events: [recipeDeletedEvent], + }; }; diff --git a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts index 2ed3f82..0869a6c 100644 --- a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts +++ b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts @@ -1,10 +1,9 @@ import { Command } from '@nimbus/core'; -import { type EventStore, loadAggregate } from '@nimbus/eventsourcing'; import { getEnv } from '@nimbus/utils'; import { ulid } from '@std/ulid'; import { Recipe } from '../domain/recipe.ts'; import { - recipeReducer, + RecipeState, recipeSubject, requireRecipe, } from '../domain/recipeAggregate.ts'; @@ -26,26 +25,21 @@ export type UpdateRecipeCommand = type: typeof UpdateRecipeCommandType; }; -export const updateRecipe = async ( +export const updateRecipe = ( command: UpdateRecipeCommand, - eventStore: EventStore, -): Promise => { + state: RecipeState, +): { + newState: Recipe; + events: RecipeUpdatedEvent[]; +} => { const { EVENT_SOURCE } = getEnv({ variables: ['EVENT_SOURCE'], }); const subject = recipeSubject(command.data.slug); - // Load current aggregate state by replaying events - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - // Validate recipe exists - const currentRecipe = requireRecipe(snapshot.state); + const currentRecipe = requireRecipe(state); // Prevent slug changes const updates: Partial = { @@ -55,12 +49,6 @@ export const updateRecipe = async ( delete updates.slug; } - // Apply updates to get new state - const updatedRecipe = { - ...currentRecipe, - ...updates, - }; - // Create event const recipeUpdatedEvent: RecipeUpdatedEvent = { specversion: '1.0', @@ -77,26 +65,11 @@ export const updateRecipe = async ( datacontenttype: 'application/json', }; - // Write event with optimistic concurrency control - // Use isSubjectOnEventId to ensure no other updates happened since we read - await eventStore.writeEvents( - [ - recipeUpdatedEvent, - ], - { - preconditions: snapshot.lastEventId - ? 
[ - { - type: 'isSubjectOnEventId', - payload: { - subject, - eventId: snapshot.lastEventId, - }, - }, - ] - : undefined, + return { + newState: { + ...currentRecipe, + ...updates, }, - ); - - return updatedRecipe; + events: [recipeUpdatedEvent], + }; }; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts index 0352045..ea9830c 100644 --- a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts +++ b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts @@ -1,6 +1,5 @@ import { type Event, getLogger } from '@nimbus/core'; import { Recipe } from '../domain/recipe.ts'; -import { RecipeRepository } from '../ports/recipeRepository.ts'; export const RecipeAddedCommandType = 'at.overlap.nimbus.recipe-added' as const; @@ -8,16 +7,11 @@ export type RecipeAddedEvent = Event & { type: typeof RecipeAddedCommandType; }; -export const recipeAdded = async ( +export const recipeAdded = ( event: RecipeAddedEvent, - repository: RecipeRepository, ) => { getLogger().info({ message: 'recipeAdded Handler', data: event.data, }); - - const recipe = await repository.insert(event.data); - - return recipe; }; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts b/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts index 4bc3b66..59effbc 100644 --- a/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts +++ b/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts @@ -1,5 +1,4 @@ import { type Event, getLogger } from '@nimbus/core'; -import { RecipeRepository } from '../ports/recipeRepository.ts'; export const RecipeDeletedEventType = 'at.overlap.nimbus.recipe-deleted' as const; @@ -8,14 +7,11 @@ export type RecipeDeletedEvent = Event<{ slug: string }> & { type: typeof RecipeDeletedEventType; }; -export const recipeDeleted = async ( +export const recipeDeleted = ( event: RecipeDeletedEvent, - repository: RecipeRepository, ) => { getLogger().info({ message: 'recipeDeleted Handler', data: event.data, }); - - await repository.delete(event.data.slug); }; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts b/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts index cedc39c..4add5f9 100644 --- a/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts +++ b/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts @@ -1,6 +1,5 @@ import { type Event, getLogger } from '@nimbus/core'; import { Recipe } from '../domain/recipe.ts'; -import { RecipeRepository } from '../ports/recipeRepository.ts'; export const RecipeUpdatedEventType = 'at.overlap.nimbus.recipe-updated' as const; @@ -14,14 +13,11 @@ export type RecipeUpdatedEvent = type: typeof RecipeUpdatedEventType; }; -export const recipeUpdated = async ( +export const recipeUpdated = ( event: RecipeUpdatedEvent, - repository: RecipeRepository, ) => { getLogger().info({ message: 'recipeUpdated Handler', data: event.data, }); - - await repository.update(event.data.slug, event.data.updates); }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts new file mode 100644 index 0000000..a7ddf07 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts @@ -0,0 +1,18 @@ +import { MessageHandler } from '@nimbus/core'; +import { Recipe } from '../../core/domain/recipe.ts'; +import { + 
recipeAdded, + RecipeAddedEvent, +} from '../../core/events/recipeAdded.ts'; +import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; + +export const recipeAddedHandler: MessageHandler< + RecipeAddedEvent, + Recipe +> = async (event) => { + recipeAdded(event); + + const recipe = await recipeMemoryRepository.insert(event.data); + + return recipe; +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts new file mode 100644 index 0000000..22cb9c0 --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts @@ -0,0 +1,15 @@ +import { MessageHandler } from '@nimbus/core'; +import { + recipeDeleted, + RecipeDeletedEvent, +} from '../../core/events/recipeDeleted.ts'; +import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; + +export const recipeDeletedHandler: MessageHandler< + RecipeDeletedEvent, + void +> = async (event) => { + recipeDeleted(event); + + await recipeMemoryRepository.delete(event.data.slug); +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts new file mode 100644 index 0000000..cb7561f --- /dev/null +++ b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts @@ -0,0 +1,21 @@ +import { MessageHandler } from '@nimbus/core'; +import { Recipe } from '../../core/domain/recipe.ts'; +import { + recipeUpdated, + RecipeUpdatedEvent, +} from '../../core/events/recipeUpdated.ts'; +import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; + +export const recipeUpdatedHandler: MessageHandler< + RecipeUpdatedEvent, + Recipe +> = async (event) => { + recipeUpdated(event); + + const recipe = await recipeMemoryRepository.update( + event.data.slug, + event.data.updates, + ); + + return recipe; +}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts index 5377ea6..7138123 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts @@ -1,19 +1,58 @@ import { MessageHandler } from '@nimbus/core'; +import { loadAggregate } from '@nimbus/eventsourcing'; import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; import { addRecipe, AddRecipeCommand, } from '../../../core/commands/addRecipe.ts'; import { Recipe } from '../../../core/domain/recipe.ts'; +import { + recipeReducer, + recipeSubject, +} from '../../../core/domain/recipeAggregate.ts'; export const addRecipeHandler: MessageHandler< AddRecipeCommand, Recipe > = async (command) => { - const recipe = await addRecipe( - command, + const subject = recipeSubject(command.data.slug); + + // Load current aggregate state by replaying events + const snapshot = await loadAggregate( eventStore, + subject, + null, + recipeReducer, ); - return recipe; + const { newState, events } = addRecipe( + command, + snapshot.state, + ); + + // Write event with optimistic concurrency control + // In case we already have events for this subject, we use isSubjectOnEventId + // In case we don't have any events for this subject, we use isSubjectPristine + await 
eventStore.writeEvents(events, { + preconditions: snapshot.lastEventId !== undefined + ? [ + { + type: 'isSubjectOnEventId', + payload: { + subject, + eventId: snapshot.lastEventId, + }, + }, + ] + : [ + { + type: 'isSubjectPristine', + payload: { + subject, + }, + }, + ], + }); + + return newState; }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts index ba35039..9b2b9d8 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts @@ -1,16 +1,47 @@ import { MessageHandler } from '@nimbus/core'; +import { loadAggregate } from '@nimbus/eventsourcing'; import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; import { deleteRecipe, DeleteRecipeCommand, } from '../../../core/commands/deleteRecipe.ts'; +import { + recipeReducer, + recipeSubject, +} from '../../../core/domain/recipeAggregate.ts'; export const deleteRecipeHandler: MessageHandler< DeleteRecipeCommand, void > = async (command) => { - await deleteRecipe( - command, + const subject = recipeSubject(command.data.slug); + + // Load current aggregate state by replaying events + const snapshot = await loadAggregate( eventStore, + subject, + null, + recipeReducer, ); + + const { events } = deleteRecipe( + command, + snapshot.state, + ); + + // Write event with optimistic concurrency control + // Use isSubjectOnEventId to ensure no other updates happened since we read + await eventStore.writeEvents(events, { + preconditions: snapshot.lastEventId !== undefined + ? [ + { + type: 'isSubjectOnEventId', + payload: { + subject, + eventId: snapshot.lastEventId, + }, + }, + ] + : undefined, + }); }; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts deleted file mode 100644 index 3133934..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/recipeAdded.handler.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { Recipe } from '../../../core/domain/recipe.ts'; -import { - recipeAdded, - RecipeAddedEvent, -} from '../../../core/events/recipeAdded.ts'; -import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; - -export const recipeAddedHandler: MessageHandler< - RecipeAddedEvent, - Recipe -> = async (event) => { - const recipe = await recipeAdded(event, recipeMemoryRepository); - - return recipe; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts index 2bd12b4..e004aa1 100644 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts +++ b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts @@ -1,19 +1,50 @@ import { MessageHandler } from '@nimbus/core'; +import { loadAggregate } from '@nimbus/eventsourcing'; import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; import { updateRecipe, UpdateRecipeCommand, } from '../../../core/commands/updateRecipe.ts'; import { Recipe } from '../../../core/domain/recipe.ts'; +import { + recipeReducer, + recipeSubject, +} from 
'../../../core/domain/recipeAggregate.ts'; export const updateRecipeHandler: MessageHandler< UpdateRecipeCommand, Recipe > = async (command) => { - const recipe = await updateRecipe( - command, + const subject = recipeSubject(command.data.slug); + + // Load current aggregate state by replaying events + const snapshot = await loadAggregate( eventStore, + subject, + null, + recipeReducer, ); - return recipe; + const { newState, events } = updateRecipe( + command, + snapshot.state, + ); + + // Write event with optimistic concurrency control + // Use isSubjectOnEventId to ensure no other updates happened since we read + await eventStore.writeEvents(events, { + preconditions: snapshot.lastEventId !== undefined + ? [ + { + type: 'isSubjectOnEventId', + payload: { + subject, + eventId: snapshot.lastEventId, + }, + }, + ] + : undefined, + }); + + return newState; }; diff --git a/examples/gustav/src/shared/infrastructure/eventStore.ts b/examples/gustav/src/shared/infrastructure/eventStore.ts index fd8c161..699c207 100644 --- a/examples/gustav/src/shared/infrastructure/eventStore.ts +++ b/examples/gustav/src/shared/infrastructure/eventStore.ts @@ -3,18 +3,20 @@ import type { EventStoreSubscription } from '@nimbus/eventsourcing'; import { EventSourcingDBStore } from '@nimbus/eventsourcingdb'; import { getEnv } from '@nimbus/utils'; import { - recipeAdded, RecipeAddedCommandType, + RecipeAddedEvent, } from '../../contexts/recipe/core/events/recipeAdded.ts'; import { - recipeDeleted, + RecipeDeletedEvent, RecipeDeletedEventType, } from '../../contexts/recipe/core/events/recipeDeleted.ts'; import { - recipeUpdated, + RecipeUpdatedEvent, RecipeUpdatedEventType, } from '../../contexts/recipe/core/events/recipeUpdated.ts'; -import { recipeMemoryRepository } from '../../contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts'; +import { recipeAddedHandler } from '../../contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts'; +import { recipeDeletedHandler } from '../../contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts'; +import { recipeUpdatedHandler } from '../../contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts'; export let eventStore: EventSourcingDBStore; let subscription: EventStoreSubscription | undefined; @@ -69,15 +71,15 @@ export const initEventStore = async () => { // Route events to appropriate read model handlers switch (event.type) { case RecipeAddedCommandType: - await recipeAdded(event as any, recipeMemoryRepository); + await recipeAddedHandler(event as RecipeAddedEvent); break; case RecipeUpdatedEventType: - await recipeUpdated(event as any, recipeMemoryRepository); + await recipeUpdatedHandler(event as RecipeUpdatedEvent); break; case RecipeDeletedEventType: - await recipeDeleted(event as any, recipeMemoryRepository); + await recipeDeletedHandler(event as RecipeDeletedEvent); break; default: From c50d7663a4334c2cf3da8aab8be81cf24c9af2a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 15 Jan 2026 21:19:24 +0100 Subject: [PATCH 14/37] feat(hono): adds hono package and demo --- .claude/settings.local.json | 5 +- .zed/settings.json | 40 +++++ TODO.md | 244 -------------------------- deno.json | 6 +- deno.lock | 47 +++-- examples/hono-demo/deno.json | 33 ++++ examples/hono-demo/src/main.ts | 55 ++++++ examples/hono-demo/start-with-otel.sh | 16 ++ packages/hono/README.md | 14 ++ packages/hono/deno.json | 37 ++++ packages/hono/src/index.ts | 1 + packages/hono/src/lib/logger.ts | 129 ++++++++++++++ 12 files 
changed, 364 insertions(+), 263 deletions(-) create mode 100644 .zed/settings.json delete mode 100644 TODO.md create mode 100644 examples/hono-demo/deno.json create mode 100644 examples/hono-demo/src/main.ts create mode 100644 examples/hono-demo/start-with-otel.sh create mode 100644 packages/hono/README.md create mode 100644 packages/hono/deno.json create mode 100644 packages/hono/src/index.ts create mode 100644 packages/hono/src/lib/logger.ts diff --git a/.claude/settings.local.json b/.claude/settings.local.json index 3e1cf0c..c4fed74 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -8,9 +8,10 @@ "Bash(find:*)", "Bash(deno doc:*)", "WebFetch(domain:docs.eventsourcingdb.io)", - "Bash(cat:*)" + "Bash(cat:*)", + "WebSearch" ], "deny": [], "ask": [] } -} \ No newline at end of file +} diff --git a/.zed/settings.json b/.zed/settings.json new file mode 100644 index 0000000..1ac2386 --- /dev/null +++ b/.zed/settings.json @@ -0,0 +1,40 @@ +{ + "lsp": { + "deno": { + "settings": { + "deno": { + "enable": true, + }, + }, + }, + }, + "languages": { + "JavaScript": { + "language_servers": [ + "deno", + "!typescript-language-server", + "!vtsls", + "!eslint", + ], + "formatter": "language_server", + }, + "TypeScript": { + "language_servers": [ + "deno", + "!typescript-language-server", + "!vtsls", + "!eslint", + ], + "formatter": "language_server", + }, + "TSX": { + "language_servers": [ + "deno", + "!typescript-language-server", + "!vtsls", + "!eslint", + ], + "formatter": "language_server", + }, + }, +} diff --git a/TODO.md b/TODO.md deleted file mode 100644 index cf1b66a..0000000 --- a/TODO.md +++ /dev/null @@ -1,244 +0,0 @@ -# Plan and ToDos - -## Vision - -Have a CLI tool to initialize a new Nimbus project. -This sets up the project structure and installs the necessary dependencies. - -The Structure will follow a Domain Driven Design (DDD) approach with a Hexagonal Architecture. - -The Source of truth for the API will be an AsyncAPI specification in JSON format. - -Based on the AsyncAPI specification the Nimbus framework should register the commands, queries and event on the router structure. -Also the router will provide a route for a health check and routes for a schema registry. -The user now only has to implement the business logic and connects it to the route handlers. - -## Claude conversation: - -**Me:** -I like the simple aproach. When it comes to Schema management my overall vision is to have a asyncapi.json file which will hold the -messages and schemas for the application. And based on that file we could build a Nimbus CLI that generates the boilerplate code for -handlers routes and also schemas. That is the reason why i choose AJV and wrote the validator -(packages/core/src/lib/validator/validator.ts) so a schmea can be added with the addSchema method and if a message which is routet on the -router contains a dataschema value this schema is picked up in the validator. Also I would not reverse the allowUnsafeInput logic as I -want to follow the philosophy of have input validation required as best practive. What do you think is this a good way of handling -things? Or do you suggest something else? Also based on the asyncapi definition and the schema registration on the validator I had the -idea to then also have the option to publish a schema registry later on. - -**Claude:** -Your vision is excellent and very well thought out. This approach aligns perfectly with your simplicity goals while providing -professional-grade tooling. 
Let me validate your thinking: - -Why AsyncAPI + AJV + Schema Registry is Smart - -1. AsyncAPI as Single Source of Truth - -- Defines all messages (commands/queries/events) in one place -- Documents your API automatically -- Industry standard (like OpenAPI but for async/event-driven) -- Enables codegen → less boilerplate, fewer mistakes - -2. AJV Validator Design is Correct - -- Pre-register schemas via validator.addSchema() -- Messages reference schemas via dataschema URL -- Router validates automatically when dataschema is present -- If no dataschema → requires explicit allowUnsafeInput: true - -This forces intentional decisions: "I'm skipping validation" vs. accidental omission. - -3. Schema Registry Future - -- AsyncAPI → Schema Registry is a natural progression -- Enables schema evolution (versioning, compatibility checks) -- Supports microservices discovering each other's contracts -- You're building the foundation now (URL-based schema references) - -Validating Your Philosophy - -✅ Keep allowUnsafeInput as opt-in (unsafe = explicit flag) - -You're right. Best practice = validate by default. The current warning (router.ts:182-185) is good - it reminds developers during -prototyping that they're being unsafe. In production, messages should have schemas. - -✅ Keep per-type routers (command/query/event) - -I questioned this earlier, but with AsyncAPI in mind, it makes sense: - -- AsyncAPI documents commands, queries, events separately -- Different message types may have different routing strategies later (commands = 1:1, events = pub/sub) -- Codegen can generate separate routers based on AsyncAPI channels - -✅ Schema registration pattern is right - -// Generated by Nimbus CLI from asyncapi.json -validator.addSchema(addRecipeSchema); -validator.addSchema(getRecipeSchema); - -// Messages now reference these schemas -const command: Command = { -// ... -dataschema: 'https://nimbus.overlap.at/schemas/commands/add-recipe/v1', -data: { ... } -}; - -This is exactly how JSON Schema + HTTP APIs work. You're applying proven patterns to message-driven architecture. - -Suggested Refinements - -1. AsyncAPI Structure - -Your AsyncAPI should look something like: -asyncapi: 3.0.0 -info: -title: Gustav Recipe API -version: 1.0.0 - -channels: -recipeCommands: -address: /command -messages: -addRecipe: -$ref: '#/components/messages/AddRecipe' - - recipeQueries: - address: /query - messages: - getRecipe: - $ref: '#/components/messages/GetRecipe' - -components: -messages: -AddRecipe: -name: at.overlap.nimbus.add-recipe -payload: -$ref: '#/components/schemas/Recipe' -GetRecipe: -name: at.overlap.nimbus.get-recipe -payload: -$ref: '#/components/schemas/GetRecipeParams' - - schemas: - Recipe: - type: object - properties: - slug: { type: string } - title: { type: string } - # ... - -2. Nimbus CLI Codegen Flow - -nimbus generate --asyncapi asyncapi.json --output src/generated/ - -Should generate: - -- Schema registration (src/generated/schemas.ts): - import { getValidator } from '@nimbus/core'; - -export function registerSchemas() { -getValidator().addSchema(addRecipeSchema); -getValidator().addSchema(getRecipeSchema); -// ... -} - -- Type definitions (src/generated/types.ts): - export type AddRecipeCommand = Command & { - type: 'at.overlap.nimbus.add-recipe'; - }; -- Router setup (src/generated/routers.ts): - export const commandRouter = new MessageRouter('command'); - // Handlers registered manually or via convention - -3. 
Schema URL Convention - -Standardize schema URLs: -https://nimbus.overlap.at/schemas/{type}/{name}/{version} - -Examples: - -- https://nimbus.overlap.at/schemas/commands/add-recipe/v1 -- https://nimbus.overlap.at/schemas/queries/get-recipe/v1 -- https://nimbus.overlap.at/schemas/events/recipe-added/v1 - -This enables: - -- Versioning (v1, v2, etc.) -- Schema registry lookups -- Clear naming conventions - -4. Validator Schema Registration Helper - -Add a convenience method to validator: -// validator.ts -public addSchemas(schemas: SchemaObject[]): void { -for (const schema of schemas) { -this.addSchema(schema); -} -} - -Then generated code can do: -registerSchemas() { -getValidator().addSchemas([ -addRecipeSchema, -getRecipeSchema, -// ... -]); -} - -5. Runtime Schema Validation Reporting - -In Gustav, you're using allowUnsafeInput: true everywhere. Once you have AsyncAPI + codegen: -// Before (manual, no schema) -router.register(AddRecipeCommandType, handler, { allowUnsafeInput: true }); - -// After (generated, with schema) -router.register(AddRecipeCommandType, handler); -// No allowUnsafeInput needed - schema is in the message - -Implementation Phases - -Phase 1: Current (Manual) - -- Write handlers manually -- Use allowUnsafeInput: true during dev -- Validate manually if needed - -Phase 2: AsyncAPI Definition - -- Create asyncapi.json for Gustav -- Define all messages and schemas -- Keep manual handlers - -Phase 3: Nimbus CLI Codegen - -- Build CLI that reads AsyncAPI -- Generate schema registration code -- Generate TypeScript types -- Optionally generate router boilerplate - -Phase 4: Schema Registry - -- Host schemas at URLs (Nimbus schema registry service) -- Support schema versioning -- Schema compatibility checking - -My Take - -Your approach is exactly right. You're building: - -1. A simple developer experience (MessageRouter, clean handlers) -2. With professional tooling (AsyncAPI, codegen, schema validation) -3. That scales (schema registry, versioning, microservices) - -The current implementation already supports this vision - you just need to build the CLI tooling around it. The validator design with -addSchema() + URL-based dataschema references is the perfect foundation. - -Don't change anything about the core design. Focus on: - -- Defining AsyncAPI schema for Gustav -- Building the Nimbus CLI codegen -- Proving the workflow end-to-end - -Once that works, the schema registry is just HTTP hosting of your schemas with versioning. - -This is a really solid architecture. You're threading the needle between "simple for small apps" and "scales to enterprise." 
diff --git a/deno.json b/deno.json index 34258ae..2a596c8 100644 --- a/deno.json +++ b/deno.json @@ -9,9 +9,11 @@ "./packages/eventsourcingdb", "./packages/mongodb", "./packages/oak", + "./packages/hono", "./packages/utils", "./examples/gustav", - "./examples/the-expense" + "./examples/the-expense", + "./examples/hono-demo" ], "nodeModulesDir": "none" -} \ No newline at end of file +} diff --git a/deno.lock b/deno.lock index 14ca611..ccd9c39 100644 --- a/deno.lock +++ b/deno.lock @@ -2,7 +2,7 @@ "version": "5", "specifiers": { "jsr:@oak/commons@1": "1.0.0", - "jsr:@oak/oak@^17.1.4": "17.1.4", + "jsr:@oak/oak@^17.1.4": "17.1.6", "jsr:@oak/oak@^17.1.6": "17.1.6", "jsr:@std/assert@1": "1.0.10", "jsr:@std/assert@^1.0.10": "1.0.10", @@ -10,7 +10,8 @@ "jsr:@std/bytes@^1.0.2": "1.0.4", "jsr:@std/crypto@1": "1.0.3", "jsr:@std/dotenv@*": "0.225.3", - "jsr:@std/dotenv@~0.225.5": "0.225.5", + "jsr:@std/dotenv@~0.225.5": "0.225.6", + "jsr:@std/dotenv@~0.225.6": "0.225.6", "jsr:@std/encoding@1": "1.0.6", "jsr:@std/encoding@^1.0.5": "1.0.6", "jsr:@std/fmt@^1.0.4": "1.0.5", @@ -23,14 +24,16 @@ "jsr:@std/text@^1.0.10": "1.0.10", "jsr:@std/ulid@1": "1.0.0", "jsr:@tajpouria/cors@^1.2.1": "1.2.1", + "npm:@opentelemetry/api@^1.9.0": "1.9.0", "npm:@types/node@*": "22.5.4", - "npm:ajv-formats@^3.0.1": "3.0.1_ajv@8.17.1", "npm:ajv@^8.17.1": "8.17.1", "npm:effect@^3.17.9": "3.17.9", + "npm:hono@^4.11.4": "4.11.4", "npm:mongodb@^6.12.0": "6.12.0", "npm:path-to-regexp@6.2.1": "6.2.1", "npm:path-to-regexp@^6.3.0": "6.3.0", - "npm:zod@^3.24.1": "3.24.1" + "npm:zod@^3.24.1": "3.24.1", + "npm:zod@^4.3.5": "4.3.5" }, "jsr": { "@oak/commons@1.0.0": { @@ -100,6 +103,9 @@ "@std/dotenv@0.225.5": { "integrity": "9ce6f9d0ec3311f74a32535aa1b8c62ed88b1ab91b7f0815797d77a6f60c922f" }, + "@std/dotenv@0.225.6": { + "integrity": "1d6f9db72f565bd26790fa034c26e45ecb260b5245417be76c2279e5734c421b" + }, "@std/encoding@1.0.6": { "integrity": "ca87122c196e8831737d9547acf001766618e78cd8c33920776c7f5885546069" }, @@ -147,6 +153,9 @@ "sparse-bitfield" ] }, + "@opentelemetry/api@1.9.0": { + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==" + }, "@standard-schema/spec@1.0.0": { "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==" }, @@ -165,15 +174,6 @@ "@types/webidl-conversions" ] }, - "ajv-formats@3.0.1_ajv@8.17.1": { - "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", - "dependencies": [ - "ajv" - ], - "optionalPeers": [ - "ajv" - ] - }, "ajv@8.17.1": { "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "dependencies": [ @@ -206,6 +206,9 @@ "fast-uri@3.1.0": { "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==" }, + "hono@4.11.4": { + "integrity": "sha512-U7tt8JsyrxSRKspfhtLET79pU8K+tInj5QZXs1jSugO1Vq5dFj3kmZsRldo29mTBfcjDRVRXrEZ6LS63Cog9ZA==" + }, "json-schema-traverse@1.0.0": { "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, @@ -269,6 +272,9 @@ }, "zod@3.24.1": { "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==" + }, + "zod@4.3.5": { + "integrity": "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==" } }, "redirects": { @@ -324,6 +330,12 @@ "npm:zod@^3.24.1" ] }, + 
"examples/hono-demo": { + "dependencies": [ + "jsr:@std/dotenv@~0.225.6", + "npm:hono@^4.11.4" + ] + }, "examples/the-expense": { "dependencies": [ "jsr:@oak/oak@^17.1.4", @@ -338,8 +350,13 @@ "jsr:@std/assert@^1.0.10", "jsr:@std/fmt@^1.0.5", "jsr:@std/ulid@1", - "npm:ajv-formats@^3.0.1", - "npm:ajv@^8.17.1" + "npm:zod@^4.3.5" + ] + }, + "packages/hono": { + "dependencies": [ + "npm:@opentelemetry/api@^1.9.0", + "npm:hono@^4.11.4" ] }, "packages/mongodb": { diff --git a/examples/hono-demo/deno.json b/examples/hono-demo/deno.json new file mode 100644 index 0000000..b353100 --- /dev/null +++ b/examples/hono-demo/deno.json @@ -0,0 +1,33 @@ +{ + "tasks": { + "dev": "deno run -A --watch src/main.ts", + "dev:otel": "sh start-with-otel.sh", + "test": "deno test -A" + }, + "lint": { + "include": ["src/"], + "exclude": [], + "rules": { + "tags": ["recommended"], + "include": [], + "exclude": ["no-explicit-any"] + } + }, + "fmt": { + "include": ["src/"], + "exclude": [], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "test": { + "include": ["src/"] + }, + "imports": { + "@std/dotenv": "jsr:@std/dotenv@^0.225.6", + "hono": "npm:hono@^4.11.4" + } +} diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts new file mode 100644 index 0000000..1598e4d --- /dev/null +++ b/examples/hono-demo/src/main.ts @@ -0,0 +1,55 @@ +import '@std/dotenv/load'; +import process from 'node:process'; +import { + getLogger, + jsonLogFormatter, + parseLogLevel, + prettyLogFormatter, + setupLogger, +} from '@nimbus/core'; +import { logger } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { cors } from 'hono/cors'; +import { secureHeaders } from 'hono/secure-headers'; +import { compress } from 'hono/compress'; + +setupLogger({ + logLevel: parseLogLevel(process.env.LOG_LEVEL), + formatter: process.env.LOG_FORMAT === 'pretty' + ? prettyLogFormatter + : jsonLogFormatter, + useConsoleColors: process.env.LOG_FORMAT === 'pretty', +}); + +const app = new Hono(); + +app.use(logger({ + enableTracing: true, + tracerName: 'api', +})); + +app.use(cors()); + +app.use(secureHeaders()); + +app.use(compress()); + +app.get('/health', (c) => { + return c.json({ status: 'ok' }); +}); + +if (process.env.PORT) { + const port = parseInt(process.env.PORT); + + Deno.serve({ port }, app.fetch); + getLogger().info({ + category: 'API', + message: `Started application on port ${port}`, + }); +} else { + getLogger().critical({ + category: 'API', + message: + `Could not start the application! Please define a valid port vienvironment variable.`, + }); +} diff --git a/examples/hono-demo/start-with-otel.sh b/examples/hono-demo/start-with-otel.sh new file mode 100644 index 0000000..07c7c98 --- /dev/null +++ b/examples/hono-demo/start-with-otel.sh @@ -0,0 +1,16 @@ +export OTEL_DENO=true +export OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" +export OTEL_EXPORTER_OTLP_ENDPOINT="https://otlp-gateway-prod-eu-west-2.grafana.net/otlp" + +# Read OTLP headers from secret file +if [ -f "./.otel_token" ]; then + export OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic $(cat ./.otel_token)" +else + echo "Error: .otel_token file not found." 
+ exit 1 +fi + +export OTEL_SERVICE_NAME=nimbus-hono-demo +export OTEL_RESOURCE_ATTRIBUTES=deployment.environment=development + +deno run -A src/main.ts diff --git a/packages/hono/README.md b/packages/hono/README.md new file mode 100644 index 0000000..7b41a5f --- /dev/null +++ b/packages/hono/README.md @@ -0,0 +1,14 @@ +Nimbus + +# Nimbus Hono + +Adapters and useful functionality to bridge Nimbus and [Hono](https://hono.dev/). + +Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. + +# License + +The MIT License (MIT) diff --git a/packages/hono/deno.json b/packages/hono/deno.json new file mode 100644 index 0000000..31943ab --- /dev/null +++ b/packages/hono/deno.json @@ -0,0 +1,37 @@ +{ + "name": "@nimbus/hono", + "version": "0.17.0", + "license": "MIT", + "author": "Daniel Gördes (https://overlap.at)", + "repository": { + "type": "git", + "url": "https://github.com/overlap-dev/Nimbus" + }, + "bugs": { + "url": "https://github.com/overlap-dev/Nimbus/issues" + }, + "homepage": "https://nimbus.overlap.at", + "exports": "./src/index.ts", + "fmt": { + "include": ["src/"], + "useTabs": false, + "lineWidth": 80, + "indentWidth": 4, + "semiColons": true, + "singleQuote": true, + "proseWrap": "always" + }, + "lint": { + "include": ["src/"], + "rules": { + "exclude": ["no-explicit-any"] + } + }, + "test": { + "include": ["src/"] + }, + "imports": { + "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0", + "hono": "npm:hono@^4.11.4" + } +} diff --git a/packages/hono/src/index.ts b/packages/hono/src/index.ts new file mode 100644 index 0000000..1498f55 --- /dev/null +++ b/packages/hono/src/index.ts @@ -0,0 +1 @@ +export * from './lib/logger.ts'; diff --git a/packages/hono/src/lib/logger.ts b/packages/hono/src/lib/logger.ts new file mode 100644 index 0000000..f1d9d82 --- /dev/null +++ b/packages/hono/src/lib/logger.ts @@ -0,0 +1,129 @@ +import type { MiddlewareHandler } from 'hono'; +import { getLogger } from '@nimbus/core'; +import { + context, + propagation, + SpanKind, + SpanStatusCode, + trace, +} from '@opentelemetry/api'; + +/** + * Options for configuring the hono logger middleware. + */ +export type LoggerOptions = { + /** + * Enable OpenTelemetry tracing for HTTP requests. + * When enabled, the middleware creates spans for each request + * and propagates trace context from incoming headers. + */ + enableTracing?: boolean; + /** + * Optionally change the name of the tracer. + * Defaults to "nimbus-hono". + */ + tracerName?: string; +}; + +const humanize = (times: string[]) => { + const [delimiter, separator] = [',', '.']; + + const orderTimes = times.map((v) => + v.replace(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1' + delimiter) + ); + + return orderTimes.join(separator); +}; + +const time = (start: number) => { + const delta = Date.now() - start; + return humanize([ + delta < 1000 ? delta + 'ms' : Math.round(delta / 1000) + 's', + ]); +}; + +/** + * Logger middleware for Hono with optional OpenTelemetry tracing. 
+ * + * When tracing is enabled, the middleware: + * - Extracts trace context from incoming request headers (traceparent/tracestate) + * - Creates a server span for the HTTP request + * - Makes the span active so child spans can be created in handlers + * - Records HTTP method, path, and status code as span attributes + * + * @example + * ```ts + * import { Hono } from 'hono'; + * import { logger } from '@nimbus/hono'; + * + * const app = new Hono(); + * app.use(logger({ enableTracing: true })); + * ``` + */ +export const logger = (options?: LoggerOptions): MiddlewareHandler => { + const tracerName = options?.tracerName ?? 'nimbus-hono'; + const tracer = trace.getTracer(tracerName); + + return async (c, next) => { + const startTime = Date.now(); + + getLogger().info({ + category: 'API', + message: `REQ: [${c.req.method}] ${c.req.path}`, + }); + + if (options?.enableTracing) { + // Extract trace context from incoming headers (traceparent, tracestate) + const parentContext = propagation.extract( + context.active(), + c.req.raw.headers, + { + get: (headers, key) => headers.get(key) ?? undefined, + keys: (headers) => [...headers.keys()], + }, + ); + + // Run the request within the extracted context + await context.with(parentContext, async () => { + await tracer.startActiveSpan( + `HTTP ${c.req.method} ${c.req.path}`, + { + kind: SpanKind.SERVER, + attributes: { + 'http.method': c.req.method, + 'url.path': c.req.path, + 'http.target': c.req.url, + }, + }, + async (span) => { + try { + await next(); + span.setAttribute('http.status_code', c.res.status); + if (c.res.status >= 400) { + span.setStatus({ code: SpanStatusCode.ERROR }); + } + } catch (err) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: (err as Error).message, + }); + span.recordException(err as Error); + throw err; + } finally { + span.end(); + } + }, + ); + }); + } else { + await next(); + } + + getLogger().info({ + category: 'API', + message: `RES: [${c.req.method}] ${c.req.path} - ${ + time(startTime) + }`, + }); + }; +}; From 29badc073360a5819e31e8319a0d0342dac1b0ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 15 Jan 2026 21:59:36 +0100 Subject: [PATCH 15/37] feat(hono): adds middleware for correlation id handling --- examples/hono-demo/src/main.ts | 4 +- packages/hono/deno.json | 1 + packages/hono/src/index.ts | 1 + packages/hono/src/lib/correlationId.ts | 107 +++++++++++++++++++++++++ packages/hono/src/lib/logger.ts | 7 ++ 5 files changed, 119 insertions(+), 1 deletion(-) create mode 100644 packages/hono/src/lib/correlationId.ts diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts index 1598e4d..38eaf19 100644 --- a/examples/hono-demo/src/main.ts +++ b/examples/hono-demo/src/main.ts @@ -7,7 +7,7 @@ import { prettyLogFormatter, setupLogger, } from '@nimbus/core'; -import { logger } from '@nimbus/hono'; +import { correlationId, logger } from '@nimbus/hono'; import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { secureHeaders } from 'hono/secure-headers'; @@ -23,6 +23,8 @@ setupLogger({ const app = new Hono(); +app.use(correlationId()); + app.use(logger({ enableTracing: true, tracerName: 'api', diff --git a/packages/hono/deno.json b/packages/hono/deno.json index 31943ab..2a28316 100644 --- a/packages/hono/deno.json +++ b/packages/hono/deno.json @@ -32,6 +32,7 @@ }, "imports": { "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0", + "@std/ulid": "jsr:@std/ulid@^1.0.0", "hono": "npm:hono@^4.11.4" } } diff --git a/packages/hono/src/index.ts 
b/packages/hono/src/index.ts index 1498f55..5034518 100644 --- a/packages/hono/src/index.ts +++ b/packages/hono/src/index.ts @@ -1 +1,2 @@ +export * from './lib/correlationId.ts'; export * from './lib/logger.ts'; diff --git a/packages/hono/src/lib/correlationId.ts b/packages/hono/src/lib/correlationId.ts new file mode 100644 index 0000000..c900157 --- /dev/null +++ b/packages/hono/src/lib/correlationId.ts @@ -0,0 +1,107 @@ +import type { MiddlewareHandler } from 'hono'; +import { ulid } from '@std/ulid'; + +/** + * Header names to check for an existing correlation ID. + * Checked in order of priority. + */ +const CORRELATION_ID_HEADERS = [ + 'x-correlation-id', + 'x-request-id', + 'request-id', +] as const; + +/** + * The key used to store the correlation ID in the Hono context. + */ +export const CORRELATION_ID_KEY = 'correlationId' as const; + +/** + * Options for configuring the correlation ID middleware. + */ +export type CorrelationIdOptions = { + /** + * Add the correlation ID to the response headers. + * Defaults to true. + */ + addToResponseHeaders?: boolean; + /** + * The header name to use when adding to response headers. + * Defaults to "x-correlation-id". + */ + responseHeaderName?: string; +}; + +/** + * Correlation ID middleware for Hono. + * + * This middleware extracts the correlation ID from incoming request headers + * or generates a new one using ULID if not present. The correlation ID is + * stored in the Hono context and optionally added to response headers. + * + * Checked headers (in order): + * - x-correlation-id + * - x-request-id + * - request-id + * + * @example + * ```ts + * import { Hono } from 'hono'; + * import { correlationId, getCorrelationId } from '@nimbus/hono'; + * + * const app = new Hono(); + * app.use(correlationId()); + * + * app.get('/', (c) => { + * const id = getCorrelationId(c); + * return c.json({ correlationId: id }); + * }); + * ``` + */ +export const correlationId = ( + options?: CorrelationIdOptions, +): MiddlewareHandler => { + const addToResponseHeaders = options?.addToResponseHeaders ?? true; + const responseHeaderName = options?.responseHeaderName ?? + 'x-correlation-id'; + + return async (c, next) => { + let id: string | undefined; + + // Check incoming headers for existing correlation ID + for (const header of CORRELATION_ID_HEADERS) { + const value = c.req.header(header); + if (value) { + id = value; + break; + } + } + + // Generate new ID if not found + if (!id) { + id = ulid(); + } + + // Store in context + c.set(CORRELATION_ID_KEY, id); + + // Optionally add to response headers + if (addToResponseHeaders) { + c.header(responseHeaderName, id); + } + + await next(); + }; +}; + +/** + * Get the correlation ID from the Hono context. + * + * @param c - The Hono context + * @returns The correlation ID or undefined if not set + */ +export const getCorrelationId = (c: { + get: (key: typeof CORRELATION_ID_KEY) => string | undefined; +}): string | undefined => { + return c.get(CORRELATION_ID_KEY); +}; diff --git a/packages/hono/src/lib/logger.ts b/packages/hono/src/lib/logger.ts index f1d9d82..b545d14 100644 --- a/packages/hono/src/lib/logger.ts +++ b/packages/hono/src/lib/logger.ts @@ -7,6 +7,7 @@ import { SpanStatusCode, trace, } from '@opentelemetry/api'; +import { getCorrelationId } from './correlationId.ts'; /** * Options for configuring the hono logger middleware. 
@@ -66,10 +67,12 @@ export const logger = (options?: LoggerOptions): MiddlewareHandler => { return async (c, next) => { const startTime = Date.now(); + const correlationId = getCorrelationId(c); getLogger().info({ category: 'API', message: `REQ: [${c.req.method}] ${c.req.path}`, + correlationId, }); if (options?.enableTracing) { @@ -93,6 +96,9 @@ export const logger = (options?: LoggerOptions): MiddlewareHandler => { 'http.method': c.req.method, 'url.path': c.req.path, 'http.target': c.req.url, + ...(correlationId && { + correlation_id: correlationId, + }), }, }, async (span) => { @@ -124,6 +130,7 @@ export const logger = (options?: LoggerOptions): MiddlewareHandler => { message: `RES: [${c.req.method}] ${c.req.path} - ${ time(startTime) }`, + correlationId, }); }; }; From 4c855a464524ae2396d04031c898b5d537cedee1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 15 Jan 2026 22:10:02 +0100 Subject: [PATCH 16/37] feat(core): adds correlation id to pretty logs format --- packages/core/src/lib/log/logFormatter.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/core/src/lib/log/logFormatter.ts b/packages/core/src/lib/log/logFormatter.ts index 5e6d25a..3d57247 100644 --- a/packages/core/src/lib/log/logFormatter.ts +++ b/packages/core/src/lib/log/logFormatter.ts @@ -60,6 +60,11 @@ export const prettyLogFormatter: LogFormatter = ( ): string | string[] => { let dataString = ''; let errorString = ''; + let correlationId = ''; + + if (logRecord.correlationId) { + correlationId = `(${logRecord.correlationId}) `; + } if (logRecord.data) { dataString = JSON.stringify(logRecord.data, null, 2); @@ -69,14 +74,14 @@ export const prettyLogFormatter: LogFormatter = ( errorString = JSON.stringify(logRecord.error, null, 2); return [ - `[${logRecord.category}] ${logRecord.level.toUpperCase()} :: ${logRecord.message}`, + `[${logRecord.category}] ${logRecord.level.toUpperCase()} ${correlationId}:: ${logRecord.message}`, errorString.length ? `\n${errorString}` : '', logRecord.error.stack ? `\n${logRecord.error.stack}` : '', dataString.length ? `\n${dataString}` : '', ]; } - return `[${logRecord.category}] ${logRecord.level.toUpperCase()} :: ${logRecord.message}${ + return `[${logRecord.category}] ${logRecord.level.toUpperCase()} ${correlationId}:: ${logRecord.message}${ dataString.length ? 
`\n${dataString}` : '' }`; }; From 1107393b564e531e9bb4935c6617ed63b7c91524 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Sat, 17 Jan 2026 16:32:22 +0100 Subject: [PATCH 17/37] simplify, go back to zod, cleanup --- deno.json | 7 +- deno.lock | 163 +------- docs/guide/core/commands.md | 315 ++++++++++----- docs/guide/core/events.md | 371 +++++++++++++++--- docs/guide/core/queries.md | 355 +++++++++++++---- docs/guide/what-is-nimbus.md | 21 +- examples/gustav/concurrency-tester.sh | 57 --- examples/gustav/deno.json | 38 -- .../infrastructure/http/auth.middleware.ts | 53 --- .../recipe/core/commands/addRecipe.ts | 54 --- .../contexts/recipe/core/commands/command.md | 6 - .../recipe/core/commands/deleteRecipe.ts | 58 --- .../recipe/core/commands/updateRecipe.ts | 75 ---- .../contexts/recipe/core/domain/ingredient.ts | 8 - .../src/contexts/recipe/core/domain/model.md | 3 - .../src/contexts/recipe/core/domain/recipe.ts | 9 - .../recipe/core/domain/recipeAggregate.ts | 88 ----- .../src/contexts/recipe/core/events/event.md | 3 - .../recipe/core/events/recipeAdded.ts | 17 - .../recipe/core/events/recipeDeleted.ts | 17 - .../recipe/core/events/recipeUpdated.ts | 23 -- .../src/contexts/recipe/core/ports/port.md | 21 - .../recipe/core/ports/recipeRepository.ts | 27 -- .../contexts/recipe/core/queries/getRecipe.ts | 20 - .../recipe/core/queries/listRecipes.ts | 26 -- .../src/contexts/recipe/core/queries/query.md | 6 - .../eventHandler/recipeAdded.handler.ts | 18 - .../eventHandler/recipeDeleted.handler.ts | 15 - .../eventHandler/recipeUpdated.handler.ts | 21 - .../infrastructure/http/commandRouter.ts | 35 -- .../http/handler/addRecipe.handler.ts | 58 --- .../http/handler/deleteRecipe.handler.ts | 47 --- .../http/handler/getRecipe.handler.ts | 13 - .../http/handler/listRecipes.handler.ts | 16 - .../http/handler/updateRecipe.handler.ts | 50 --- .../recipe/infrastructure/http/queryRouter.ts | 28 -- .../recipe/infrastructure/http/router.ts | 89 ----- .../http/schemas/addRecipeCommandSchema.ts | 66 ---- .../http/schemas/deleteRecipeCommandSchema.ts | 28 -- .../http/schemas/getRecipeQuerySchema.ts | 34 -- .../http/schemas/listRecipesQuerySchema.ts | 31 -- .../http/schemas/updateRecipeCommandSchema.ts | 66 ---- .../repository/recipeMemoryRepository.ts | 91 ----- examples/gustav/src/exampleApiSpec.json | 183 --------- examples/gustav/src/main.ts | 61 --- .../gustav/src/shared/core/domain/unit.ts | 7 - .../src/shared/infrastructure/eventStore.ts | 123 ------ .../src/shared/infrastructure/http/router.ts | 45 --- .../src/shared/infrastructure/http/schemas.ts | 24 -- .../src/shared/infrastructure/mongodb.ts | 33 -- examples/hono-demo/deno.json | 4 +- .../users/core/commands/addUser.command.ts | 38 ++ .../src/iam/users/core/domain/user.ts | 10 + .../iam/users/core/queries/getUser.query.ts | 0 .../src/iam/users/shell/http/router.ts | 31 ++ .../users/shell/messages/addUser.command.ts | 13 + .../shell/messages/registerUserMessages.ts | 14 + examples/hono-demo/src/main.ts | 29 +- examples/hono-demo/src/shared/shell/http.ts | 31 ++ .../src/shared/shell/messageRouter.ts | 29 ++ examples/the-expense/README.md | 16 - examples/the-expense/deno.json | 37 -- .../src/account/core/account.type.ts | 11 - .../src/account/core/commands/addAccount.ts | 31 -- .../account/core/commands/deleteAccount.ts | 26 -- .../src/account/core/events/accountAdded.ts | 14 - .../src/account/core/queries/getAccount.ts | 23 -- .../src/account/core/queries/listAccounts.ts | 27 -- .../src/account/shell/account.collection.ts | 
29 -- .../src/account/shell/account.eventBus.ts | 10 - .../src/account/shell/account.repository.ts | 41 -- .../src/account/shell/account.router.ts | 39 -- .../shell/commands/addAccount.handler.ts | 53 --- .../shell/commands/deleteAccount.handler.ts | 25 -- .../shell/events/accountAdded.handler.ts | 23 -- .../shell/queries/getAccount.handler.ts | 21 - .../shell/queries/listAccounts.handler.ts | 47 --- .../src/auth/shell/auth.middleware.ts | 53 --- examples/the-expense/src/eventBus.ts | 22 -- examples/the-expense/src/main.ts | 61 --- examples/the-expense/src/mongodb.ts | 33 -- examples/the-expense/src/router.ts | 45 --- examples/the-expense/src/seedCollections.ts | 28 -- .../src/shared/withPagination.type.ts | 6 - examples/the-expense/start-with-otel.sh | 16 - packages/client/README.md | 10 - packages/client/deno.json | 34 -- packages/client/src/index.ts | 1 - packages/client/src/lib/sdk.ts | 6 - packages/core/deno.json | 3 +- packages/core/src/index.ts | 5 +- packages/core/src/lib/auth/authContext.ts | 7 - packages/core/src/lib/event/eventBus.ts | 3 +- .../src/lib/exception/concurrencyException.ts | 18 - packages/core/src/lib/exception/exception.ts | 12 +- .../src/lib/exception/forbiddenException.ts | 2 +- .../lib/exception/genericException.test.ts | 10 +- .../src/lib/exception/genericException.ts | 4 +- .../exception/invalidInputException.test.ts | 10 +- .../lib/exception/invalidInputException.ts | 7 +- .../lib/exception/notFoundException.test.ts | 8 +- .../src/lib/exception/notFoundException.ts | 2 +- .../lib/exception/unauthorizedException.ts | 2 +- packages/core/src/lib/message/command.ts | 141 ++++--- packages/core/src/lib/message/event.ts | 142 ++++--- packages/core/src/lib/message/query.ts | 119 +++--- packages/core/src/lib/message/router.ts | 173 ++++++++ .../core/src/lib/router/messageRouter.test.ts | 256 ------------ packages/core/src/lib/router/router.ts | 210 ---------- packages/core/src/lib/router/testFixtures.ts | 155 -------- packages/core/src/lib/validator/validator.ts | 228 ----------- packages/eventsourcing/README.md | 10 - packages/eventsourcing/deno.json | 43 -- packages/eventsourcing/src/index.ts | 2 - packages/eventsourcing/src/lib/aggregate.ts | 167 -------- packages/eventsourcing/src/lib/eventStore.ts | 258 ------------ packages/eventsourcingdb/README.md | 10 - packages/eventsourcingdb/deno.json | 43 -- packages/eventsourcingdb/src/index.ts | 1 - .../src/lib/eventSourcingDb.ts | 309 --------------- .../src/lib/eventSourcingDbSubscription.ts | 180 --------- packages/hono/src/index.ts | 5 +- .../src/lib/{ => middleware}/correlationId.ts | 4 +- .../hono/src/lib/{ => middleware}/logger.ts | 4 +- packages/hono/src/lib/onError.ts | 59 +++ 125 files changed, 1481 insertions(+), 5141 deletions(-) delete mode 100644 examples/gustav/concurrency-tester.sh delete mode 100644 examples/gustav/deno.json delete mode 100644 examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/commands/command.md delete mode 100644 examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/domain/ingredient.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/domain/model.md delete mode 100644 examples/gustav/src/contexts/recipe/core/domain/recipe.ts delete mode 100644 
examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/events/event.md delete mode 100644 examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/ports/port.md delete mode 100644 examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts delete mode 100644 examples/gustav/src/contexts/recipe/core/queries/query.md delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/router.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts delete mode 100644 examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts delete mode 100644 examples/gustav/src/exampleApiSpec.json delete mode 100644 examples/gustav/src/main.ts delete mode 100644 examples/gustav/src/shared/core/domain/unit.ts delete mode 100644 examples/gustav/src/shared/infrastructure/eventStore.ts delete mode 100644 examples/gustav/src/shared/infrastructure/http/router.ts delete mode 100644 examples/gustav/src/shared/infrastructure/http/schemas.ts delete mode 100644 examples/gustav/src/shared/infrastructure/mongodb.ts create mode 100644 examples/hono-demo/src/iam/users/core/commands/addUser.command.ts create mode 100644 examples/hono-demo/src/iam/users/core/domain/user.ts create mode 100644 examples/hono-demo/src/iam/users/core/queries/getUser.query.ts create mode 100644 examples/hono-demo/src/iam/users/shell/http/router.ts create mode 100644 examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts create mode 100644 examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts create mode 100644 
examples/hono-demo/src/shared/shell/http.ts create mode 100644 examples/hono-demo/src/shared/shell/messageRouter.ts delete mode 100644 examples/the-expense/README.md delete mode 100644 examples/the-expense/deno.json delete mode 100644 examples/the-expense/src/account/core/account.type.ts delete mode 100644 examples/the-expense/src/account/core/commands/addAccount.ts delete mode 100644 examples/the-expense/src/account/core/commands/deleteAccount.ts delete mode 100644 examples/the-expense/src/account/core/events/accountAdded.ts delete mode 100644 examples/the-expense/src/account/core/queries/getAccount.ts delete mode 100644 examples/the-expense/src/account/core/queries/listAccounts.ts delete mode 100644 examples/the-expense/src/account/shell/account.collection.ts delete mode 100644 examples/the-expense/src/account/shell/account.eventBus.ts delete mode 100644 examples/the-expense/src/account/shell/account.repository.ts delete mode 100644 examples/the-expense/src/account/shell/account.router.ts delete mode 100644 examples/the-expense/src/account/shell/commands/addAccount.handler.ts delete mode 100644 examples/the-expense/src/account/shell/commands/deleteAccount.handler.ts delete mode 100644 examples/the-expense/src/account/shell/events/accountAdded.handler.ts delete mode 100644 examples/the-expense/src/account/shell/queries/getAccount.handler.ts delete mode 100644 examples/the-expense/src/account/shell/queries/listAccounts.handler.ts delete mode 100644 examples/the-expense/src/auth/shell/auth.middleware.ts delete mode 100644 examples/the-expense/src/eventBus.ts delete mode 100644 examples/the-expense/src/main.ts delete mode 100644 examples/the-expense/src/mongodb.ts delete mode 100644 examples/the-expense/src/router.ts delete mode 100644 examples/the-expense/src/seedCollections.ts delete mode 100644 examples/the-expense/src/shared/withPagination.type.ts delete mode 100644 examples/the-expense/start-with-otel.sh delete mode 100644 packages/client/README.md delete mode 100644 packages/client/deno.json delete mode 100644 packages/client/src/index.ts delete mode 100644 packages/client/src/lib/sdk.ts delete mode 100644 packages/core/src/lib/auth/authContext.ts delete mode 100644 packages/core/src/lib/exception/concurrencyException.ts create mode 100644 packages/core/src/lib/message/router.ts delete mode 100644 packages/core/src/lib/router/messageRouter.test.ts delete mode 100644 packages/core/src/lib/router/router.ts delete mode 100644 packages/core/src/lib/router/testFixtures.ts delete mode 100644 packages/core/src/lib/validator/validator.ts delete mode 100644 packages/eventsourcing/README.md delete mode 100644 packages/eventsourcing/deno.json delete mode 100644 packages/eventsourcing/src/index.ts delete mode 100644 packages/eventsourcing/src/lib/aggregate.ts delete mode 100644 packages/eventsourcing/src/lib/eventStore.ts delete mode 100644 packages/eventsourcingdb/README.md delete mode 100644 packages/eventsourcingdb/deno.json delete mode 100644 packages/eventsourcingdb/src/index.ts delete mode 100644 packages/eventsourcingdb/src/lib/eventSourcingDb.ts delete mode 100644 packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts rename packages/hono/src/lib/{ => middleware}/correlationId.ts (97%) rename packages/hono/src/lib/{ => middleware}/logger.ts (98%) create mode 100644 packages/hono/src/lib/onError.ts diff --git a/deno.json b/deno.json index 2a596c8..003be18 100644 --- a/deno.json +++ b/deno.json @@ -4,16 +4,11 @@ }, "workspace": [ "./packages/core", - "./packages/client", - 
"./packages/eventsourcing", - "./packages/eventsourcingdb", "./packages/mongodb", "./packages/oak", "./packages/hono", "./packages/utils", - "./examples/gustav", - "./examples/the-expense", "./examples/hono-demo" ], "nodeModulesDir": "none" -} +} \ No newline at end of file diff --git a/deno.lock b/deno.lock index ccd9c39..8331a80 100644 --- a/deno.lock +++ b/deno.lock @@ -1,102 +1,21 @@ { "version": "5", "specifiers": { - "jsr:@oak/commons@1": "1.0.0", - "jsr:@oak/oak@^17.1.4": "17.1.6", - "jsr:@oak/oak@^17.1.6": "17.1.6", - "jsr:@std/assert@1": "1.0.10", - "jsr:@std/assert@^1.0.10": "1.0.10", - "jsr:@std/bytes@1": "1.0.4", - "jsr:@std/bytes@^1.0.2": "1.0.4", - "jsr:@std/crypto@1": "1.0.3", "jsr:@std/dotenv@*": "0.225.3", - "jsr:@std/dotenv@~0.225.5": "0.225.6", "jsr:@std/dotenv@~0.225.6": "0.225.6", - "jsr:@std/encoding@1": "1.0.6", - "jsr:@std/encoding@^1.0.5": "1.0.6", "jsr:@std/fmt@^1.0.4": "1.0.5", "jsr:@std/fmt@^1.0.5": "1.0.5", - "jsr:@std/http@1": "1.0.12", - "jsr:@std/internal@^1.0.5": "1.0.5", - "jsr:@std/io@0.224": "0.224.9", - "jsr:@std/media-types@1": "1.1.0", - "jsr:@std/path@1": "1.0.8", "jsr:@std/text@^1.0.10": "1.0.10", "jsr:@std/ulid@1": "1.0.0", - "jsr:@tajpouria/cors@^1.2.1": "1.2.1", "npm:@opentelemetry/api@^1.9.0": "1.9.0", "npm:@types/node@*": "22.5.4", "npm:ajv@^8.17.1": "8.17.1", - "npm:effect@^3.17.9": "3.17.9", "npm:hono@^4.11.4": "4.11.4", "npm:mongodb@^6.12.0": "6.12.0", - "npm:path-to-regexp@6.2.1": "6.2.1", - "npm:path-to-regexp@^6.3.0": "6.3.0", - "npm:zod@^3.24.1": "3.24.1", + "npm:zod@^3.24.1": "3.25.76", "npm:zod@^4.3.5": "4.3.5" }, "jsr": { - "@oak/commons@1.0.0": { - "integrity": "49805b55603c3627a9d6235c0655aa2b6222d3036b3a13ff0380c16368f607ac", - "dependencies": [ - "jsr:@std/assert@1", - "jsr:@std/bytes@1", - "jsr:@std/crypto", - "jsr:@std/encoding@1", - "jsr:@std/http", - "jsr:@std/media-types" - ] - }, - "@oak/oak@17.1.3": { - "integrity": "d89296c22db91681dd3a2a1e1fd14e258d0d5a9654de55637aee5b661c159f33", - "dependencies": [ - "jsr:@oak/commons", - "jsr:@std/assert@1", - "jsr:@std/bytes@1", - "jsr:@std/crypto", - "jsr:@std/http", - "jsr:@std/io", - "jsr:@std/media-types", - "jsr:@std/path", - "npm:path-to-regexp@6.2.1" - ] - }, - "@oak/oak@17.1.4": { - "integrity": "60530b582bf276ff741e39cc664026781aa08dd5f2bc5134d756cc427bf2c13e", - "dependencies": [ - "jsr:@oak/commons", - "jsr:@std/assert@1", - "jsr:@std/bytes@1", - "jsr:@std/http", - "jsr:@std/media-types", - "jsr:@std/path", - "npm:path-to-regexp@^6.3.0" - ] - }, - "@oak/oak@17.1.6": { - "integrity": "c7eef2eec733fba8e72b679bba3b8cf2fceccf5ef489a8b8fb43571908c0335d", - "dependencies": [ - "jsr:@oak/commons", - "jsr:@std/assert@1", - "jsr:@std/bytes@1", - "jsr:@std/http", - "jsr:@std/media-types", - "jsr:@std/path", - "npm:path-to-regexp@^6.3.0" - ] - }, - "@std/assert@1.0.10": { - "integrity": "59b5cbac5bd55459a19045d95cc7c2ff787b4f8527c0dd195078ff6f9481fbb3", - "dependencies": [ - "jsr:@std/internal" - ] - }, - "@std/bytes@1.0.4": { - "integrity": "11a0debe522707c95c7b7ef89b478c13fb1583a7cfb9a85674cd2cc2e3a28abc" - }, - "@std/crypto@1.0.3": { - "integrity": "a2a32f51ddef632d299e3879cd027c630dcd4d1d9a5285d6e6788072f4e51e7f" - }, "@std/dotenv@0.225.3": { "integrity": "a95e5b812c27b0854c52acbae215856d9cce9d4bbf774d938c51d212711e8d4a" }, @@ -106,44 +25,17 @@ "@std/dotenv@0.225.6": { "integrity": "1d6f9db72f565bd26790fa034c26e45ecb260b5245417be76c2279e5734c421b" }, - "@std/encoding@1.0.6": { - "integrity": "ca87122c196e8831737d9547acf001766618e78cd8c33920776c7f5885546069" - }, "@std/fmt@1.0.4": 
{ "integrity": "e14fe5bedee26f80877e6705a97a79c7eed599e81bb1669127ef9e8bc1e29a74" }, "@std/fmt@1.0.5": { "integrity": "0cfab43364bc36650d83c425cd6d99910fc20c4576631149f0f987eddede1a4d" }, - "@std/http@1.0.12": { - "integrity": "85246d8bfe9c8e2538518725b158bdc31f616e0869255f4a8d9e3de919cab2aa", - "dependencies": [ - "jsr:@std/encoding@^1.0.5" - ] - }, - "@std/internal@1.0.5": { - "integrity": "54a546004f769c1ac9e025abd15a76b6671ddc9687e2313b67376125650dc7ba" - }, - "@std/io@0.224.9": { - "integrity": "4414664b6926f665102e73c969cfda06d2c4c59bd5d0c603fd4f1b1c840d6ee3", - "dependencies": [ - "jsr:@std/bytes@^1.0.2" - ] - }, - "@std/media-types@1.1.0": { - "integrity": "c9d093f0c05c3512932b330e3cc1fe1d627b301db33a4c2c2185c02471d6eaa4" - }, - "@std/path@1.0.8": { - "integrity": "548fa456bb6a04d3c1a1e7477986b6cffbce95102d0bb447c67c4ee70e0364be" - }, "@std/text@1.0.10": { "integrity": "9dcab377450253c0efa9a9a0c731040bfd4e1c03f8303b5934381467b7954338" }, "@std/ulid@1.0.0": { "integrity": "d41c3d27a907714413649fee864b7cde8d42ee68437d22b79d5de4f81d808780" - }, - "@tajpouria/cors@1.2.1": { - "integrity": "eca42e4fb7cb3906ef0ee3d1e565dd6bb4632ccd8e70a95cf4279759743328f0" } }, "npm": { @@ -156,9 +48,6 @@ "@opentelemetry/api@1.9.0": { "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==" }, - "@standard-schema/spec@1.0.0": { - "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==" - }, "@types/node@22.5.4": { "integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", "dependencies": [ @@ -187,19 +76,6 @@ "integrity": "sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==", "deprecated": true }, - "effect@3.17.9": { - "integrity": "sha512-Nkkn9n1zhy30Dq0MpQatDCH7nfYnOIiebkOHNxmmvoVnEDKCto+2ZwDDWFGzcN/ojwfqjRXWGC9Lo91K5kwZCg==", - "dependencies": [ - "@standard-schema/spec", - "fast-check" - ] - }, - "fast-check@3.23.2": { - "integrity": "sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==", - "dependencies": [ - "pure-rand" - ] - }, "fast-deep-equal@3.1.3": { "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, @@ -230,18 +106,9 @@ "mongodb-connection-string-url" ] }, - "path-to-regexp@6.2.1": { - "integrity": "sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==" - }, - "path-to-regexp@6.3.0": { - "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==" - }, "punycode@2.3.1": { "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, - "pure-rand@6.1.0": { - "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==" - }, "require-from-string@2.0.2": { "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" }, @@ -270,8 +137,8 @@ "webidl-conversions" ] }, - "zod@3.24.1": { - "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==" + "zod@3.25.76": { + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==" }, "zod@4.3.5": { "integrity": "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==" @@ 
-318,31 +185,12 @@ }, "workspace": { "members": { - "examples/gustav": { - "dependencies": [ - "jsr:@oak/oak@^17.1.6", - "jsr:@std/dotenv@~0.225.5", - "jsr:@std/ulid@1", - "jsr:@tajpouria/cors@^1.2.1", - "npm:ajv@^8.17.1", - "npm:effect@^3.17.9", - "npm:mongodb@^6.12.0", - "npm:zod@^3.24.1" - ] - }, "examples/hono-demo": { "dependencies": [ "jsr:@std/dotenv@~0.225.6", - "npm:hono@^4.11.4" - ] - }, - "examples/the-expense": { - "dependencies": [ - "jsr:@oak/oak@^17.1.4", "jsr:@std/ulid@1", - "jsr:@tajpouria/cors@^1.2.1", - "npm:mongodb@^6.12.0", - "npm:zod@^3.24.1" + "npm:hono@^4.11.4", + "npm:zod@^4.3.5" ] }, "packages/core": { @@ -355,6 +203,7 @@ }, "packages/hono": { "dependencies": [ + "jsr:@std/ulid@1", "npm:@opentelemetry/api@^1.9.0", "npm:hono@^4.11.4" ] diff --git a/docs/guide/core/commands.md b/docs/guide/core/commands.md index 24b739b..93b56b4 100644 --- a/docs/guide/core/commands.md +++ b/docs/guide/core/commands.md @@ -1,127 +1,228 @@ # Commands -Commands are the messages that tell your application to do something. -Like "Hey, create a new account with the following data". +Commands represent write operations - intentions to change system state in the application. + +Commands follow the Command pattern from CQRS (Command Query Responsibility Segregation), where writes and reads are separated for better scalability and maintainability. ::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) +The examples on this page reference the Gustav application. -Check it out and run it with `deno task dev` +You can find the full example on GitHub: [Gustav Recipe App](https://github.com/overlap-dev/Nimbus/tree/main/examples/gustav) ::: -## Example - -At first we define the command and the core functionality in a file called `addAccount.ts` in the `core/commands` folder. If you like you can also split the command definition and the function into separate files. Or add more functions to handle the core business logic involved when adding an account. - -Next we add a command handler in a fille called `addAccount.handler.ts` in the `shell/commands` folder. This is the first function that is executed when the app receives this specific command. - -The command handler contains all the glue needed to communicate with other parts of the application and to handle all the side-effects. In this example we first call the core function to get a new account. Then we write the account to the database, we publish an event that the account was added and finally we return the account to the caller. - -::: code-group - -```typescript [Core] -import { - AuthContext, - Command, - CommandMetadata, - InvalidInputException, -} from "@nimbus/core"; -import { ObjectId } from "mongodb"; -import { z } from "zod"; -import { Account } from "../account.type.ts"; - -// Define the data for the command -export const AddAccountData = z.object({ - name: z.string(), -}); -export type AddAccountData = z.infer; - -// Define the Command with it's unique name, data and metadata -export const AddAccountCommand = Command( - z.literal("ADD_ACCOUNT"), - AddAccountData, - CommandMetadata(AuthContext) // You can define you own meta data type if needed -); -export type AddAccountCommand = z.infer; - -// The core logic -// We take the command data and the authContext and return the new account. -// -// Apply any important business logic here if needed. 
-// For example to set the balance of the account to 0 -// or in case of a promotion add a starting balance. -export const addAccount = ( - data: AddAccountData, - authContext?: AuthContext -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } +## Key Characteristics - return { - _id: new ObjectId().toString(), - name: data.name, - status: "active", - }; +- **Write Operations**: Commands modify application state +- **Intent-Based**: Commands express what should happen (e.g., "AddRecipe", "DeleteRecipe") +- **Validated**: Command data is validated before execution +- **Type-Safe**: Full TypeScript type safety for command data and handlers + +## Command Structure + +A command in Nimbus follows the CloudEvents specification and consists of: + +```typescript +export type Command<T> = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + data: T; + datacontenttype: string; +}; +``` + +## Example: Add Recipe Command + +### Define the Command Type + +Create a command type definition in the core layer: + +```typescript +// core/commands/addRecipe.ts +import { Command } from '@nimbus/core'; +import { Recipe } from '../domain/recipe.ts'; + +export const AddRecipeCommandType = 'at.overlap.nimbus.app-recipe' as const; + +export type AddRecipeCommand = Command<Recipe> & { + type: typeof AddRecipeCommandType; }; ``` -```typescript [Shell] -import { InvalidInputException, type RouteHandler } from "@nimbus/core"; -import { eventBus } from "../../../eventBus.ts"; -import { Account } from "../../core/account.type.ts"; -import { - addAccount, - AddAccountCommand, -} from "../../core/commands/addAccount.ts"; -import { AccountAddedEvent } from "../../core/events/accountAdded.ts"; -import { accountRepository } from "../account.repository.ts"; - -export const addAccountHandler: RouteHandler = async ( - command: AddAccountCommand -) => { - // Call the Core with validated and type-safe inputs. - // The Nimbus router takes care these are type checked and validated. - // Learn more about the router on the next sections of the guide.
- let account = addAccount(command.data, command.metadata.authContext); - - // Write the new account to the database - try { - account = await accountRepository.insertOne({ item: account }); - } catch (error: any) { - if (error.message.startsWith("E11000")) { - throw new InvalidInputException("Account already exists", { - errorCode: "ACCOUNT_ALREADY_EXISTS", - reason: "An account with the same name already exists", - }); - } - - throw error; +### Implement Pure Core Logic + +The core function contains pure business logic with no I/O operations: + +```typescript +// core/commands/addRecipe.ts +export const addRecipe = ( + command: AddRecipeCommand, + state: RecipeState, +): { + newState: Recipe; + events: RecipeAddedEvent[]; +} => { + // Business validation + if (state !== null) { + throw new InvalidInputException('Recipe already exists', { + errorCode: 'DUPLICATE_RECIPE', + }); } - // We want to publish an event that the account was added - // See more about events in the next section of the guide - eventBus.putEvent({ - name: "ACCOUNT_ADDED", - data: { - account: account, - }, - metadata: { - correlationId: command.metadata.correlationId, - authContext: command.metadata.authContext, - }, - }); - - // Return the successful result + const subject = recipeSubject(command.data.slug); + const event = createRecipeAddedEvent(command, subject); + return { - statusCode: 200, - data: account, + newState: command.data, + events: [event], }; }; ``` -::: +### Implement Shell Handler + +The handler orchestrates I/O operations and calls the pure core logic: + +```typescript +// infrastructure/http/handler/addRecipe.handler.ts +import { MessageHandler } from '@nimbus/core'; +import { loadAggregate } from '@nimbus/eventsourcing'; +import { eventStore } from '../eventStore.ts'; +import { addRecipe, AddRecipeCommand } from '../../../core/commands/addRecipe.ts'; +import { recipeReducer, recipeSubject } from '../../../core/domain/recipeAggregate.ts'; + +export const addRecipeHandler: MessageHandler = + async (command) => { + const subject = recipeSubject(command.data.slug); + + // Load current state from event store + const snapshot = await loadAggregate( + eventStore, + subject, + null, + recipeReducer, + ); + + // Call pure core logic + const { newState, events } = addRecipe(command, snapshot.state); + + // Persist events with optimistic concurrency control + await eventStore.writeEvents(events, { + preconditions: snapshot.lastEventId !== undefined + ? [{ type: 'isSubjectOnEventId', payload: { subject, eventId: snapshot.lastEventId } }] + : [{ type: 'isSubjectPristine', payload: { subject } }], + }); + + return newState; + }; +``` + +## Architecture Pattern + +Commands follow the Pure Core - Imperative Shell pattern: + +1. **Core Layer** (Pure): + - Define command types + - Implement business logic + - No I/O operations + - Returns new state and events to persist + +2. 
**Infrastructure Layer** (Shell): + - Define message handlers + - Load current state from data sources + - Call pure core functions + - Persist results + - Handle errors and responses + +## Best Practices + +### Keep Core Pure + +The core command logic should be completely free of side effects: + +```typescript +// ✅ Good - Pure function +export const addRecipe = (command: AddRecipeCommand, state: RecipeState) => { + if (state !== null) throw new InvalidInputException('Recipe already exists'); + return { newState: command.data, events: [createEvent(command)] }; +}; + +// ❌ Bad - Has side effects +export const addRecipe = async (command: AddRecipeCommand) => { + const existing = await db.findRecipe(command.data.slug); // I/O in core! + if (existing) throw new InvalidInputException('Recipe already exists'); + await db.saveRecipe(command.data); // I/O in core! +}; +``` + +### Use Ports for Dependencies + +When the core needs external data, define ports (interfaces): + +```typescript +// core/ports/recipeRepository.ts +export interface RecipeRepository { + getBySlug(slug: string): Promise; + save(recipe: Recipe): Promise; +} +``` + +The shell provides the implementation (adapter): + +```typescript +// infrastructure/repository/recipeMemoryRepository.ts +export const recipeMemoryRepository: RecipeRepository = { + async getBySlug(slug: string) { /* implementation */ }, + async save(recipe: Recipe) { /* implementation */ }, +}; +``` + +### Validate Early + +Validate command data before reaching core logic using JSON schemas: + +```typescript +// infrastructure/http/schemas/addRecipeCommandSchema.ts +export const addRecipeCommandSchema = { + $id: 'https://nimbus.overlap.at/schemas/commands/add-recipe/v1', + type: 'object', + properties: { + slug: { type: 'string', minLength: 1, maxLength: 100 }, + title: { type: 'string', minLength: 1, maxLength: 200 }, + ingredients: { type: 'array', items: { $ref: '#/definitions/ingredient' } }, + }, + required: ['slug', 'title', 'ingredients'], +}; +``` + +### Emit Events + +Commands should emit domain events for other parts of the system to react to: + +```typescript +const event: RecipeAddedEvent = { + specversion: '1.0', + id: ulid(), + correlationid: command.correlationid, + time: new Date().toISOString(), + source: EVENT_SOURCE, + type: 'at.overlap.nimbus.recipe-added', + subject: `/recipes/${command.data.slug}`, + data: command.data, + datacontenttype: 'application/json', +}; +``` + +## Routing Commands + +Commands are routed to handlers using the message router. See the [HTTP Guide](/guide/http/) for more details on routing commands through HTTP endpoints. -## Receive and Route Commands +## Related Patterns -Learn more about how to receive and route commands in the [Router](/guide/core/router.md) guide. +- [Queries](/guide/core/queries) - Read operations +- [Events](/guide/core/events) - Domain events +- [Event Sourcing](/guide/eventsourcing/) - Persisting state as events +- [CQRS](/guide/what-is-nimbus#cqrs-event-sourcing) - Separating reads and writes diff --git a/docs/guide/core/events.md b/docs/guide/core/events.md index 212c166..1e359e3 100644 --- a/docs/guide/core/events.md +++ b/docs/guide/core/events.md @@ -1,74 +1,351 @@ # Events -Events are the messages that tell your application something has happened. -Like "Hey, the account with the ID 1234 has been updated". +Events represent facts - things that have already happened in the system. + +Events are immutable records of state changes that occurred in the application. 
They enable event-driven architectures, event sourcing, and asynchronous processing. ::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) +The examples on this page reference the Gustav application. -Check it out and run it with `deno task dev` +You can find the full example on GitHub: [Gustav Recipe App](https://github.com/overlap-dev/Nimbus/tree/main/examples/gustav) ::: -## Example +## Key Characteristics + +- **Immutable Facts**: Events represent things that already happened and cannot be changed +- **Past Tense**: Event names use past tense (e.g., "RecipeAdded", not "AddRecipe") +- **Observable**: Other parts of the system can subscribe and react to events +- **Type-Safe**: Full TypeScript type safety for event data and handlers -At first we define the event in a file called `accountAdded.ts` in the `core/events` folder. +## Event Structure -Next we create an event handler function in a fille called `accountAdded.handler.ts` in the `shell/events` folder. This is the first function that is executed when the app receives this specific event. +An event in Nimbus follows the CloudEvents specification and consists of: -The event handler contains all the glue needed to communicate with other parts of the application and to handle all the side-effects. In this example we simply wait a second and log an info to the console. +```typescript +export type Event<T> = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + subject: string; + data: T; + datacontenttype: string; +}; +``` -::: code-group +## Example: Recipe Added Event -```typescript [Core] -import { AuthContext, Event, EventMetadata } from "@nimbus/core"; -import { z } from "zod"; -import { Account } from "../account.type.ts"; +### Define the Event Type -// Define the data for the event -export const AccountAddedData = z.object({ - account: Account, -}); -export type AccountAddedData = z.infer<typeof AccountAddedData>; +Create an event type definition in the core layer: -// Define the Event with it's unique name, data and metadata -export const AccountAddedEvent = Event( - z.literal("ACCOUNT_ADDED"), - AccountAddedData, - EventMetadata(AuthContext) // You can define you own meta data type if needed -); -export type AccountAddedEvent = z.infer<typeof AccountAddedEvent>; +```typescript +// core/events/recipeAdded.ts +import { Event } from '@nimbus/core'; +import { Recipe } from '../domain/recipe.ts'; + +export const RecipeAddedEventType = 'at.overlap.nimbus.recipe-added' as const; + +export type RecipeAddedEvent = Event<Recipe> & { + type: typeof RecipeAddedEventType; +}; ``` -```typescript [Shell] -import { getLogger, RouteHandler } from "@nimbus/core"; -import { - AccountAddedData, - AccountAddedEvent, -} from "../../core/events/accountAdded.ts"; +### Create Events in Command Handlers -export const accountAddedHandler: RouteHandler< - AccountAddedEvent, - AccountAddedData -> = async (event) => { - await new Promise((resolve) => setTimeout(resolve, 1000)); +Events are typically created and emitted by command handlers: - getLogger().info({ - message: `New account was added: ${event.data.account.name}`, - }); +```typescript +// core/commands/addRecipe.ts +import { ulid } from '@std/ulid'; +import { getEnv } from '@nimbus/utils'; + +export const addRecipe = ( + command: AddRecipeCommand, + state: RecipeState, +): { + newState: Recipe; + events: RecipeAddedEvent[]; +} => { + if (state !== null) { + throw new InvalidInputException('Recipe
already exists'); + } - // This is just an example. - // Change the code to do what has to be done after an account got added. - // For example send a mail to the owner. + const { EVENT_SOURCE } = getEnv({ variables: ['EVENT_SOURCE'] }); + const subject = recipeSubject(command.data.slug); + + // Create the event + const recipeAddedEvent: RecipeAddedEvent = { + specversion: '1.0', + id: ulid(), + correlationid: command.correlationid, + time: new Date().toISOString(), + source: EVENT_SOURCE, + type: RecipeAddedEventType, + subject, + data: command.data, + datacontenttype: 'application/json', + }; return { - statusCode: 200, - data: event.data, + newState: command.data, + events: [recipeAddedEvent], }; }; ``` -::: +### Subscribe to Events + +Event handlers react to events by updating read models, sending notifications, or triggering other processes: + +```typescript +// infrastructure/eventHandler/recipeAdded.handler.ts +import { getLogger } from '@nimbus/core'; +import { RecipeAddedEvent } from '../../core/events/recipeAdded.ts'; + +export const recipeAdded = (event: RecipeAddedEvent) => { + getLogger().info({ + message: 'Recipe added', + slug: event.data.slug, + title: event.data.title, + }); + + // Additional side effects: + // - Update read model + // - Send notification + // - Trigger related processes +}; +``` + +### Observe Events from Event Store + +Use the event store observer to subscribe to events: + +```typescript +// shared/infrastructure/eventStore.ts +import { EventStore } from '@nimbus/eventsourcingdb'; +import { recipeAdded } from './eventHandler/recipeAdded.handler.ts'; + +export const eventStore = new EventStore(/* config */); + +// Subscribe to recipe events +eventStore.observe({ + subjects: ['/recipes/*'], + handler: async (event) => { + switch (event.type) { + case 'at.overlap.nimbus.recipe-added': + recipeAdded(event as RecipeAddedEvent); + break; + case 'at.overlap.nimbus.recipe-updated': + recipeUpdated(event as RecipeUpdatedEvent); + break; + case 'at.overlap.nimbus.recipe-deleted': + recipeDeleted(event as RecipeDeletedEvent); + break; + } + }, +}); +``` + +## Event Sourcing + +Events can be used as the source of truth for application state through event sourcing: + +### Event Reducer + +An event reducer reconstructs aggregate state by replaying events: + +```typescript +// core/domain/recipeAggregate.ts +import { Event } from '@nimbus/core'; +import { EventReducer } from '@nimbus/eventsourcing'; +import { Recipe } from './recipe.ts'; + +export type RecipeState = Recipe | null; + +export const recipeReducer: EventReducer = ( + state: RecipeState, + event: Event, +): RecipeState => { + switch (event.type) { + case 'at.overlap.nimbus.recipe-added': + return event.data as Recipe; + case 'at.overlap.nimbus.recipe-updated': + return { ...state, ...(event.data as Partial) }; + case 'at.overlap.nimbus.recipe-deleted': + return null; + default: + return state; + } +}; +``` + +### Load Aggregate from Events + +Load current state by replaying all events for a subject: + +```typescript +import { loadAggregate } from '@nimbus/eventsourcing'; + +const subject = recipeSubject(slug); +const snapshot = await loadAggregate( + eventStore, + subject, + null, + recipeReducer, +); + +// snapshot.state contains the current state +// snapshot.lastEventId can be used for optimistic concurrency +``` + +## Event Subjects + +Events use subjects to organize and filter events hierarchically: + +```typescript +// Subject patterns +'/recipes/carbonara' // Specific recipe +'/recipes/*' // All 
recipes +'/users/123/preferences' // User preferences +'/orders/456/items/*' // All items in an order +``` + +Observers can subscribe to subject patterns: + +```typescript +// Subscribe to all recipes +eventStore.observe({ + subjects: ['/recipes/*'], + handler: async (event) => { /* handle event */ }, +}); + +// Subscribe to multiple patterns +eventStore.observe({ + subjects: ['/recipes/*', '/ingredients/*'], + handler: async (event) => { /* handle event */ }, +}); +``` + +## Best Practices + +### Use Past Tense Names + +Event names should describe what happened, not what should happen: + +```typescript +// ✅ Good - Past tense +RecipeAddedEvent +UserRegisteredEvent +OrderShippedEvent + +// ❌ Bad - Imperative +AddRecipeEvent +RegisterUserEvent +ShipOrderEvent +``` + +### Keep Events Immutable + +Events should never be modified after creation: + +```typescript +// ✅ Good - Create new event +const updatedEvent = { ...originalEvent, data: newData }; + +// ❌ Bad - Mutate existing event +originalEvent.data = newData; +``` + +### Include Correlation IDs + +Maintain correlation IDs across commands and events for tracing: + +```typescript +const event: RecipeAddedEvent = { + // ... + correlationid: command.correlationid, // Inherit from command + // ... +}; +``` + +### Use Meaningful Subjects + +Subjects should be hierarchical and meaningful: + +```typescript +// ✅ Good - Hierarchical and clear +`/recipes/${slug}` +`/users/${userId}/orders/${orderId}` + +// ❌ Bad - Flat and unclear +`recipe-${slug}` +`order_${orderId}` +``` + +### Version Event Schemas + +Include version information in event types for schema evolution: + +```typescript +export const RecipeAddedEventType = 'at.overlap.nimbus.recipe-added.v1' as const; + +// Later, when schema changes +export const RecipeAddedEventTypeV2 = 'at.overlap.nimbus.recipe-added.v2' as const; +``` + +### Handle Event Ordering + +Be aware that events may arrive out of order in distributed systems. Use event IDs and timestamps when ordering matters. + +## Read Model Updates + +Events are commonly used to update read models in CQRS systems: + +```typescript +// Update read model based on events +eventStore.observe({ + subjects: ['/recipes/*'], + handler: async (event) => { + switch (event.type) { + case 'at.overlap.nimbus.recipe-added': + await recipeReadModel.insertOne(event.data); + break; + case 'at.overlap.nimbus.recipe-updated': + await recipeReadModel.updateOne( + { slug: event.data.slug }, + { $set: event.data }, + ); + break; + case 'at.overlap.nimbus.recipe-deleted': + await recipeReadModel.deleteOne({ slug: event.data.slug }); + break; + } + }, +}); +``` + +## Event Replay + +Event sourcing enables replaying events to rebuild state: + +```typescript +// Replay all events for a subject +const events = await eventStore.readEvents(subject); +const currentState = events.reduce(recipeReducer, null); + +// Replay events up to a specific point in time +const events = await eventStore.readEvents(subject, { + untilTime: '2024-01-01T00:00:00Z', +}); +const pastState = events.reduce(recipeReducer, null); +``` -## Publish and Subscribe to Events +## Related Patterns -Learn more about how to publish and subscribe to events in the [Event Bus](/guide/core/event-bus.md) guide. 
+- [Commands](/guide/core/commands) - Write operations that emit events +- [Queries](/guide/core/queries) - Read operations +- [Event Sourcing](/guide/eventsourcing/) - Using events as source of truth +- [CQRS](/guide/what-is-nimbus#cqrs-event-sourcing) - Separating reads and writes diff --git a/docs/guide/core/queries.md b/docs/guide/core/queries.md index ddfabd2..57f3d67 100644 --- a/docs/guide/core/queries.md +++ b/docs/guide/core/queries.md @@ -1,87 +1,304 @@ # Queries -Queries are the messages that tell your application to give you some information. -Like "Hey give me the account with the ID 1234". +Queries represent read operations - requests for information without changing application state. + +Queries follow the Query pattern from CQRS (Command Query Responsibility Segregation), where reads are separated from writes to allow independent optimization and scaling. ::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) +The examples on this page reference the Gustav application. -Check it out and run it with `deno task dev` +You can find the full example on GitHub: [Gustav Recipe App](https://github.com/overlap-dev/Nimbus/tree/main/examples/gustav) ::: -## Example - -At first we define the query and the core functionality in a file called `getAccount.ts` in the `core/queries` folder. If you like you can also split the query definition and the function into separate files. Or add more functions to handle the core business logic involved when getting an account. - -Next we add a query handler in a fille called `getAccount.handler.ts` in the `shell/queries` folder. This is the first function that is executed when the app receives this specific query. - -The query handler contains all the glue needed to communicate with other parts of the application and to handle all the side-effects. In this example we first read the account from the database and then we call the core function to apply the business logic. Finally we return the account to the caller. 
- -::: code-group - -```typescript [Core] -import { - AuthContext, - InvalidInputException, - Query, - QueryMetadata, -} from "@nimbus/core"; -import { z } from "zod"; -import { Account } from "../account.type.ts"; - -// Define the Query with it's unique name, parameters and metadata -export const GetAccountQuery = Query( - z.literal("GET_ACCOUNT"), - z.object({ - id: z.string().length(24), - }), - QueryMetadata(AuthContext) // You can define you own meta data type if needed -); -export type GetAccountQuery = z.infer<typeof GetAccountQuery>; - -export const getAccount = ( - data: Account, - authContext?: AuthContext -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } +## Key Characteristics + +- **Read Operations**: Queries fetch data without modifying state +- **Idempotent**: Multiple executions return the same result (if data hasn't changed) +- **Optimized for Reading**: Can use specialized read models or databases +- **Type-Safe**: Full TypeScript type safety for query parameters and results + +## Query Structure + +A query in Nimbus follows the CloudEvents specification and consists of: + +```typescript +export type Query<T> = { + specversion: '1.0'; + id: string; + correlationid: string; + time: string; + source: string; + type: string; + data: T; + datacontenttype: string; +}; +``` + +## Example: Get Recipe Query + +### Define the Query Type + +Create a query type definition in the core layer: - // Apply more business logic if necessary. - // For example remove sensitive properties based on permission levels. +```typescript +// core/queries/getRecipe.ts +import { Query } from '@nimbus/core'; +import { Recipe } from '../domain/recipe.ts'; - return data; +export const GetRecipeQueryType = 'at.overlap.nimbus.get-recipe' as const; + +export type GetRecipeParams = { + slug: string; +}; + +export type GetRecipeQuery = Query<GetRecipeParams> & { + type: typeof GetRecipeQueryType; }; ``` -```typescript [Shell] -import { RouteHandler } from "@nimbus/core"; -import { ObjectId } from "mongodb"; -import { Account } from "../../core/account.type.ts"; -import { getAccount, GetAccountQuery } from "../../core/queries/getAccount.ts"; -import { accountRepository } from "../account.repository.ts"; - -export const getAccountHandler: RouteHandler = async ( - query -) => { - // Read the account from the database - let account = await accountRepository.findOne({ - filter: { _id: new ObjectId(query.params.id) }, - }); - - // Call the core function - account = getAccount(account, query.metadata.authContext); - - // Return the successful result - return { - statusCode: 200, - data: account, +### Implement Core Logic with Port + +The core defines the query logic and uses a port (interface) for data access: + +```typescript +// core/queries/getRecipe.ts +import { RecipeRepository } from '../ports/recipeRepository.ts'; + +export const getRecipe = async ( + query: GetRecipeQuery, + repository: RecipeRepository, +): Promise<Recipe> => { + return await repository.getBySlug(query.data.slug); +}; +``` + +### Define the Port + +The port is an interface that defines the contract for data access: + +```typescript +// core/ports/recipeRepository.ts +export interface RecipeRepository { + getBySlug(slug: string): Promise<Recipe>; + list(): Promise<Recipe[]>; +} +``` + +### Implement Shell Handler + +The handler provides the repository implementation and calls the core: + +```typescript +// infrastructure/http/handler/getRecipe.handler.ts +import { MessageHandler } from '@nimbus/core'; +import { Recipe } from '../../../core/domain/recipe.ts'; +import { getRecipe,
GetRecipeQuery } from '../../../core/queries/getRecipe.ts'; +import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; + +export const getRecipeHandler: MessageHandler = + async (query) => { + const recipe = await getRecipe(query, recipeMemoryRepository); + return recipe; }; +``` + +### Implement the Adapter + +The adapter provides the actual implementation of the repository port: + +```typescript +// infrastructure/repository/recipeMemoryRepository.ts +import { NotFoundException } from '@nimbus/core'; +import { Recipe } from '../../core/domain/recipe.ts'; +import { RecipeRepository } from '../../core/ports/recipeRepository.ts'; + +const recipes = new Map(); + +export const recipeMemoryRepository: RecipeRepository = { + async getBySlug(slug: string): Promise { + const recipe = recipes.get(slug); + if (!recipe) { + throw new NotFoundException('Recipe not found', { + errorCode: 'RECIPE_NOT_FOUND', + }); + } + return recipe; + }, + + async list(): Promise { + return Array.from(recipes.values()); + }, }; ``` -::: +## Architecture Pattern + +Queries follow the Pure Core - Imperative Shell pattern with ports and adapters: + +1. **Core Layer**: + - Define query types + - Define ports (interfaces) for data access + - Implement query logic that uses ports + - Apply business rules for data filtering/transformation + +2. **Infrastructure Layer** (Shell): + - Implement adapters that fulfill port contracts + - Define message handlers + - Connect handlers to adapters + - Handle errors and responses + +## Read Models + +In CQRS systems, queries often read from optimized read models rather than the event store: + +```typescript +// infrastructure/readModel/recipeReadModel.ts +import { MongoCollection } from '@nimbus/mongodb'; +import { Recipe } from '../../core/domain/recipe.ts'; + +export const recipeReadModel = new MongoCollection('recipes'); + +// Update read model when events occur +export const updateRecipeReadModel = async (event: RecipeAddedEvent) => { + await recipeReadModel.insertOne(event.data); +}; +``` + +The read model is kept in sync by subscribing to domain events: + +```typescript +// Event handler updates the read model +eventStore.observe({ + subjects: ['/recipes/*'], + handler: async (event) => { + switch (event.type) { + case 'at.overlap.nimbus.recipe-added': + await recipeReadModel.insertOne(event.data); + break; + case 'at.overlap.nimbus.recipe-updated': + await recipeReadModel.updateOne( + { slug: event.data.slug }, + event.data, + ); + break; + case 'at.overlap.nimbus.recipe-deleted': + await recipeReadModel.deleteOne({ slug: event.data.slug }); + break; + } + }, +}); +``` + +## Best Practices + +### Keep Queries Simple + +Queries should focus on data retrieval with minimal business logic: + +```typescript +// ✅ Good - Simple data retrieval +export const getRecipe = async ( + query: GetRecipeQuery, + repository: RecipeRepository, +): Promise => { + return await repository.getBySlug(query.data.slug); +}; + +// ⚠️ Consider - Business logic might belong in query +export const getRecipe = async ( + query: GetRecipeQuery, + repository: RecipeRepository, + authContext: AuthContext, +): Promise => { + const recipe = await repository.getBySlug(query.data.slug); + + // Filter sensitive data based on permissions + if (!authContext.hasRole('admin')) { + delete recipe.internalNotes; + } + + return recipe; +}; +``` + +### Use Pagination for Lists + +Always paginate list queries to prevent performance issues: + +```typescript +export type ListRecipesParams = { + page?: 
number; + pageSize?: number; + category?: string; +}; + +export type ListRecipesQuery = Query & { + type: typeof ListRecipesQueryType; +}; + +export const listRecipes = async ( + query: ListRecipesQuery, + repository: RecipeRepository, +): Promise<{ recipes: Recipe[]; total: number }> => { + const { page = 1, pageSize = 20, category } = query.data; + return await repository.list({ page, pageSize, category }); +}; +``` + +### Optimize Read Models + +Read models should be denormalized and optimized for specific query patterns: + +```typescript +// Denormalized read model for recipe list view +export type RecipeListItem = { + slug: string; + title: string; + category: string; + cookingTime: number; + difficulty: string; + thumbnailUrl: string; + // No full ingredient list or instructions +}; + +// Separate detailed read model for single recipe view +export type RecipeDetail = Recipe & { + relatedRecipes: string[]; + authorInfo: AuthorInfo; +}; +``` + +### Cache Frequently Accessed Data + +Consider caching for queries that are called frequently: + +```typescript +const recipeCache = new Map(); +const CACHE_TTL = 5 * 60 * 1000; // 5 minutes + +export const getRecipe = async ( + query: GetRecipeQuery, + repository: RecipeRepository, +): Promise => { + const cached = recipeCache.get(query.data.slug); + if (cached) return cached; + + const recipe = await repository.getBySlug(query.data.slug); + recipeCache.set(query.data.slug, recipe); + + setTimeout(() => recipeCache.delete(query.data.slug), CACHE_TTL); + + return recipe; +}; +``` + +## Routing Queries + +Queries are routed to handlers using the message router. See the [HTTP Guide](/guide/http/) for more details on routing queries through HTTP endpoints. -## Receive and Route Queries +## Related Patterns -Learn more about how to receive and route queries in the [Query Bus](/guide/core/event-bus.md) guide. +- [Commands](/guide/core/commands) - Write operations +- [Events](/guide/core/events) - Domain events +- [Event Sourcing](/guide/eventsourcing/) - Event-based state management +- [CQRS](/guide/what-is-nimbus#cqrs-event-sourcing) - Separating reads and writes diff --git a/docs/guide/what-is-nimbus.md b/docs/guide/what-is-nimbus.md index 7d691e6..8368674 100644 --- a/docs/guide/what-is-nimbus.md +++ b/docs/guide/what-is-nimbus.md @@ -127,38 +127,25 @@ For complex scenarios requiring multiple database queries with business logic in ## Message-Driven Architecture -Nimbus uses a message-driven approach with three core message types: +Nimbus uses a message-driven approach with three core message types that follow the Command Query Responsibility Segregation (CQRS) pattern. ### Commands Commands represent write operations - intentions to change system state. They are processed by command handlers that execute business logic and persist changes. -```typescript -export type AddRecipeCommand = Command & { - type: "at.overlap.nimbus.app-recipe"; -}; -``` +Learn more in the [Commands Guide](/guide/core/commands). ### Queries Queries represent read operations - requests for information without changing state. Query handlers fetch and return data. -```typescript -export type GetRecipeQuery = Query & { - type: "at.overlap.nimbus.get-recipe"; -}; -``` +Learn more in the [Queries Guide](/guide/core/queries). ### Events Events represent facts - things that have already happened in the system. Event handlers react to these facts to update read models, trigger notifications, or coordinate between different parts of the system. 
-```typescript -export type RecipeAddedEvent = Event & { - type: "at.overlap.nimbus.recipe-added"; - subject: string; -}; -``` +Learn more in the [Events Guide](/guide/core/events). ## Domain-Driven Design & Hexagonal Architecture diff --git a/examples/gustav/concurrency-tester.sh b/examples/gustav/concurrency-tester.sh deleted file mode 100644 index 00d079f..0000000 --- a/examples/gustav/concurrency-tester.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash - -# SETUP for testing the concurrency -# -# Go into the file examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts -# and add the following code right before eventStore.writeEvents. -# -# await new Promise((resolve) => -# setTimeout(resolve, Math.floor(Math.random() * (1000 - 200 + 1)) + 200) -# ); -# -# This adds a random delay between 200ms and 1000ms between the read and the write -# on the event store. This way we can easier shake up the order on multiple commands -# to provoke some concurrency issues. - -# Number of concurrent requests -CONCURRENCY=5 - -# Replace :slug with your actual recipe slug -SLUG="test" - -echo "Starting $CONCURRENCY concurrent requests at $(date +%T.%3N)" -echo "==========================================" - -for i in $(seq 1 $CONCURRENCY); do - ( - echo "[Request $i] Starting at $(date +%T.%3N)" - - RESPONSE=$(curl -s -w "\nHTTP_STATUS:%{http_code}\nTIME_TOTAL:%{time_total}" \ - --request PUT \ - --url "http://localhost:3100/recipes/$SLUG" \ - --header 'Authorization: Bearer very-special-secret' \ - --header 'Content-Type: application/json' \ - --header 'User-Agent: insomnium/0.2.3-a' \ - --data "{ - \"name\": \"Test Rezept Change $i\", - \"instructions\": [ - \"1. Wasser kochen\" - ] - }") - - HTTP_STATUS=$(echo "$RESPONSE" | grep "HTTP_STATUS:" | cut -d: -f2) - TIME_TOTAL=$(echo "$RESPONSE" | grep "TIME_TOTAL:" | cut -d: -f2) - BODY=$(echo "$RESPONSE" | sed '/HTTP_STATUS:/,$d') - - echo "[Request $i] Completed at $(date +%T.%3N)" - echo "[Request $i] Status: $HTTP_STATUS" - echo "[Request $i] Time: ${TIME_TOTAL}s" - echo "[Request $i] Response Body:" - echo "$BODY" | sed 's/^/ /' - echo "==========================================" - ) & -done - -wait - -echo "All requests completed at $(date +%T.%3N)" diff --git a/examples/gustav/deno.json b/examples/gustav/deno.json deleted file mode 100644 index 2bf7da2..0000000 --- a/examples/gustav/deno.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "tasks": { - "dev": "deno run -A --watch src/main.ts", - "test": "deno test -A" - }, - "lint": { - "include": ["src/"], - "exclude": [], - "rules": { - "tags": ["recommended"], - "include": [], - "exclude": ["no-explicit-any"] - } - }, - "fmt": { - "include": ["src/"], - "exclude": [], - "useTabs": false, - "lineWidth": 80, - "indentWidth": 4, - "semiColons": true, - "singleQuote": true, - "proseWrap": "always" - }, - "test": { - "include": ["src/"] - }, - "imports": { - "@oak/oak": "jsr:@oak/oak@^17.1.6", - "@std/dotenv": "jsr:@std/dotenv@^0.225.5", - "@std/ulid": "jsr:@std/ulid@^1.0.0", - "@tajpouria/cors": "jsr:@tajpouria/cors@^1.2.1", - "ajv": "npm:ajv@^8.17.1", - "mongodb": "npm:mongodb@^6.12.0", - "zod": "npm:zod@^3.24.1", - "effect": "npm:effect@^3.17.9" - } -} diff --git a/examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts b/examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts deleted file mode 100644 index fa8bcb8..0000000 --- a/examples/gustav/src/contexts/iam/infrastructure/http/auth.middleware.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { AuthContext, getLogger } from 
'@nimbus/core'; -import type { Context } from '@oak/oak/context'; -import type { Next } from '@oak/oak/middleware'; - -/** - * ! NOT FOR PRODUCTION USE - * - * This is just a simple example of how to implement a middleware for authentication. - */ -export const exampleAuthMiddleware = async ( - ctx: Context, - next: Next, -) => { - const authorization = ctx.request.headers.get('authorization'); - - if (!authorization) { - const anonymousAuthContext: AuthContext = { - sub: 'anonymous', - groups: [], - }; - - ctx.state.authContext = anonymousAuthContext; - - await next(); - } else { - try { - const token = authorization?.replace('Bearer ', ''); - - if (token === 'very-special-secret') { - const adminAuthContext: AuthContext = { - sub: '02e50464-b051-70fa-25ef-63038890d80c', - groups: ['admin'], - }; - - ctx.state.authContext = adminAuthContext; - } else { - throw new Error('Invalid token'); - } - - await next(); - } catch (error: any) { - getLogger().error({ - message: 'Failed to authenticate user', - error, - }); - - ctx.response.status = 401; - ctx.response.body = { - message: 'Unauthorized', - }; - } - } -}; diff --git a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts deleted file mode 100644 index 86aea08..0000000 --- a/examples/gustav/src/contexts/recipe/core/commands/addRecipe.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { Command, InvalidInputException } from '@nimbus/core'; -import { getEnv } from '@nimbus/utils'; -import { ulid } from '@std/ulid'; -import { Recipe } from '../domain/recipe.ts'; -import { RecipeState, recipeSubject } from '../domain/recipeAggregate.ts'; -import { - RecipeAddedCommandType, - RecipeAddedEvent, -} from '../events/recipeAdded.ts'; - -export const AddRecipeCommandType = 'at.overlap.nimbus.app-recipe' as const; - -export type AddRecipeCommand = Command & { - type: typeof AddRecipeCommandType; -}; - -export const addRecipe = ( - command: AddRecipeCommand, - state: RecipeState, -): { - newState: Recipe; - events: RecipeAddedEvent[]; -} => { - const { EVENT_SOURCE } = getEnv({ - variables: ['EVENT_SOURCE'], - }); - - if (state !== null) { - throw new InvalidInputException('Recipe already exists', { - errorCode: 'DUPLICATE_RECIPE', - reason: - 'A recipe with this slug already exists. The slug for each recipe must be unique, please choose a different slug.', - }); - } - - const subject = recipeSubject(command.data.slug); - - const recipeAddedEvent: RecipeAddedEvent = { - specversion: '1.0', - id: ulid(), - correlationid: command.correlationid, - time: new Date().toISOString(), - source: EVENT_SOURCE, - type: RecipeAddedCommandType, - subject, - data: command.data, - datacontenttype: 'application/json', - }; - - return { - newState: command.data, - events: [recipeAddedEvent], - }; -}; diff --git a/examples/gustav/src/contexts/recipe/core/commands/command.md b/examples/gustav/src/contexts/recipe/core/commands/command.md deleted file mode 100644 index 9d03304..0000000 --- a/examples/gustav/src/contexts/recipe/core/commands/command.md +++ /dev/null @@ -1,6 +0,0 @@ -./src/contexts//core/commands/.ts - -We define the commands and command processors here. - -In other libraries or frameworks this place might be referred to as use-cases. -But as we want to emphasize the CQRS pattern we intentionally split up the write use-cases which occur through commands and the read use-cases which occur through queries. 
diff --git a/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts deleted file mode 100644 index a118f9a..0000000 --- a/examples/gustav/src/contexts/recipe/core/commands/deleteRecipe.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { Command } from '@nimbus/core'; -import { getEnv } from '@nimbus/utils'; -import { ulid } from '@std/ulid'; -import { - RecipeState, - recipeSubject, - requireRecipe, -} from '../domain/recipeAggregate.ts'; -import { - RecipeDeletedEvent, - RecipeDeletedEventType, -} from '../events/recipeDeleted.ts'; - -export const DeleteRecipeCommandType = - 'at.overlap.nimbus.delete-recipe' as const; - -export type DeleteRecipeCommand = - & Command<{ - slug: string; - }> - & { - type: typeof DeleteRecipeCommandType; - }; - -export const deleteRecipe = ( - command: DeleteRecipeCommand, - state: RecipeState, -): { - newState: RecipeState; - events: RecipeDeletedEvent[]; -} => { - const { EVENT_SOURCE } = getEnv({ - variables: ['EVENT_SOURCE'], - }); - - // Validate recipe exists - requireRecipe(state); - - const subject = recipeSubject(command.data.slug); - - // Create event - const recipeDeletedEvent: RecipeDeletedEvent = { - specversion: '1.0', - id: ulid(), - correlationid: command.correlationid, - time: new Date().toISOString(), - source: EVENT_SOURCE, - type: RecipeDeletedEventType, - subject, - data: { slug: command.data.slug }, - datacontenttype: 'application/json', - }; - - return { - newState: null, - events: [recipeDeletedEvent], - }; -}; diff --git a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts b/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts deleted file mode 100644 index 0869a6c..0000000 --- a/examples/gustav/src/contexts/recipe/core/commands/updateRecipe.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { Command } from '@nimbus/core'; -import { getEnv } from '@nimbus/utils'; -import { ulid } from '@std/ulid'; -import { Recipe } from '../domain/recipe.ts'; -import { - RecipeState, - recipeSubject, - requireRecipe, -} from '../domain/recipeAggregate.ts'; -import { - RecipeUpdatedEvent, - RecipeUpdatedEventType, -} from '../events/recipeUpdated.ts'; - -export const UpdateRecipeCommandType = - 'at.overlap.nimbus.update-recipe' as const; - -export type UpdateRecipeData = Partial & { - slug: string; -}; - -export type UpdateRecipeCommand = - & Command - & { - type: typeof UpdateRecipeCommandType; - }; - -export const updateRecipe = ( - command: UpdateRecipeCommand, - state: RecipeState, -): { - newState: Recipe; - events: RecipeUpdatedEvent[]; -} => { - const { EVENT_SOURCE } = getEnv({ - variables: ['EVENT_SOURCE'], - }); - - const subject = recipeSubject(command.data.slug); - - // Validate recipe exists - const currentRecipe = requireRecipe(state); - - // Prevent slug changes - const updates: Partial = { - ...command.data, - }; - if (updates.slug) { - delete updates.slug; - } - - // Create event - const recipeUpdatedEvent: RecipeUpdatedEvent = { - specversion: '1.0', - id: ulid(), - correlationid: command.correlationid, - time: new Date().toISOString(), - source: EVENT_SOURCE, - type: RecipeUpdatedEventType, - subject, - data: { - slug: command.data.slug, - updates, - }, - datacontenttype: 'application/json', - }; - - return { - newState: { - ...currentRecipe, - ...updates, - }, - events: [recipeUpdatedEvent], - }; -}; diff --git a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts b/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts 
deleted file mode 100644 index e1d258f..0000000 --- a/examples/gustav/src/contexts/recipe/core/domain/ingredient.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Unit } from '../../../../shared/core/domain/unit.ts'; - -export type Ingredient = { - name: string; - amount: number; - unit: Unit; - productId?: string; -}; diff --git a/examples/gustav/src/contexts/recipe/core/domain/model.md b/examples/gustav/src/contexts/recipe/core/domain/model.md deleted file mode 100644 index 4302618..0000000 --- a/examples/gustav/src/contexts/recipe/core/domain/model.md +++ /dev/null @@ -1,3 +0,0 @@ -./src/contexts//core/domain/.ts - -We define the domain specific models and types here. diff --git a/examples/gustav/src/contexts/recipe/core/domain/recipe.ts b/examples/gustav/src/contexts/recipe/core/domain/recipe.ts deleted file mode 100644 index 02fe95e..0000000 --- a/examples/gustav/src/contexts/recipe/core/domain/recipe.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Ingredient } from './ingredient.ts'; - -export type Recipe = { - slug: string; - name: string; - instructions: string[]; - ingredients: Ingredient[]; - tags: string[]; -}; diff --git a/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts b/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts deleted file mode 100644 index 1b3974e..0000000 --- a/examples/gustav/src/contexts/recipe/core/domain/recipeAggregate.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { Event, NotFoundException } from '@nimbus/core'; -import { type EventReducer } from '@nimbus/eventsourcing'; -import { Recipe } from './recipe.ts'; - -/** - * Recipe aggregate state. - * - * Null represents a recipe that doesn't exist or has been deleted. - */ -export type RecipeState = Recipe | null; - -/** - * Recipe event reducer. - * - * Applies events to recipe state to reconstruct the aggregate. - * This is the core of event sourcing - replaying events to rebuild state. - * - * @param state - Current recipe state - * @param event - Event to apply - * @returns New recipe state after applying the event - * - * @example - * ```ts - * const events = await eventStore.readEvents('/recipes/carbonara'); - * const currentState = events.reduce(recipeReducer, null); - * ``` - */ -export const recipeReducer: EventReducer = ( - state: RecipeState, - event: Event, -): RecipeState => { - switch (event.type) { - case 'at.overlap.nimbus.recipe-added': { - // Create new recipe from event data - return event.data as Recipe; - } - - case 'at.overlap.nimbus.recipe-updated': { - if (!state) { - // Cannot update a recipe that doesn't exist - // In a real system, you might want to log this as a warning - return state; - } - - // Merge update data into existing recipe - return { - ...state, - ...(event.data as Partial), - }; - } - - case 'at.overlap.nimbus.recipe-deleted': { - // Mark recipe as deleted - return null; - } - - default: - // Unknown event type - return state unchanged - return state; - } -}; - -/** - * Helper to get subject for a recipe. - * - * @param slug - Recipe slug - * @returns Subject path for the recipe - */ -export function recipeSubject(slug: string): string { - return `/recipes/${slug}`; -} - -/** - * Helper to validate recipe state. 
- * - * @param state - Recipe state to validate - * @throws Error if recipe doesn't exist - * @returns The recipe (for chaining) - */ -export function requireRecipe(state: RecipeState): Recipe { - if (!state) { - throw new NotFoundException('Recipe not found', { - errorCode: 'RECIPE_NOT_FOUND', - reason: 'The recipe with the provided slug was not found', - }); - } - return state; -} diff --git a/examples/gustav/src/contexts/recipe/core/events/event.md b/examples/gustav/src/contexts/recipe/core/events/event.md deleted file mode 100644 index 7778c05..0000000 --- a/examples/gustav/src/contexts/recipe/core/events/event.md +++ /dev/null @@ -1,3 +0,0 @@ -./src/contexts//core/events/.ts - -We define the events here. diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts b/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts deleted file mode 100644 index ea9830c..0000000 --- a/examples/gustav/src/contexts/recipe/core/events/recipeAdded.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { type Event, getLogger } from '@nimbus/core'; -import { Recipe } from '../domain/recipe.ts'; - -export const RecipeAddedCommandType = 'at.overlap.nimbus.recipe-added' as const; - -export type RecipeAddedEvent = Event & { - type: typeof RecipeAddedCommandType; -}; - -export const recipeAdded = ( - event: RecipeAddedEvent, -) => { - getLogger().info({ - message: 'recipeAdded Handler', - data: event.data, - }); -}; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts b/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts deleted file mode 100644 index 59effbc..0000000 --- a/examples/gustav/src/contexts/recipe/core/events/recipeDeleted.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { type Event, getLogger } from '@nimbus/core'; - -export const RecipeDeletedEventType = - 'at.overlap.nimbus.recipe-deleted' as const; - -export type RecipeDeletedEvent = Event<{ slug: string }> & { - type: typeof RecipeDeletedEventType; -}; - -export const recipeDeleted = ( - event: RecipeDeletedEvent, -) => { - getLogger().info({ - message: 'recipeDeleted Handler', - data: event.data, - }); -}; diff --git a/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts b/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts deleted file mode 100644 index 4add5f9..0000000 --- a/examples/gustav/src/contexts/recipe/core/events/recipeUpdated.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { type Event, getLogger } from '@nimbus/core'; -import { Recipe } from '../domain/recipe.ts'; - -export const RecipeUpdatedEventType = - 'at.overlap.nimbus.recipe-updated' as const; - -export type RecipeUpdatedEvent = - & Event<{ - slug: string; - updates: Partial; - }> - & { - type: typeof RecipeUpdatedEventType; - }; - -export const recipeUpdated = ( - event: RecipeUpdatedEvent, -) => { - getLogger().info({ - message: 'recipeUpdated Handler', - data: event.data, - }); -}; diff --git a/examples/gustav/src/contexts/recipe/core/ports/port.md b/examples/gustav/src/contexts/recipe/core/ports/port.md deleted file mode 100644 index ac95467..0000000 --- a/examples/gustav/src/contexts/recipe/core/ports/port.md +++ /dev/null @@ -1,21 +0,0 @@ -./src/contexts//core/ports/.ts - -We define specific ports in here which are the interfaces for external systems implemented in the infrastructure layer of the software. The core defines the contract that is needed by the command and query handler to work. 
- -For example a repository (a thing to store data) might be needed for commands and queries to change data and read data from. -So here in the core we will define a port for it. - -```typescript -export interface RecipeRepository { - insert: (recipe: Recipe) => Recipe; - update: (recipe: Recipe) => Recipe; - delete: (id: string) => void; - getById: (id: string) => Recipe; - listByOwner: ( - ownerId: string, - options?: { readonly offset?: number; readonly limit?: number } - ) => Recipe[]; -} -``` - -Now we can use the repository in the command and query handler and know there is an insert method available for us. But we do not care about any implementation details like whether it is PostgreSQL, MongoDB or an in-memory store. diff --git a/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts b/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts deleted file mode 100644 index f34bb67..0000000 --- a/examples/gustav/src/contexts/recipe/core/ports/recipeRepository.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { Recipe } from '../domain/recipe.ts'; - -export interface RecipeRepository { - generateId: () => string; - - insert: (recipe: Recipe) => Promise<Recipe>; - - update: (slug: string, updates: Partial<Recipe>) => Promise<Recipe>; - - delete: (id: string) => Promise<void>; - - getBySlug: (id: string) => Promise<Recipe>; - - list: ( - options?: { - limit?: number; - offset?: number; - filter?: Partial<Recipe>; - }, - ) => Promise<Recipe[]>; - - count: ( - options?: { - filter?: Partial<Recipe>; - }, - ) => Promise<number>; -} diff --git a/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts b/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts deleted file mode 100644 index 53aa864..0000000 --- a/examples/gustav/src/contexts/recipe/core/queries/getRecipe.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Query } from '@nimbus/core'; -import type { Recipe } from '../domain/recipe.ts'; -import { RecipeRepository } from '../ports/recipeRepository.ts'; - -export const GetRecipeQueryType = 'at.overlap.nimbus.get-recipe' as const; - -export type GetRecipeParams = { - slug: string; -}; - -export type GetRecipeQuery = Query<GetRecipeParams> & { - type: typeof GetRecipeQueryType; -}; - -export const getRecipe = async ( - query: GetRecipeQuery, - repository: RecipeRepository, -): Promise<Recipe> => { - return await repository.getBySlug(query.data.slug); -}; diff --git a/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts b/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts deleted file mode 100644 index eedfc7d..0000000 --- a/examples/gustav/src/contexts/recipe/core/queries/listRecipes.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Query } from '@nimbus/core'; -import { Recipe } from '../domain/recipe.ts'; -import { RecipeRepository } from '../ports/recipeRepository.ts'; - -export const ListRecipesQueryType = 'at.overlap.nimbus.list-recipes' as const; - -export type ListRecipesQuery = - & Query<{ - limit?: number; - offset?: number; - }> - & { - type: typeof ListRecipesQueryType; - }; - -export const listRecipes = async ( - query: ListRecipesQuery, - repository: RecipeRepository, -): Promise<Recipe[]> => { - const recipes = await repository.list({ - limit: query.data.limit, - offset: query.data.offset, - }); - - return recipes; -}; diff --git a/examples/gustav/src/contexts/recipe/core/queries/query.md b/examples/gustav/src/contexts/recipe/core/queries/query.md deleted file mode 100644 index f7cc217..0000000 --- a/examples/gustav/src/contexts/recipe/core/queries/query.md +++ /dev/null @@ -1,6 +0,0 @@ -./src/contexts//core/queries/.ts - -We define the 
queries and query processors here. - -In other libraries or frameworks this place might be referred to as use-cases. -But as we want to emphasize the CQRS pattern we intentionally split up the write use-cases which occur through commands and the read use-cases which occur through queries. diff --git a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts deleted file mode 100644 index a7ddf07..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { Recipe } from '../../core/domain/recipe.ts'; -import { - recipeAdded, - RecipeAddedEvent, -} from '../../core/events/recipeAdded.ts'; -import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; - -export const recipeAddedHandler: MessageHandler< - RecipeAddedEvent, - Recipe -> = async (event) => { - recipeAdded(event); - - const recipe = await recipeMemoryRepository.insert(event.data); - - return recipe; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts deleted file mode 100644 index 22cb9c0..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { - recipeDeleted, - RecipeDeletedEvent, -} from '../../core/events/recipeDeleted.ts'; -import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; - -export const recipeDeletedHandler: MessageHandler< - RecipeDeletedEvent, - void -> = async (event) => { - recipeDeleted(event); - - await recipeMemoryRepository.delete(event.data.slug); -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts deleted file mode 100644 index cb7561f..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { Recipe } from '../../core/domain/recipe.ts'; -import { - recipeUpdated, - RecipeUpdatedEvent, -} from '../../core/events/recipeUpdated.ts'; -import { recipeMemoryRepository } from '../repository/recipeMemoryRepository.ts'; - -export const recipeUpdatedHandler: MessageHandler< - RecipeUpdatedEvent, - Recipe -> = async (event) => { - recipeUpdated(event); - - const recipe = await recipeMemoryRepository.update( - event.data.slug, - event.data.updates, - ); - - return recipe; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts deleted file mode 100644 index ad454f5..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/commandRouter.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { getLogger, MessageRouter } from '@nimbus/core'; -import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; -import { DeleteRecipeCommandType } from '../../core/commands/deleteRecipe.ts'; -import { UpdateRecipeCommandType } from '../../core/commands/updateRecipe.ts'; -import { addRecipeHandler } from './handler/addRecipe.handler.ts'; -import { deleteRecipeHandler } from 
'./handler/deleteRecipe.handler.ts'; -import { updateRecipeHandler } from './handler/updateRecipe.handler.ts'; - -export const commandRouter = new MessageRouter('command', { - logInput: (input: any) => { - getLogger().info({ - category: 'Nimbus', - ...(input?.correlationid && { - correlationId: input.correlationid, - }), - message: - `${input?.correlationid} - [Command] ${input?.type} from ${input?.source}`, - }); - }, -}); - -commandRouter.register( - AddRecipeCommandType, - addRecipeHandler, -); - -commandRouter.register( - UpdateRecipeCommandType, - updateRecipeHandler, -); - -commandRouter.register( - DeleteRecipeCommandType, - deleteRecipeHandler, -); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts deleted file mode 100644 index 7138123..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/addRecipe.handler.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { loadAggregate } from '@nimbus/eventsourcing'; -import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; -import { - addRecipe, - AddRecipeCommand, -} from '../../../core/commands/addRecipe.ts'; -import { Recipe } from '../../../core/domain/recipe.ts'; -import { - recipeReducer, - recipeSubject, -} from '../../../core/domain/recipeAggregate.ts'; - -export const addRecipeHandler: MessageHandler< - AddRecipeCommand, - Recipe -> = async (command) => { - const subject = recipeSubject(command.data.slug); - - // Load current aggregate state by replaying events - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - - const { newState, events } = addRecipe( - command, - snapshot.state, - ); - - // Write event with optimistic concurrency control - // In case we already have events for this subject, we use isSubjectOnEventId - // In case we don't have any events for this subject, we use isSubjectPristine - await eventStore.writeEvents(events, { - preconditions: snapshot.lastEventId !== undefined - ? 
[ - { - type: 'isSubjectOnEventId', - payload: { - subject, - eventId: snapshot.lastEventId, - }, - }, - ] - : [ - { - type: 'isSubjectPristine', - payload: { - subject, - }, - }, - ], - }); - - return newState; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts deleted file mode 100644 index 9b2b9d8..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/deleteRecipe.handler.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { loadAggregate } from '@nimbus/eventsourcing'; -import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; -import { - deleteRecipe, - DeleteRecipeCommand, -} from '../../../core/commands/deleteRecipe.ts'; -import { - recipeReducer, - recipeSubject, -} from '../../../core/domain/recipeAggregate.ts'; - -export const deleteRecipeHandler: MessageHandler< - DeleteRecipeCommand, - void -> = async (command) => { - const subject = recipeSubject(command.data.slug); - - // Load current aggregate state by replaying events - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - - const { events } = deleteRecipe( - command, - snapshot.state, - ); - - // Write event with optimistic concurrency control - // Use isSubjectOnEventId to ensure no other updates happened since we read - await eventStore.writeEvents(events, { - preconditions: snapshot.lastEventId !== undefined - ? [ - { - type: 'isSubjectOnEventId', - payload: { - subject, - eventId: snapshot.lastEventId, - }, - }, - ] - : undefined, - }); -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts deleted file mode 100644 index 6dc486a..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/getRecipe.handler.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { Recipe } from '../../../core/domain/recipe.ts'; -import { getRecipe, GetRecipeQuery } from '../../../core/queries/getRecipe.ts'; -import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; - -export const getRecipeHandler: MessageHandler< - GetRecipeQuery, - Recipe -> = async (query) => { - const recipe = await getRecipe(query, recipeMemoryRepository); - - return recipe; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts deleted file mode 100644 index 5ed5db5..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/listRecipes.handler.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { Recipe } from '../../../core/domain/recipe.ts'; -import { - listRecipes, - ListRecipesQuery, -} from '../../../core/queries/listRecipes.ts'; -import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; - -export const listRecipesHandler: MessageHandler< - ListRecipesQuery, - Recipe[] -> = async (query) => { - const recipes = await listRecipes(query, recipeMemoryRepository); - - return recipes; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts 
b/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts deleted file mode 100644 index e004aa1..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/handler/updateRecipe.handler.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { MessageHandler } from '@nimbus/core'; -import { loadAggregate } from '@nimbus/eventsourcing'; -import { eventStore } from '../../../../../shared/infrastructure/eventStore.ts'; -import { - updateRecipe, - UpdateRecipeCommand, -} from '../../../core/commands/updateRecipe.ts'; -import { Recipe } from '../../../core/domain/recipe.ts'; -import { - recipeReducer, - recipeSubject, -} from '../../../core/domain/recipeAggregate.ts'; - -export const updateRecipeHandler: MessageHandler< - UpdateRecipeCommand, - Recipe -> = async (command) => { - const subject = recipeSubject(command.data.slug); - - // Load current aggregate state by replaying events - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - - const { newState, events } = updateRecipe( - command, - snapshot.state, - ); - - // Write event with optimistic concurrency control - // Use isSubjectOnEventId to ensure no other updates happened since we read - await eventStore.writeEvents(events, { - preconditions: snapshot.lastEventId !== undefined - ? [ - { - type: 'isSubjectOnEventId', - payload: { - subject, - eventId: snapshot.lastEventId, - }, - }, - ] - : undefined, - }); - - return newState; -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts deleted file mode 100644 index 38d240a..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/queryRouter.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { getLogger, MessageRouter } from '@nimbus/core'; -import { GetRecipeQueryType } from '../../core/queries/getRecipe.ts'; -import { ListRecipesQueryType } from '../../core/queries/listRecipes.ts'; -import { getRecipeHandler } from './handler/getRecipe.handler.ts'; -import { listRecipesHandler } from './handler/listRecipes.handler.ts'; - -export const queryRouter = new MessageRouter('query', { - logInput: (input: any) => { - getLogger().info({ - category: 'Nimbus', - ...(input?.correlationid && { - correlationId: input.correlationid, - }), - message: - `${input?.correlationid} - [Query] ${input?.type} from ${input?.source}`, - }); - }, -}); - -queryRouter.register( - GetRecipeQueryType, - getRecipeHandler, -); - -queryRouter.register( - ListRecipesQueryType, - listRecipesHandler, -); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts deleted file mode 100644 index d301ffa..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/router.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { NimbusOakRouter } from '@nimbus/oak'; -import { AddRecipeCommandType } from '../../core/commands/addRecipe.ts'; -import { DeleteRecipeCommandType } from '../../core/commands/deleteRecipe.ts'; -import { UpdateRecipeCommandType } from '../../core/commands/updateRecipe.ts'; -import { GetRecipeQueryType } from '../../core/queries/getRecipe.ts'; -import { ListRecipesQueryType } from '../../core/queries/listRecipes.ts'; -import { commandRouter } from './commandRouter.ts'; -import { queryRouter } from './queryRouter.ts'; -import { AddRecipeCommandSchemaUrl } from './schemas/addRecipeCommandSchema.ts'; -import { DeleteRecipeCommandSchemaUrl } from 
'./schemas/deleteRecipeCommandSchema.ts'; -import { GetRecipeQuerySchemaUrl } from './schemas/getRecipeQuerySchema.ts'; -import { ListRecipesQuerySchemaUrl } from './schemas/listRecipesQuerySchema.ts'; -import { UpdateRecipeCommandSchemaUrl } from './schemas/updateRecipeCommandSchema.ts'; - -export const recipeRouter = new NimbusOakRouter(); - -// Add a new recipe -// -// POST /recipes -recipeRouter.command({ - path: '/', - messageType: AddRecipeCommandType, - router: commandRouter, - dataschema: AddRecipeCommandSchemaUrl, - // extractData defaults to reading JSON body -}); - -// Update a recipe -// -// PUT /recipes/:slug -recipeRouter.commandPut({ - path: '/:slug', - messageType: UpdateRecipeCommandType, - router: commandRouter, - dataschema: UpdateRecipeCommandSchemaUrl, - extractData: async (ctx) => { - const body = await ctx.request.body.json(); - return { - ...body, - slug: ctx.params.slug, - }; - }, -}); - -// Delete a recipe -// -// DELETE /recipes/:slug -recipeRouter.commandDelete({ - path: '/:slug', - messageType: DeleteRecipeCommandType, - router: commandRouter, - dataschema: DeleteRecipeCommandSchemaUrl, - extractData: (ctx) => ({ - slug: ctx.params.slug, - }), -}); - -// Get a recipe by slug -// -// GET /recipes/:slug -recipeRouter.query({ - path: '/:slug', - messageType: GetRecipeQueryType, - router: queryRouter, - dataschema: GetRecipeQuerySchemaUrl, - extractData: (ctx) => ({ - slug: ctx.params.slug, - }), -}); - -// List recipes -// -// GET /recipes -recipeRouter.query({ - path: '/', - messageType: ListRecipesQueryType, - router: queryRouter, - dataschema: ListRecipesQuerySchemaUrl, - extractData: (ctx) => { - const { limit, offset } = Object.fromEntries( - ctx.request.url.searchParams, - ); - - return { - limit: limit ? Number(limit) : undefined, - offset: offset ? Number(offset) : undefined, - }; - }, -}); diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts deleted file mode 100644 index d0e9606..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { commandSchema } from '@nimbus/core'; -import type { SchemaObject } from 'ajv'; - -export const AddRecipeCommandSchemaUrl = - 'https://api.gustav.app/schemas/commands/add-recipe/v1' as const; - -/** - * JSON Schema for AddRecipeCommand - * - * This schema validates the data payload of the add-recipe command. - * It will be registered with the validator and referenced via dataschema URL. 
- */ -export const addRecipeCommandSchema: SchemaObject = { - ...commandSchema, - $id: AddRecipeCommandSchemaUrl, - properties: { - ...commandSchema.properties, - data: { - type: 'object', - required: ['slug', 'name', 'ingredients', 'instructions', 'tags'], - properties: { - slug: { - type: 'string', - pattern: '^[a-z0-9-]+$', - minLength: 1, - maxLength: 100, - }, - name: { - type: 'string', - minLength: 1, - maxLength: 200, - }, - ingredients: { - type: 'array', - items: { - type: 'object', - required: ['name', 'amount', 'unit'], - properties: { - name: { type: 'string', minLength: 1 }, - amount: { type: 'number', minimum: 1 }, - unit: { type: 'string', minLength: 1 }, - productId: { type: 'string', minLength: 1 }, - }, - }, - }, - instructions: { - type: 'array', - items: { - type: 'string', - minLength: 1, - }, - }, - tags: { - type: 'array', - items: { - type: 'string', - minLength: 1, - }, - }, - }, - }, - dataschema: { - const: AddRecipeCommandSchemaUrl, - }, - }, -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts deleted file mode 100644 index 05a1a83..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { querySchema } from '@nimbus/core'; -import type { SchemaObject } from 'ajv'; - -export const DeleteRecipeCommandSchemaUrl = - 'https://api.gustav.app/schemas/commands/delete-recipe/v1' as const; - -export const deleteRecipeCommandSchema: SchemaObject = { - ...querySchema, - $id: DeleteRecipeCommandSchemaUrl, - properties: { - ...querySchema.properties, - data: { - type: 'object', - required: ['slug'], - properties: { - slug: { - type: 'string', - pattern: '^[a-z0-9-]+$', - minLength: 1, - maxLength: 100, - }, - }, - }, - dataschema: { - const: DeleteRecipeCommandSchemaUrl, - }, - }, -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts deleted file mode 100644 index c0f8a2c..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { querySchema } from '@nimbus/core'; -import type { SchemaObject } from 'ajv'; - -export const GetRecipeQuerySchemaUrl = - 'https://api.gustav.app/schemas/queries/get-recipe/v1' as const; - -/** - * JSON Schema for GetRecipeQuery - * - * This schema validates the data payload of the get-recipe query. - * It will be registered with the validator and referenced via dataschema URL. 
- */ -export const getRecipeQuerySchema: SchemaObject = { - ...querySchema, - $id: GetRecipeQuerySchemaUrl, - properties: { - ...querySchema.properties, - data: { - type: 'object', - required: ['slug'], - properties: { - slug: { - type: 'string', - pattern: '^[a-z0-9-]+$', - minLength: 1, - maxLength: 100, - }, - }, - }, - dataschema: { - const: GetRecipeQuerySchemaUrl, - }, - }, -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts deleted file mode 100644 index 495d8d4..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { querySchema } from '@nimbus/core'; -import type { SchemaObject } from 'ajv'; - -export const ListRecipesQuerySchemaUrl = - 'https://api.gustav.app/schemas/queries/list-recipes/v1' as const; - -export const listRecipesQuerySchema: SchemaObject = { - ...querySchema, - $id: ListRecipesQuerySchemaUrl, - properties: { - ...querySchema.properties, - data: { - type: 'object', - required: [], - properties: { - limit: { - type: 'number', - minimum: 1, - maximum: 100, - }, - offset: { - type: 'number', - minimum: 0, - }, - }, - }, - dataschema: { - const: ListRecipesQuerySchemaUrl, - }, - }, -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts b/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts deleted file mode 100644 index d5e43d0..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { commandSchema } from '@nimbus/core'; -import type { SchemaObject } from 'ajv'; - -export const UpdateRecipeCommandSchemaUrl = - 'https://api.gustav.app/schemas/commands/update-recipe/v1' as const; - -/** - * JSON Schema for UpdateRecipeCommand - * - * This schema validates the data payload of the update-recipe command. - * It will be registered with the validator and referenced via dataschema URL. 
- */ -export const updateRecipeCommandSchema: SchemaObject = { - ...commandSchema, - $id: UpdateRecipeCommandSchemaUrl, - properties: { - ...commandSchema.properties, - data: { - type: 'object', - required: ['slug'], - properties: { - slug: { - type: 'string', - pattern: '^[a-z0-9-]+$', - minLength: 1, - maxLength: 100, - }, - name: { - type: 'string', - minLength: 1, - maxLength: 200, - }, - ingredients: { - type: 'array', - items: { - type: 'object', - required: ['name', 'amount', 'unit'], - properties: { - name: { type: 'string', minLength: 1 }, - amount: { type: 'number', minimum: 1 }, - unit: { type: 'string', minLength: 1 }, - productId: { type: 'string', minLength: 1 }, - }, - }, - }, - instructions: { - type: 'array', - items: { - type: 'string', - minLength: 1, - }, - }, - tags: { - type: 'array', - items: { - type: 'string', - minLength: 1, - }, - }, - }, - }, - dataschema: { - const: UpdateRecipeCommandSchemaUrl, - }, - }, -}; diff --git a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts b/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts deleted file mode 100644 index aae62c9..0000000 --- a/examples/gustav/src/contexts/recipe/infrastructure/repository/recipeMemoryRepository.ts +++ /dev/null @@ -1,91 +0,0 @@ -import { NotFoundException } from '@nimbus/core'; -import { ulid } from '@std/ulid'; -import type { Recipe } from '../../core/domain/recipe.ts'; -import { RecipeRepository } from '../../core/ports/recipeRepository.ts'; - -const makeMemoryRepository = (): RecipeRepository => { - const store = new Map(); - - return { - generateId: () => { - return ulid(); - }, - - // deno-lint-ignore require-await - insert: async (recipe) => { - store.set(recipe.slug, recipe); - return recipe; - }, - - // deno-lint-ignore require-await - update: async (slug, updates) => { - const existingRecipe = store.get(slug); - - if (!existingRecipe) { - throw new NotFoundException('Recipe not found', { - errorCode: 'RECIPE_NOT_FOUND', - }); - } - - const updatedRecipe = { ...existingRecipe, ...updates }; - store.set(slug, updatedRecipe); - return updatedRecipe; - }, - - // deno-lint-ignore require-await - delete: async (slug) => { - const recipe = store.get(slug); - - if (!recipe) { - throw new NotFoundException('Recipe not found', { - errorCode: 'RECIPE_NOT_FOUND', - }); - } - - store.delete(slug); - }, - - // deno-lint-ignore require-await - getBySlug: async (slug) => { - const recipe = store.get(slug); - - if (!recipe) { - throw new NotFoundException('Recipe not found', { - errorCode: 'RECIPE_NOT_FOUND', - }); - } - - return recipe; - }, - - // deno-lint-ignore require-await - list: async (options) => { - const limit = options?.limit ?? 10; - const offset = options?.offset ?? 0; - const filter = options?.filter ?? {}; - - return Array.from(store.values()) - .filter((recipe) => { - return Object.entries(filter).every(([key, value]) => { - return recipe[key as keyof Recipe] === value; - }); - }) - .slice(offset, offset + limit); - }, - - // deno-lint-ignore require-await - count: async (options) => { - const filter = options?.filter ?? 
{}; - - return Array.from(store.values()) - .filter((recipe) => { - return Object.entries(filter).every(([key, value]) => { - return recipe[key as keyof Recipe] === value; - }); - }) - .length; - }, - }; -}; - -export const recipeMemoryRepository = makeMemoryRepository(); diff --git a/examples/gustav/src/exampleApiSpec.json b/examples/gustav/src/exampleApiSpec.json deleted file mode 100644 index b6eb571..0000000 --- a/examples/gustav/src/exampleApiSpec.json +++ /dev/null @@ -1,183 +0,0 @@ -{ - "asyncapi": "3.0.0", - "info": { - "title": "Nimbus Example Gustav", - "version": "1.0.0", - "description": "A Nimbus example " - }, - "defaultContentType": "application/json", - "servers": { - "http-api": { - "host": "api.nimbus.overlp.at", - "protocol": "https", - "description": "HTTP Server", - "security": [ - { - "$ref": "#/components/securitySchemes/apiKey" - } - ] - } - }, - "channels": { - "commands": { - "address": "/api/command", - "description": "API to send commands", - "bindings": { - "http": { - "method": "POST" - } - }, - "messages": { - "AddRecipeCommand": { - "$ref": "#/components/messages/AddRecipeCommand" - } - } - }, - "queries": { - "address": "/api/query", - "description": "API to send queries", - "bindings": { - "http": { - "method": "POST" - } - } - } - }, - "operations": { - "command": { - "title": "Command", - "action": "send", - "channel": { - "$ref": "#/channels/commands" - } - }, - "query": { - "title": "Query", - "action": "send", - "channel": { - "$ref": "#/channels/queries" - } - } - }, - "components": { - "messages": { - "AddRecipeCommand": { - "name": "AddRecipeCommand", - "contentType": "application/cloudevents+json", - "payload": { - "$ref": "#/components/schemas/AddRecipeCommand" - } - } - }, - "schemas": { - "CommandSchema": { - "$id": "https://api.nimbus.overlap.at/schemas/command/v1", - "type": "object", - "required": [ - "specversion", - "id", - "correlationid", - "time", - "source", - "type", - "data" - ], - "properties": { - "specversion": { - "const": "1.0" - }, - "id": { - "type": "string", - "minLength": 1 - }, - "correlationid": { - "type": "string", - "minLength": 1 - }, - "time": { - "type": "string", - "format": "date-time", - "minLength": 1 - }, - "source": { - "type": "string", - "format": "uri-reference", - "minLength": 1 - }, - "type": { - "type": "string", - "minLength": 1 - }, - "subject": { - "type": "string", - "minLength": 1 - }, - "data": { - "type": ["object", "string", "number", "array", "boolean"] - }, - "datacontenttype": { - "type": "string", - "minLength": 1 - }, - "dataschema": { - "type": "string", - "format": "uri", - "minLength": 1 - } - } - }, - - "AddRecipeCommand": { - "$id": "https://api.nimbus.overlap.at/schemas/add-recipe-command/v1", - "allOf": [ - { "$ref": "#/components/schemas/CommandSchema" }, - { - "type": "object", - "properties": { - "source": { "const": "https://nimbus.overlap.at" }, - "type": { "const": "at.overlap.nimbus.app-recipe" }, - "data": { "$ref": "#/components/schemas/Recipe" }, - "datacontenttype": { "const": "application/json" }, - "dataschema": { - "const": "https://api.nimbus.overlap.at/schemas/add-recipe-command/v1" - } - } - } - ] - }, - - "Recipe": { - "$id": "https://api.nimbus.overlap.at/schemas/recipe/v1", - "type": "object", - "required": ["slug", "name", "instructions", "ingredients"], - "properties": { - "slug": { - "type": "string" - }, - "name": { - "type": "string" - }, - "instructions": { - "type": "array", - "items": { - "type": "string" - } - }, - "ingredients": { - "type": "array", - 
"items": { - "type": "string" - } - } - } - } - }, - "securitySchemes": { - "apiKey": { - "type": "httpApiKey", - "in": "header", - "name": "X-API-Key" - } - } - } -} diff --git a/examples/gustav/src/main.ts b/examples/gustav/src/main.ts deleted file mode 100644 index cdd3ccb..0000000 --- a/examples/gustav/src/main.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { - jsonLogFormatter, - parseLogLevel, - prettyLogFormatter, - setupLogger, -} from '@nimbus/core'; -import { Application } from '@oak/oak/application'; -import '@std/dotenv/load'; -import { oakCors } from '@tajpouria/cors'; -import process from 'node:process'; -import { exampleAuthMiddleware } from './contexts/iam/infrastructure/http/auth.middleware.ts'; -import { initEventStore } from './shared/infrastructure/eventStore.ts'; -import { router } from './shared/infrastructure/http/router.ts'; -import { registerSchemas } from './shared/infrastructure/http/schemas.ts'; -import { initMongoConnectionManager } from './shared/infrastructure/mongodb.ts'; - -// -// Setup logging with basic options provided by Nimbus -// -// See https://nimbus.overlap.at/guide/logging.html for more information about logging of Nimbus. -// -setupLogger({ - logLevel: parseLogLevel(process.env.LOG_LEVEL), - formatter: process.env.LOG_FORMAT === 'pretty' - ? prettyLogFormatter - : jsonLogFormatter, - useConsoleColors: process.env.LOG_FORMAT === 'pretty', -}); - -// Register JSON schemas for validation -registerSchemas(); - -// Initialize EventStore (includes event observer for read models) -await initEventStore(); - -// Initialize MongoDB Manager -initMongoConnectionManager(); - -// Oak HTTP Server APP -const app = new Application(); - -app.addEventListener('listen', ({ hostname, port, secure }) => { - console.log( - `Listening on: ${secure ? 'https://' : 'http://'}${ - hostname ?? 
'localhost' - }:${port}`, - ); -}); - -// CORS Middleware -app.use(oakCors()); - -// Auth Middleware -app.use(exampleAuthMiddleware); - -// API Routes -app.use(router.routes()); -app.use(router.allowedMethods()); - -// Get the server started -app.listen({ hostname: '0.0.0.0', port: 3100 }); diff --git a/examples/gustav/src/shared/core/domain/unit.ts b/examples/gustav/src/shared/core/domain/unit.ts deleted file mode 100644 index 853230c..0000000 --- a/examples/gustav/src/shared/core/domain/unit.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const Units = [ - 'piece', - 'gram', - 'milliliter', -] as const; - -export type Unit = typeof Units[number]; diff --git a/examples/gustav/src/shared/infrastructure/eventStore.ts b/examples/gustav/src/shared/infrastructure/eventStore.ts deleted file mode 100644 index 699c207..0000000 --- a/examples/gustav/src/shared/infrastructure/eventStore.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { getLogger } from '@nimbus/core'; -import type { EventStoreSubscription } from '@nimbus/eventsourcing'; -import { EventSourcingDBStore } from '@nimbus/eventsourcingdb'; -import { getEnv } from '@nimbus/utils'; -import { - RecipeAddedCommandType, - RecipeAddedEvent, -} from '../../contexts/recipe/core/events/recipeAdded.ts'; -import { - RecipeDeletedEvent, - RecipeDeletedEventType, -} from '../../contexts/recipe/core/events/recipeDeleted.ts'; -import { - RecipeUpdatedEvent, - RecipeUpdatedEventType, -} from '../../contexts/recipe/core/events/recipeUpdated.ts'; -import { recipeAddedHandler } from '../../contexts/recipe/infrastructure/eventHandler/recipeAdded.handler.ts'; -import { recipeDeletedHandler } from '../../contexts/recipe/infrastructure/eventHandler/recipeDeleted.handler.ts'; -import { recipeUpdatedHandler } from '../../contexts/recipe/infrastructure/eventHandler/recipeUpdated.handler.ts'; - -export let eventStore: EventSourcingDBStore; -let subscription: EventStoreSubscription | undefined; - -/** - * Initialize the event store and start observing events. - * - * This sets up: - * 1. Connection to EventSourcingDB - * 2. Event observer that keeps read models in sync - * - * Pattern: EventStore (source of truth) → Observer → Read Model Updates - */ -export const initEventStore = async () => { - const logger = getLogger(); - - // 1. Initialize EventStore connection - const { EVENTSOURCINGDB_API, EVENTSOURCINGDB_SECRET } = getEnv({ - variables: ['EVENTSOURCINGDB_API', 'EVENTSOURCINGDB_SECRET'], - }); - - eventStore = new EventSourcingDBStore({ - apiUrl: EVENTSOURCINGDB_API, - secret: EVENTSOURCINGDB_SECRET, - }); - - logger.info({ - category: 'Gustav', - message: 'EventStore initialized', - }); - - // 2. 
Start observing events to keep read models in sync - logger.info({ - category: 'Gustav', - message: 'Starting event observer for read model synchronization', - }); - - subscription = await eventStore.observe({ - subject: '/', - recursive: true, - // sinceEventId: '123', // Optional: Resume from checkpoint after restart - handler: async (event) => { - logger.debug({ - category: 'Gustav', - message: `Processing event: ${event.type}`, - data: { - eventId: event.eventstoremetadata.id, - subject: event.subject, - }, - }); - - // Route events to appropriate read model handlers - switch (event.type) { - case RecipeAddedCommandType: - await recipeAddedHandler(event as RecipeAddedEvent); - break; - - case RecipeUpdatedEventType: - await recipeUpdatedHandler(event as RecipeUpdatedEvent); - break; - - case RecipeDeletedEventType: - await recipeDeletedHandler(event as RecipeDeletedEvent); - break; - - default: - logger.debug({ - category: 'Gustav', - message: - `No handler registered for event type: ${event.type}`, - }); - } - }, - onError: (error, event) => { - logger.error({ - category: 'Gustav', - message: 'Failed to process event', - error, - data: { - eventType: event?.type, - eventId: event?.eventstoremetadata?.id, - }, - }); - }, - }); - - logger.info({ - category: 'Gustav', - message: 'Event observer started successfully', - }); -}; - -/** - * Cleanup event store observer on shutdown. - */ -export const stopEventStore = async () => { - if (subscription) { - await subscription.unsubscribe(); - getLogger().info({ - category: 'Gustav', - message: 'Event observer stopped', - }); - } -}; diff --git a/examples/gustav/src/shared/infrastructure/http/router.ts b/examples/gustav/src/shared/infrastructure/http/router.ts deleted file mode 100644 index 382c357..0000000 --- a/examples/gustav/src/shared/infrastructure/http/router.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { getLogger } from '@nimbus/core'; -import { NimbusOakRouter } from '@nimbus/oak'; -import { recipeRouter } from '../../../contexts/recipe/infrastructure/http/router.ts'; -import { mongoManager } from '../mongodb.ts'; - -export const router = new NimbusOakRouter(); - -router.get('/health', async (ctx) => { - const logger = getLogger(); - const now = new Date().toISOString(); - - const mongoHealth = await mongoManager.healthCheck(); - - logger.info({ - message: 'Health check', - data: { - time: now, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? { correlationId: ctx.state.correlationId } - : {}), - ...(ctx.state.authContext - ? { authContext: ctx.state.authContext } - : {}), - }, - }); - - ctx.response.body = { - status: mongoHealth.status === 'healthy' ? 'OK' : 'ERROR', - http: { - status: 'healthy', - }, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? 
{ correlationId: ctx.state.correlationId } - : {}), - time: now, - }; -}); - -router.use( - '/recipes', - recipeRouter.routes(), - recipeRouter.allowedMethods(), -); diff --git a/examples/gustav/src/shared/infrastructure/http/schemas.ts b/examples/gustav/src/shared/infrastructure/http/schemas.ts deleted file mode 100644 index acf2835..0000000 --- a/examples/gustav/src/shared/infrastructure/http/schemas.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { getValidator } from '@nimbus/core'; -import { addRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/addRecipeCommandSchema.ts'; -import { deleteRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/deleteRecipeCommandSchema.ts'; -import { getRecipeQuerySchema } from '../../../contexts/recipe/infrastructure/http/schemas/getRecipeQuerySchema.ts'; -import { listRecipesQuerySchema } from '../../../contexts/recipe/infrastructure/http/schemas/listRecipesQuerySchema.ts'; -import { updateRecipeCommandSchema } from '../../../contexts/recipe/infrastructure/http/schemas/updateRecipeCommandSchema.ts'; - -/** - * Register all JSON schemas with the Nimbus validator. - * - * This should be called during application startup, before any routes are accessed. - * The schemas are used to validate message payloads when dataschema is present. - */ -export function registerSchemas(): void { - const validator = getValidator(); - - validator.addSchema(addRecipeCommandSchema); - validator.addSchema(updateRecipeCommandSchema); - validator.addSchema(deleteRecipeCommandSchema); - validator.addSchema(getRecipeQuerySchema); - validator.addSchema(listRecipesQuerySchema); - - // TODO: Add more schemas as you develop more commands/queries -} diff --git a/examples/gustav/src/shared/infrastructure/mongodb.ts b/examples/gustav/src/shared/infrastructure/mongodb.ts deleted file mode 100644 index eed76b8..0000000 --- a/examples/gustav/src/shared/infrastructure/mongodb.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { MongoConnectionManager } from '@nimbus/mongodb'; -import { ServerApiVersion } from 'mongodb'; -import process from 'node:process'; - -export let mongoManager: MongoConnectionManager; - -export const initMongoConnectionManager = () => { - mongoManager = MongoConnectionManager.getInstance( - process.env['MONGO_URL'] ?? 
'', - { - connectionTimeout: 1000 * 60 * 5, - mongoClientOptions: { - appName: 'gustav', - serverApi: { - version: ServerApiVersion.v1, - strict: false, - deprecationErrors: true, - }, - maxPoolSize: 10, - minPoolSize: 0, - maxIdleTimeMS: 1000 * 60 * 1, // 1 minutes idle timeout - connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout - socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout - }, - }, - ); - - // Check to see if the MongoDB connection can be cleaned up - // This is to prevent the MongoDB connection from being left open for too long - setInterval(() => { - mongoManager.cleanup().catch(console.error); - }, 1000 * 60); // Check every minute -}; diff --git a/examples/hono-demo/deno.json b/examples/hono-demo/deno.json index b353100..85ceb24 100644 --- a/examples/hono-demo/deno.json +++ b/examples/hono-demo/deno.json @@ -28,6 +28,8 @@ }, "imports": { "@std/dotenv": "jsr:@std/dotenv@^0.225.6", - "hono": "npm:hono@^4.11.4" + "@std/ulid": "jsr:@std/ulid@^1.0.0", + "hono": "npm:hono@^4.11.4", + "zod": "npm:zod@^4.3.5" } } diff --git a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts new file mode 100644 index 0000000..564fde4 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts @@ -0,0 +1,38 @@ +import { z } from 'zod'; +import { commandSchema, InvalidInputException } from '@nimbus/core'; +import { UserState } from '../domain/user.ts'; + +export const ADD_USER_COMMAND_TYPE = 'at.overlap.nimbus.add-user'; + +export const addUserInputSchema = z.object({ + email: z.email(), + firstName: z.string(), + lastName: z.string(), +}); + +export const addUserCommandSchema = commandSchema.extend({ + type: z.literal(ADD_USER_COMMAND_TYPE), + data: addUserInputSchema, +}); +export type AddUserCommand = z.infer; + +export const addUser = ( + state: UserState, + command: AddUserCommand, +): UserState => { + // Always make sure to cast all user emails to lowercase + const email = command.data.email.toLowerCase(); + + if (state && state.email === email) { + throw new InvalidInputException('User with this email already exists'); + } + + return { + id: crypto.randomUUID(), + email: email, + firstName: command.data.firstName, + lastName: command.data.lastName, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + }; +}; diff --git a/examples/hono-demo/src/iam/users/core/domain/user.ts b/examples/hono-demo/src/iam/users/core/domain/user.ts new file mode 100644 index 0000000..03c0560 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/domain/user.ts @@ -0,0 +1,10 @@ +export type User = { + id: string; + email: string; + firstName: string; + lastName: string; + createdAt: string; + updatedAt: string; +}; + +export type UserState = User | null; diff --git a/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts b/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts new file mode 100644 index 0000000..e69de29 diff --git a/examples/hono-demo/src/iam/users/shell/http/router.ts b/examples/hono-demo/src/iam/users/shell/http/router.ts new file mode 100644 index 0000000..f284c1c --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/http/router.ts @@ -0,0 +1,31 @@ +import { createCommand } from '@nimbus/core'; +import { getCorrelationId } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { messageRouter } from '../../../../shared/shell/messageRouter.ts'; +import { + ADD_USER_COMMAND_TYPE, + AddUserCommand, +} from 
'../../core/commands/addUser.command.ts'; + +const usersRouter = new Hono(); + +usersRouter.post( + '/add-user', + async (c) => { + const body = await c.req.json(); + const correlationId = getCorrelationId(c); + + const command = createCommand({ + type: ADD_USER_COMMAND_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: body, + }); + + const result = await messageRouter.route(command); + + return c.json(result); + }, +); + +export default usersRouter; diff --git a/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts new file mode 100644 index 0000000..ff4cbe7 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts @@ -0,0 +1,13 @@ +import { + addUser, + AddUserCommand, +} from '../../core/commands/addUser.command.ts'; +import { UserState } from '../../core/domain/user.ts'; + +export const addUserCommandHandler = async (command: AddUserCommand) => { + let state: UserState = await Promise.resolve(null); + + state = addUser(state, command); + + return state; +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts new file mode 100644 index 0000000..e48a3b0 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts @@ -0,0 +1,14 @@ +import { messageRouter } from '../../../../shared/shell/messageRouter.ts'; +import { + ADD_USER_COMMAND_TYPE, + addUserCommandSchema, +} from '../../core/commands/addUser.command.ts'; +import { addUserCommandHandler } from './addUser.command.ts'; + +export const registerUserMessages = () => { + messageRouter.register( + ADD_USER_COMMAND_TYPE, + addUserCommandHandler, + addUserCommandSchema, + ); +}; diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts index 38eaf19..c105b95 100644 --- a/examples/hono-demo/src/main.ts +++ b/examples/hono-demo/src/main.ts @@ -7,11 +7,8 @@ import { prettyLogFormatter, setupLogger, } from '@nimbus/core'; -import { correlationId, logger } from '@nimbus/hono'; -import { Hono } from 'hono'; -import { cors } from 'hono/cors'; -import { secureHeaders } from 'hono/secure-headers'; -import { compress } from 'hono/compress'; +import { initMessages } from './shared/shell/messageRouter.ts'; +import { app } from './shared/shell/http.ts'; setupLogger({ logLevel: parseLogLevel(process.env.LOG_LEVEL), @@ -21,29 +18,13 @@ setupLogger({ useConsoleColors: process.env.LOG_FORMAT === 'pretty', }); -const app = new Hono(); - -app.use(correlationId()); - -app.use(logger({ - enableTracing: true, - tracerName: 'api', -})); - -app.use(cors()); - -app.use(secureHeaders()); - -app.use(compress()); - -app.get('/health', (c) => { - return c.json({ status: 'ok' }); -}); +initMessages(); if (process.env.PORT) { const port = parseInt(process.env.PORT); - Deno.serve({ port }, app.fetch); + Deno.serve({ hostname: '0.0.0.0', port }, app.fetch); + getLogger().info({ category: 'API', message: `Started application on port ${port}`, diff --git a/examples/hono-demo/src/shared/shell/http.ts b/examples/hono-demo/src/shared/shell/http.ts new file mode 100644 index 0000000..b418e76 --- /dev/null +++ b/examples/hono-demo/src/shared/shell/http.ts @@ -0,0 +1,31 @@ +import { correlationId, handleError, logger } from '@nimbus/hono'; +import { Hono } from 'hono'; +import { cors } from 'hono/cors'; +import { secureHeaders } from 'hono/secure-headers'; +import { compress } from 
'hono/compress'; +import usersRouter from '../../iam/users/shell/http/router.ts'; + +export const app = new Hono(); + +// TODO: document correlationId middleware +app.use(correlationId()); + +// TODO: document logger middleware and observability in general +app.use(logger({ + enableTracing: true, + tracerName: 'api', +})); + +app.use(cors()); + +app.use(secureHeaders()); + +app.use(compress()); + +app.get('/health', (c) => { + return c.json({ status: 'ok' }); +}); + +app.route('/iam/users', usersRouter); + +app.onError(handleError); diff --git a/examples/hono-demo/src/shared/shell/messageRouter.ts b/examples/hono-demo/src/shared/shell/messageRouter.ts new file mode 100644 index 0000000..714a02f --- /dev/null +++ b/examples/hono-demo/src/shared/shell/messageRouter.ts @@ -0,0 +1,29 @@ +import { getLogger, MessageRouter } from '@nimbus/core'; +import { registerUserMessages } from '../../iam/users/shell/messages/registerUserMessages.ts'; + +export const messageRouter = new MessageRouter({ + logInput: (input) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Received input', + data: { input }, + ...(input?.correlationid + ? { correlationId: input.correlationid } + : {}), + }); + }, + logOutput: (output) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Output', + data: { output }, + ...(output?.correlationid + ? { correlationId: output.correlationid } + : {}), + }); + }, +}); + +export const initMessages = () => { + registerUserMessages(); +}; diff --git a/examples/the-expense/README.md b/examples/the-expense/README.md deleted file mode 100644 index 98c6f13..0000000 --- a/examples/the-expense/README.md +++ /dev/null @@ -1,16 +0,0 @@ -Nimbus - -# The Expense - A Nimbus Demo App - -We build a little app to track our regular expenses. - -This application is NOT production ready, it contains some unsecure code. It is just a simple example to show how to build an application with Nimbus. 
- -## Start App - -``` -deno task dev -``` diff --git a/examples/the-expense/deno.json b/examples/the-expense/deno.json deleted file mode 100644 index 3ec6640..0000000 --- a/examples/the-expense/deno.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "tasks": { - "dev": "deno run -A --watch src/main.ts", - "dev:otel": "sh start-with-otel.sh", - "test": "deno test -A", - "database:seed": "deno run -A src/seedCollections.ts" - }, - "lint": { - "include": ["src/"], - "exclude": [], - "rules": { - "tags": ["recommended"], - "include": [], - "exclude": ["no-explicit-any"] - } - }, - "fmt": { - "include": ["src/"], - "exclude": ["src/shared/mailing/core/*.html"], - "useTabs": false, - "lineWidth": 80, - "indentWidth": 4, - "semiColons": true, - "singleQuote": true, - "proseWrap": "always" - }, - "test": { - "include": ["src/"] - }, - "imports": { - "@oak/oak": "jsr:@oak/oak@^17.1.4", - "@std/ulid": "jsr:@std/ulid@^1.0.0", - "@tajpouria/cors": "jsr:@tajpouria/cors@^1.2.1", - "mongodb": "npm:mongodb@^6.12.0", - "zod": "npm:zod@^3.24.1" - } -} diff --git a/examples/the-expense/src/account/core/account.type.ts b/examples/the-expense/src/account/core/account.type.ts deleted file mode 100644 index 126a871..0000000 --- a/examples/the-expense/src/account/core/account.type.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { z } from 'zod'; - -export const AccountStatus = z.enum(['active', 'frozen']); -export type AccountStatus = z.infer; - -export const Account = z.object({ - _id: z.string().length(24), - name: z.string(), - status: AccountStatus, -}); -export type Account = z.infer; diff --git a/examples/the-expense/src/account/core/commands/addAccount.ts b/examples/the-expense/src/account/core/commands/addAccount.ts deleted file mode 100644 index 146852c..0000000 --- a/examples/the-expense/src/account/core/commands/addAccount.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { AuthContext, Command, InvalidInputException } from '@nimbus/core'; -import { ObjectId } from 'mongodb'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const AddAccountData = z.object({ - name: z.string(), -}); -export type AddAccountData = z.infer; - -export const AddAccountCommand = Command( - z.literal('account.add'), - AddAccountData, - AuthContext, -); -export type AddAccountCommand = z.infer; - -export const addAccount = ( - data: AddAccountData, - authContext?: AuthContext, -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - return { - _id: new ObjectId().toString(), - name: data.name, - status: 'active', - }; -}; diff --git a/examples/the-expense/src/account/core/commands/deleteAccount.ts b/examples/the-expense/src/account/core/commands/deleteAccount.ts deleted file mode 100644 index 75ada58..0000000 --- a/examples/the-expense/src/account/core/commands/deleteAccount.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { AuthContext, Command, InvalidInputException } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const DeleteAccountData = z.object({ - _id: z.string().length(24), -}); -export type DeleteAccountData = z.infer; - -export const DeleteAccountCommand = Command( - z.literal('account.delete'), - DeleteAccountData, - AuthContext, -); -export type DeleteAccountCommand = z.infer; - -export const deleteAccount = ( - account: Account, - authContext?: AuthContext, -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - return account; -}; diff --git 
a/examples/the-expense/src/account/core/events/accountAdded.ts b/examples/the-expense/src/account/core/events/accountAdded.ts deleted file mode 100644 index 187121f..0000000 --- a/examples/the-expense/src/account/core/events/accountAdded.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Event } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const AccountAddedData = z.object({ - account: Account, -}); -export type AccountAddedData = z.infer; - -export const AccountAddedEvent = Event( - z.literal('account.added'), - AccountAddedData, -); -export type AccountAddedEvent = z.infer; diff --git a/examples/the-expense/src/account/core/queries/getAccount.ts b/examples/the-expense/src/account/core/queries/getAccount.ts deleted file mode 100644 index c2262a2..0000000 --- a/examples/the-expense/src/account/core/queries/getAccount.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { AuthContext, InvalidInputException, Query } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const GetAccountQuery = Query( - z.literal('account.get'), - z.object({ - id: z.string().length(24), - }), - AuthContext, -); -export type GetAccountQuery = z.infer; - -export const getAccount = ( - data: Account, - authContext?: AuthContext, -): Account => { - if (!authContext) { - throw new InvalidInputException(); - } - - return data; -}; diff --git a/examples/the-expense/src/account/core/queries/listAccounts.ts b/examples/the-expense/src/account/core/queries/listAccounts.ts deleted file mode 100644 index c7dd9cf..0000000 --- a/examples/the-expense/src/account/core/queries/listAccounts.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { AuthContext, InvalidInputException, Query } from '@nimbus/core'; -import { z } from 'zod'; -import { Account } from '../account.type.ts'; - -export const ListAccountsQuery = Query( - z.literal('account.list'), - z.object({ - limit: z.string().optional(), - skip: z.string().optional(), - filter: z.string().optional(), - sortBy: z.string().optional(), - sortDir: z.enum(['asc', 'desc']).optional(), - }), - AuthContext, -); -export type ListAccountsQuery = z.infer; - -export const listAccounts = ( - data: Account[], - authContext?: AuthContext, -): Account[] => { - if (!authContext) { - throw new InvalidInputException(); - } - - return data; -}; diff --git a/examples/the-expense/src/account/shell/account.collection.ts b/examples/the-expense/src/account/shell/account.collection.ts deleted file mode 100644 index 6d9411c..0000000 --- a/examples/the-expense/src/account/shell/account.collection.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { MongoCollectionDefinition } from '@nimbus/mongodb'; - -export const ACCOUNT_COLLECTION: MongoCollectionDefinition = { - name: 'accounts', - options: { - validator: { - $jsonSchema: { - bsonType: 'object', - required: [ - 'name', - 'status', - ], - properties: { - name: { - bsonType: 'string', - }, - status: { - bsonType: 'string', - enum: ['active', 'archived'], - }, - }, - }, - }, - }, - indexes: [ - { key: { name: 1 }, unique: true }, - { key: { status: 1 } }, - ], -}; diff --git a/examples/the-expense/src/account/shell/account.eventBus.ts b/examples/the-expense/src/account/shell/account.eventBus.ts deleted file mode 100644 index 8c3a2a7..0000000 --- a/examples/the-expense/src/account/shell/account.eventBus.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { RouteHandlerMap } from '@nimbus/core'; -import { AccountAddedEvent } from '../core/events/accountAdded.ts'; -import { 
accountAddedHandler } from './events/accountAdded.handler.ts'; - -export const accountEventSubscriptions: RouteHandlerMap = { - 'account.added': { - handler: accountAddedHandler, - inputType: AccountAddedEvent, - }, -}; diff --git a/examples/the-expense/src/account/shell/account.repository.ts b/examples/the-expense/src/account/shell/account.repository.ts deleted file mode 100644 index 6e31438..0000000 --- a/examples/the-expense/src/account/shell/account.repository.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { MongoDBRepository } from '@nimbus/mongodb'; -import { getEnv } from '@nimbus/utils'; -import { Document, ObjectId } from 'mongodb'; -import { mongoManager } from '../../mongodb.ts'; -import { Account } from '../core/account.type.ts'; -import { ACCOUNT_COLLECTION } from './account.collection.ts'; - -class AccountRepository extends MongoDBRepository { - constructor() { - const env = getEnv({ variables: ['MONGO_DB'] }); - - super( - () => { - return mongoManager.getCollection( - env.MONGO_DB, - ACCOUNT_COLLECTION.name, - ); - }, - Account, - 'Account', - ); - } - - override _mapDocumentToEntity(doc: Document): Account { - return Account.parse({ - _id: doc._id.toString(), - name: doc.name, - status: doc.status, - }); - } - - override _mapEntityToDocument(client: Account): Document { - return { - _id: new ObjectId(client._id), - name: client.name, - status: client.status, - }; - } -} - -export const accountRepository = new AccountRepository(); diff --git a/examples/the-expense/src/account/shell/account.router.ts b/examples/the-expense/src/account/shell/account.router.ts deleted file mode 100644 index e69d44b..0000000 --- a/examples/the-expense/src/account/shell/account.router.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { NimbusOakRouter } from '@nimbus/oak'; -import { AddAccountCommand } from '../core/commands/addAccount.ts'; -import { DeleteAccountCommand } from '../core/commands/deleteAccount.ts'; -import { GetAccountQuery } from '../core/queries/getAccount.ts'; -import { ListAccountsQuery } from '../core/queries/listAccounts.ts'; -import { addAccountHandler } from './commands/addAccount.handler.ts'; -import { deleteAccountHandler } from './commands/deleteAccount.handler.ts'; -import { getAccountHandler } from './queries/getAccount.handler.ts'; -import { listAccountsHandler } from './queries/listAccounts.handler.ts'; - -export const accountRouter = new NimbusOakRouter(); - -accountRouter.query( - '/', - 'account.list', - ListAccountsQuery, - listAccountsHandler, -); - -accountRouter.query( - '/:id', - 'account.get', - GetAccountQuery, - getAccountHandler, -); - -accountRouter.command( - '/add-account', - 'account.add', - AddAccountCommand, - addAccountHandler, -); - -accountRouter.command( - '/delete-account', - 'account.delete', - DeleteAccountCommand, - deleteAccountHandler, -); diff --git a/examples/the-expense/src/account/shell/commands/addAccount.handler.ts b/examples/the-expense/src/account/shell/commands/addAccount.handler.ts deleted file mode 100644 index 1d743b8..0000000 --- a/examples/the-expense/src/account/shell/commands/addAccount.handler.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { InvalidInputException, type RouteHandler } from '@nimbus/core'; -import { ulid } from '@std/ulid'; -import { eventBus } from '../../../eventBus.ts'; -import { Account } from '../../core/account.type.ts'; -import { - addAccount, - AddAccountCommand, -} from '../../core/commands/addAccount.ts'; -import { AccountAddedEvent } from '../../core/events/accountAdded.ts'; -import { accountRepository } from 
'../account.repository.ts'; - -export const addAccountHandler: RouteHandler = async ( - command: AddAccountCommand, -) => { - let account = addAccount( - command.data.payload, - command.data.authContext, - ); - - try { - account = await accountRepository.insertOne({ item: account }); - } catch (error: any) { - if (error.message.startsWith('E11000')) { - throw new InvalidInputException( - 'Account already exists', - { - errorCode: 'ACCOUNT_ALREADY_EXISTS', - reason: 'An account with the same name already exists', - }, - ); - } - - throw error; - } - - eventBus.putEvent({ - specversion: '1.0', - id: ulid(), - source: command.source, - type: 'account.added', - data: { - correlationId: command.data.correlationId, - payload: { - account: account, - }, - }, - }); - - return { - statusCode: 200, - data: account, - }; -}; diff --git a/examples/the-expense/src/account/shell/commands/deleteAccount.handler.ts b/examples/the-expense/src/account/shell/commands/deleteAccount.handler.ts deleted file mode 100644 index 6fa0e4b..0000000 --- a/examples/the-expense/src/account/shell/commands/deleteAccount.handler.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { type RouteHandler } from '@nimbus/core'; -import { ObjectId } from 'mongodb'; -import { - deleteAccount, - DeleteAccountCommand, -} from '../../core/commands/deleteAccount.ts'; -import { accountRepository } from '../account.repository.ts'; - -export const deleteAccountHandler: RouteHandler = async ( - command: DeleteAccountCommand, -) => { - let account = await accountRepository.findOne({ - filter: { - _id: new ObjectId(command.data.payload._id), - }, - }); - - account = deleteAccount(account, command.data.authContext); - - await accountRepository.deleteOne({ item: account }); - - return { - statusCode: 204, - }; -}; diff --git a/examples/the-expense/src/account/shell/events/accountAdded.handler.ts b/examples/the-expense/src/account/shell/events/accountAdded.handler.ts deleted file mode 100644 index bab3e39..0000000 --- a/examples/the-expense/src/account/shell/events/accountAdded.handler.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { getLogger, RouteHandler } from '@nimbus/core'; -import { - AccountAddedData, - AccountAddedEvent, -} from '../../core/events/accountAdded.ts'; - -export const accountAddedHandler: RouteHandler< - AccountAddedEvent, - AccountAddedData -> = async ( - event, -) => { - await new Promise((resolve) => setTimeout(resolve, 1000)); - - getLogger().info({ - message: `New account was added: ${event.data.payload.account.name}`, - }); - - return { - statusCode: 200, - data: event.data.payload, - }; -}; diff --git a/examples/the-expense/src/account/shell/queries/getAccount.handler.ts b/examples/the-expense/src/account/shell/queries/getAccount.handler.ts deleted file mode 100644 index d5a8eee..0000000 --- a/examples/the-expense/src/account/shell/queries/getAccount.handler.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { RouteHandler } from '@nimbus/core'; -import { ObjectId } from 'mongodb'; -import { Account } from '../../core/account.type.ts'; -import { getAccount, GetAccountQuery } from '../../core/queries/getAccount.ts'; -import { accountRepository } from '../account.repository.ts'; - -export const getAccountHandler: RouteHandler< - GetAccountQuery, - Account -> = async (query) => { - let account = await accountRepository.findOne({ - filter: { _id: new ObjectId(query.data.payload.id) }, - }); - - account = getAccount(account, query.data.authContext); - - return { - statusCode: 200, - data: account, - }; -}; diff --git 
a/examples/the-expense/src/account/shell/queries/listAccounts.handler.ts b/examples/the-expense/src/account/shell/queries/listAccounts.handler.ts deleted file mode 100644 index 34cb18f..0000000 --- a/examples/the-expense/src/account/shell/queries/listAccounts.handler.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { RouteHandler } from '@nimbus/core'; -import { MongoJSON } from '@nimbus/mongodb'; -import type { WithPagination } from '../../../shared/withPagination.type.ts'; -import { Account } from '../../core/account.type.ts'; -import { - listAccounts, - ListAccountsQuery, -} from '../../core/queries/listAccounts.ts'; -import { accountRepository } from '../account.repository.ts'; - -export const listAccountsHandler: RouteHandler< - ListAccountsQuery, - WithPagination -> = async (query) => { - const params = query.data.payload; - const limit = parseInt(params.limit ?? '24'); - const skip = parseInt(params.skip ?? '0'); - const filter = MongoJSON.parse(params.filter ?? '{}'); - - let [accounts, total] = await Promise.all([ - accountRepository.find({ - filter, - limit, - skip, - sort: { - [params.sortBy ?? 'createdAt']: params.sortDir ?? - 'asc', - }, - }), - - accountRepository.countDocuments({ - filter, - }), - ]); - - accounts = listAccounts(accounts, query.data.authContext); - - return { - statusCode: 200, - data: { - limit, - skip, - total, - items: accounts, - }, - }; -}; diff --git a/examples/the-expense/src/auth/shell/auth.middleware.ts b/examples/the-expense/src/auth/shell/auth.middleware.ts deleted file mode 100644 index fa8bcb8..0000000 --- a/examples/the-expense/src/auth/shell/auth.middleware.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { AuthContext, getLogger } from '@nimbus/core'; -import type { Context } from '@oak/oak/context'; -import type { Next } from '@oak/oak/middleware'; - -/** - * ! NOT FOR PRODUCTION USE - * - * This is just a simple example of how to implement a middleware for authentication. 
- */ -export const exampleAuthMiddleware = async ( - ctx: Context, - next: Next, -) => { - const authorization = ctx.request.headers.get('authorization'); - - if (!authorization) { - const anonymousAuthContext: AuthContext = { - sub: 'anonymous', - groups: [], - }; - - ctx.state.authContext = anonymousAuthContext; - - await next(); - } else { - try { - const token = authorization?.replace('Bearer ', ''); - - if (token === 'very-special-secret') { - const adminAuthContext: AuthContext = { - sub: '02e50464-b051-70fa-25ef-63038890d80c', - groups: ['admin'], - }; - - ctx.state.authContext = adminAuthContext; - } else { - throw new Error('Invalid token'); - } - - await next(); - } catch (error: any) { - getLogger().error({ - message: 'Failed to authenticate user', - error, - }); - - ctx.response.status = 401; - ctx.response.body = { - message: 'Unauthorized', - }; - } - } -}; diff --git a/examples/the-expense/src/eventBus.ts b/examples/the-expense/src/eventBus.ts deleted file mode 100644 index d24f6a2..0000000 --- a/examples/the-expense/src/eventBus.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { NimbusEventBus, RouteHandlerMap } from '@nimbus/core'; -import { accountEventSubscriptions } from './account/shell/account.eventBus.ts'; - -export const eventBus = new NimbusEventBus({ - maxRetries: 3, -}); - -export const initEventBusSubscriptions = () => { - const subscriptions: Record = { - account: accountEventSubscriptions, - }; - - for (const [, handlerMap] of Object.entries(subscriptions)) { - for (const eventName of Object.keys(handlerMap)) { - eventBus.subscribeEvent( - eventName, - handlerMap[eventName].inputType, - handlerMap[eventName].handler, - ); - } - } -}; diff --git a/examples/the-expense/src/main.ts b/examples/the-expense/src/main.ts deleted file mode 100644 index fef2fd8..0000000 --- a/examples/the-expense/src/main.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { - jsonLogFormatter, - parseLogLevel, - prettyLogFormatter, - setupLogger, -} from '@nimbus/core'; -import { requestCorrelationId } from '@nimbus/oak'; -import { Application } from '@oak/oak/application'; -import { oakCors } from '@tajpouria/cors'; -import 'jsr:@std/dotenv/load'; -import process from 'node:process'; -import { exampleAuthMiddleware } from './auth/shell/auth.middleware.ts'; -import { initEventBusSubscriptions } from './eventBus.ts'; -import { initMongoConnectionManager } from './mongodb.ts'; -import { router } from './router.ts'; - -// -// Setup logging with basic options provided by Nimbus -// -// See https://nimbus.overlap.at/guide/logging.html for more information about logging of Nimbus. -// -setupLogger({ - logLevel: parseLogLevel(process.env.LOG_LEVEL), - formatter: process.env.LOG_FORMAT === 'pretty' - ? prettyLogFormatter - : jsonLogFormatter, - useConsoleColors: process.env.LOG_FORMAT === 'pretty', -}); - -// Initialize MongoDB Manager -initMongoConnectionManager(); - -// Initialize Event Bus Subscriptions -initEventBusSubscriptions(); - -// Oak HTTP Server APP -const app = new Application(); - -app.addEventListener('listen', ({ hostname, port, secure }) => { - console.log( - `Listening on: ${secure ? 'https://' : 'http://'}${ - hostname ?? 
'localhost' - }:${port}`, - ); -}); - -// CORS Middleware -app.use(oakCors()); - -// Correlation ID Middleware -app.use(requestCorrelationId); - -// Auth Middleware -app.use(exampleAuthMiddleware); - -// API Routes -app.use(router.routes()); -app.use(router.allowedMethods()); - -// Get the server started -app.listen({ port: 3100 }); diff --git a/examples/the-expense/src/mongodb.ts b/examples/the-expense/src/mongodb.ts deleted file mode 100644 index 5f81ced..0000000 --- a/examples/the-expense/src/mongodb.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { MongoConnectionManager } from '@nimbus/mongodb'; -import { ServerApiVersion } from 'mongodb'; -import process from 'node:process'; - -export let mongoManager: MongoConnectionManager; - -export const initMongoConnectionManager = () => { - mongoManager = MongoConnectionManager.getInstance( - process.env['MONGO_URL'] ?? '', - { - connectionTimeout: 1000 * 60 * 5, - mongoClientOptions: { - appName: 'the-expanse', - serverApi: { - version: ServerApiVersion.v1, - strict: false, - deprecationErrors: true, - }, - maxPoolSize: 10, - minPoolSize: 0, - maxIdleTimeMS: 1000 * 60 * 1, // 1 minutes idle timeout - connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout - socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout - }, - }, - ); - - // Check to see if the MongoDB connection can be cleaned up - // This is to prevent the MongoDB connection from being left open for too long - setInterval(() => { - mongoManager.cleanup().catch(console.error); - }, 1000 * 60); // Check every minute -}; diff --git a/examples/the-expense/src/router.ts b/examples/the-expense/src/router.ts deleted file mode 100644 index fb1c189..0000000 --- a/examples/the-expense/src/router.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { getLogger } from '@nimbus/core'; -import { NimbusOakRouter } from '@nimbus/oak'; -import { accountRouter } from './account/shell/account.router.ts'; -import { mongoManager } from './mongodb.ts'; - -export const router = new NimbusOakRouter(); - -router.get('/health', async (ctx) => { - const logger = getLogger(); - const now = new Date().toISOString(); - - const mongoHealth = await mongoManager.healthCheck(); - - logger.info({ - message: 'Health check', - data: { - time: now, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? { correlationId: ctx.state.correlationId } - : {}), - ...(ctx.state.authContext - ? { authContext: ctx.state.authContext } - : {}), - }, - }); - - ctx.response.body = { - status: mongoHealth.status === 'healthy' ? 'OK' : 'ERROR', - http: { - status: 'healthy', - }, - database: { ...mongoHealth }, - ...(ctx.state.correlationId - ? 
{ correlationId: ctx.state.correlationId } - : {}), - time: now, - }; -}); - -router.use( - '/accounts', - accountRouter.routes(), - accountRouter.allowedMethods(), -); diff --git a/examples/the-expense/src/seedCollections.ts b/examples/the-expense/src/seedCollections.ts deleted file mode 100644 index 02eec2b..0000000 --- a/examples/the-expense/src/seedCollections.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { deployMongoCollection } from '@nimbus/mongodb'; -import 'jsr:@std/dotenv/load'; -import process from 'node:process'; -import { ACCOUNT_COLLECTION } from './account/shell/account.collection.ts'; -import { initMongoConnectionManager, mongoManager } from './mongodb.ts'; - -const { MONGO_DB } = process.env; - -try { - initMongoConnectionManager(); - - const mongoClient = await mongoManager.getClient(); - - const result = await Promise.allSettled([ - deployMongoCollection({ - mongoClient: mongoClient, - dbName: MONGO_DB ?? '', - collectionDefinition: ACCOUNT_COLLECTION, - allowUpdateIndexes: true, - }), - ]); - - console.log('\nDeployed collections', JSON.stringify(result, null, 2)); - process.exit(0); -} catch (error) { - console.error(error); - process.exit(1); -} diff --git a/examples/the-expense/src/shared/withPagination.type.ts b/examples/the-expense/src/shared/withPagination.type.ts deleted file mode 100644 index f069ae2..0000000 --- a/examples/the-expense/src/shared/withPagination.type.ts +++ /dev/null @@ -1,6 +0,0 @@ -export type WithPagination = { - limit: number; - skip: number; - total: number; - items: TItems[]; -}; diff --git a/examples/the-expense/start-with-otel.sh b/examples/the-expense/start-with-otel.sh deleted file mode 100644 index 6f42d19..0000000 --- a/examples/the-expense/start-with-otel.sh +++ /dev/null @@ -1,16 +0,0 @@ -export OTEL_DENO=true -export OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" -export OTEL_EXPORTER_OTLP_ENDPOINT="https://otlp-gateway-prod-eu-west-2.grafana.net/otlp" - -# Read OTLP headers from secret file -if [ -f "./.otel_token" ]; then - export OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic $(cat ./.otel_token)" -else - echo "Error: .otel_token file not found." - exit 1 -fi - -export OTEL_SERVICE_NAME=nimbus-the-expense -export OTEL_RESOURCE_ATTRIBUTES=deployment.environment=development - -deno run --unstable-otel -A src/main.ts diff --git a/packages/client/README.md b/packages/client/README.md deleted file mode 100644 index 7146660..0000000 --- a/packages/client/README.md +++ /dev/null @@ -1,10 +0,0 @@ -Nimbus - -# Nimbus Client - -The client package of the Nimbus framework. - -Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. 
diff --git a/packages/client/deno.json b/packages/client/deno.json deleted file mode 100644 index 5bc4e5e..0000000 --- a/packages/client/deno.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "@nimbus/client", - "version": "0.17.0", - "license": "MIT", - "author": "Daniel Gördes (https://overlap.at)", - "repository": { - "type": "git", - "url": "https://github.com/overlap-dev/Nimbus" - }, - "bugs": { - "url": "https://github.com/overlap-dev/Nimbus/issues" - }, - "homepage": "https://nimbus.overlap.at", - "exports": "./src/index.ts", - "fmt": { - "include": ["src/"], - "useTabs": false, - "lineWidth": 80, - "indentWidth": 4, - "semiColons": true, - "singleQuote": true, - "proseWrap": "always" - }, - "lint": { - "include": ["src/"], - "rules": { - "exclude": ["no-explicit-any", "no-slow-types"] - } - }, - "test": { - "include": ["src/"] - }, - "imports": {} -} diff --git a/packages/client/src/index.ts b/packages/client/src/index.ts deleted file mode 100644 index 949e442..0000000 --- a/packages/client/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./lib/sdk.ts"; diff --git a/packages/client/src/lib/sdk.ts b/packages/client/src/lib/sdk.ts deleted file mode 100644 index 2f3da34..0000000 --- a/packages/client/src/lib/sdk.ts +++ /dev/null @@ -1,6 +0,0 @@ -// TODO: implement some kind of SDK creator -// Support the messages handling like creating commands, events and queries. - -export const sdk = () => { - console.log("To be implemented"); -}; diff --git a/packages/core/deno.json b/packages/core/deno.json index b7c2116..ce38fa6 100644 --- a/packages/core/deno.json +++ b/packages/core/deno.json @@ -43,7 +43,6 @@ "@std/assert": "jsr:@std/assert@^1.0.10", "@std/fmt": "jsr:@std/fmt@^1.0.5", "@std/ulid": "jsr:@std/ulid@^1.0.0", - "ajv": "npm:ajv@^8.17.1", - "ajv-formats": "npm:ajv-formats@^3.0.1" + "zod": "npm:zod@^4.3.5" } } \ No newline at end of file diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 62af76b..68d53f4 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,6 +1,4 @@ -export * from './lib/auth/authContext.ts'; export * from './lib/event/eventBus.ts'; -export * from './lib/exception/concurrencyException.ts'; export * from './lib/exception/exception.ts'; export * from './lib/exception/forbiddenException.ts'; export * from './lib/exception/genericException.ts'; @@ -15,5 +13,4 @@ export * from './lib/message/command.ts'; export * from './lib/message/event.ts'; export * from './lib/message/message.ts'; export * from './lib/message/query.ts'; -export * from './lib/router/router.ts'; -export * from './lib/validator/validator.ts'; +export * from './lib/message/router.ts'; diff --git a/packages/core/src/lib/auth/authContext.ts b/packages/core/src/lib/auth/authContext.ts deleted file mode 100644 index 97025c2..0000000 --- a/packages/core/src/lib/auth/authContext.ts +++ /dev/null @@ -1,7 +0,0 @@ -/** - * Authentication context that can be passed through the application. 
- */ -export type AuthContext = { - sub: string; - groups: string[]; -}; diff --git a/packages/core/src/lib/event/eventBus.ts b/packages/core/src/lib/event/eventBus.ts index 346c0b9..b5bc7bc 100644 --- a/packages/core/src/lib/event/eventBus.ts +++ b/packages/core/src/lib/event/eventBus.ts @@ -2,8 +2,7 @@ import EventEmitter from 'node:events'; import { GenericException } from '../exception/genericException.ts'; import { getLogger } from '../log/logger.ts'; import type { Event } from '../message/event.ts'; -import type { MessageHandler } from '../router/router.ts'; -import { MessageRouter } from '../router/router.ts'; +import { type MessageHandler, MessageRouter } from '../message/router.ts'; /** * The input type for subscribing to an event. diff --git a/packages/core/src/lib/exception/concurrencyException.ts b/packages/core/src/lib/exception/concurrencyException.ts deleted file mode 100644 index a33a438..0000000 --- a/packages/core/src/lib/exception/concurrencyException.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { Exception } from './exception.ts'; - -/** - * Exception thrown when a concurrency conflict occurs. - * - * This typically happens in event sourcing when using optimistic concurrency control - * and another process has modified the aggregate between reading and writing. - */ -export class ConcurrencyException extends Exception { - constructor(message?: string, details?: Record) { - super( - 'CONCURRENCY_EXCEPTION', - message ?? 'Concurrency conflict detected', - details, - 409, - ); - } -} diff --git a/packages/core/src/lib/exception/exception.ts b/packages/core/src/lib/exception/exception.ts index 774d51a..757ee09 100644 --- a/packages/core/src/lib/exception/exception.ts +++ b/packages/core/src/lib/exception/exception.ts @@ -1,10 +1,7 @@ /** * Base exception */ -export class Exception { - public readonly name: string; - public message: string; - public stack?: string; +export class Exception extends Error { public details?: Record; public statusCode?: number; @@ -14,8 +11,8 @@ export class Exception { details?: Record, statusCode?: number, ) { + super(message); this.name = name; - this.message = message; if (details) { this.details = details; @@ -25,7 +22,10 @@ export class Exception { this.statusCode = statusCode; } - Error.captureStackTrace(this, this.constructor); + // Maintains proper stack trace in V8 environments + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } } public fromError(error: Error): Exception { diff --git a/packages/core/src/lib/exception/forbiddenException.ts b/packages/core/src/lib/exception/forbiddenException.ts index 1ce205e..71be59c 100644 --- a/packages/core/src/lib/exception/forbiddenException.ts +++ b/packages/core/src/lib/exception/forbiddenException.ts @@ -5,6 +5,6 @@ import { Exception } from './exception.ts'; */ export class ForbiddenException extends Exception { constructor(message?: string, details?: Record) { - super('FORBIDDEN_EXCEPTION', message ?? 'Forbidden', details, 403); + super('FORBIDDEN', message ?? 
'Forbidden', details, 403); } } diff --git a/packages/core/src/lib/exception/genericException.test.ts b/packages/core/src/lib/exception/genericException.test.ts index c9cf4cc..96efa6d 100644 --- a/packages/core/src/lib/exception/genericException.test.ts +++ b/packages/core/src/lib/exception/genericException.test.ts @@ -5,8 +5,8 @@ Deno.test('GenericException without constructor input', () => { const exception = new GenericException(); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); - assertEquals(exception.message, 'An error occurred'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); + assertEquals(exception.message, 'Internal server error'); assertEquals(exception.statusCode, 500); assertEquals(typeof exception.details, 'undefined'); assertEquals(typeof exception.stack, 'string'); @@ -21,7 +21,7 @@ Deno.test('GenericException with constructor input', () => { const exception = new GenericException(message, details); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); assertEquals(exception.message, message); assertEquals(exception.statusCode, 500); assertEquals(exception.details, details); @@ -34,7 +34,7 @@ Deno.test('GenericException from error without constructor input', () => { const exception = new GenericException().fromError(nativeError); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 500); assertEquals(typeof exception.details, 'undefined'); @@ -54,7 +54,7 @@ Deno.test('GenericException from error with constructor input', () => { ).fromError(nativeError); assertInstanceOf(exception, GenericException); - assertEquals(exception.name, 'GENERIC_EXCEPTION'); + assertEquals(exception.name, 'INTERNAL_SERVER_ERROR'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 500); assertEquals(exception.details, details); diff --git a/packages/core/src/lib/exception/genericException.ts b/packages/core/src/lib/exception/genericException.ts index 6fc8340..48d7ebb 100644 --- a/packages/core/src/lib/exception/genericException.ts +++ b/packages/core/src/lib/exception/genericException.ts @@ -6,8 +6,8 @@ import { Exception } from './exception.ts'; export class GenericException extends Exception { constructor(message?: string, details?: Record) { super( - 'GENERIC_EXCEPTION', - message ?? 'An error occurred', + 'INTERNAL_SERVER_ERROR', + message ?? 
'Internal server error', details, 500, ); diff --git a/packages/core/src/lib/exception/invalidInputException.test.ts b/packages/core/src/lib/exception/invalidInputException.test.ts index fba61b4..2c384d9 100644 --- a/packages/core/src/lib/exception/invalidInputException.test.ts +++ b/packages/core/src/lib/exception/invalidInputException.test.ts @@ -5,8 +5,8 @@ Deno.test('InvalidInputException without constructor input', () => { const exception = new InvalidInputException(); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); - assertEquals(exception.message, 'The provided input is invalid'); + assertEquals(exception.name, 'INVALID_INPUT'); + assertEquals(exception.message, 'Invalid input'); assertEquals(exception.statusCode, 400); assertEquals(typeof exception.details, 'undefined'); assertEquals(typeof exception.stack, 'string'); @@ -24,7 +24,7 @@ Deno.test('InvalidInputException with constructor input', () => { ); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); + assertEquals(exception.name, 'INVALID_INPUT'); assertEquals(exception.message, message); assertEquals(exception.statusCode, 400); assertEquals(exception.details, details); @@ -39,7 +39,7 @@ Deno.test('InvalidInputException from error without constructor input', () => { ); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); + assertEquals(exception.name, 'INVALID_INPUT'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 400); assertEquals(typeof exception.details, 'undefined'); @@ -59,7 +59,7 @@ Deno.test('InvalidInputException from error with constructor input', () => { ).fromError(nativeError); assertInstanceOf(exception, InvalidInputException); - assertEquals(exception.name, 'INVALID_INPUT_EXCEPTION'); + assertEquals(exception.name, 'INVALID_INPUT'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 400); assertEquals(exception.details, details); diff --git a/packages/core/src/lib/exception/invalidInputException.ts b/packages/core/src/lib/exception/invalidInputException.ts index cffd3a2..cffc29c 100644 --- a/packages/core/src/lib/exception/invalidInputException.ts +++ b/packages/core/src/lib/exception/invalidInputException.ts @@ -1,3 +1,4 @@ +import type { ZodError } from 'zod'; import { Exception } from './exception.ts'; /** @@ -6,8 +7,8 @@ import { Exception } from './exception.ts'; export class InvalidInputException extends Exception { constructor(message?: string, details?: Record) { super( - 'INVALID_INPUT_EXCEPTION', - message ?? 'The provided input is invalid', + 'INVALID_INPUT', + message ?? 'Invalid input', details, 400, ); @@ -23,7 +24,7 @@ export class InvalidInputException extends Exception { * * @deprecated We do not want to depend on Zod anymore. 
*/ - public fromZodError(error: any): InvalidInputException { + public fromZodError(error: ZodError): InvalidInputException { if (error.stack) { this.stack = error.stack; } diff --git a/packages/core/src/lib/exception/notFoundException.test.ts b/packages/core/src/lib/exception/notFoundException.test.ts index f2fec2e..cf33374 100644 --- a/packages/core/src/lib/exception/notFoundException.test.ts +++ b/packages/core/src/lib/exception/notFoundException.test.ts @@ -5,7 +5,7 @@ Deno.test('NotFoundException without constructor input', () => { const exception = new NotFoundException(); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, 'Not found'); assertEquals(exception.statusCode, 404); assertEquals(typeof exception.details, 'undefined'); @@ -21,7 +21,7 @@ Deno.test('NotFoundException with constructor input', () => { const exception = new NotFoundException(message, details); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, message); assertEquals(exception.statusCode, 404); assertEquals(exception.details, details); @@ -34,7 +34,7 @@ Deno.test('NotFoundException from error without constructor input', () => { const exception = new NotFoundException().fromError(nativeError); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 404); assertEquals(typeof exception.details, 'undefined'); @@ -54,7 +54,7 @@ Deno.test('NotFoundException from error with constructor input', () => { ).fromError(nativeError); assertInstanceOf(exception, NotFoundException); - assertEquals(exception.name, 'NOT_FOUND_EXCEPTION'); + assertEquals(exception.name, 'NOT_FOUND'); assertEquals(exception.message, nativeError.message); assertEquals(exception.statusCode, 404); assertEquals(exception.details, details); diff --git a/packages/core/src/lib/exception/notFoundException.ts b/packages/core/src/lib/exception/notFoundException.ts index e86fd78..a293c2a 100644 --- a/packages/core/src/lib/exception/notFoundException.ts +++ b/packages/core/src/lib/exception/notFoundException.ts @@ -5,6 +5,6 @@ import { Exception } from './exception.ts'; */ export class NotFoundException extends Exception { constructor(message?: string, details?: Record) { - super('NOT_FOUND_EXCEPTION', message ?? 'Not found', details, 404); + super('NOT_FOUND', message ?? 'Not found', details, 404); } } diff --git a/packages/core/src/lib/exception/unauthorizedException.ts b/packages/core/src/lib/exception/unauthorizedException.ts index 7d6b1af..d729cd7 100644 --- a/packages/core/src/lib/exception/unauthorizedException.ts +++ b/packages/core/src/lib/exception/unauthorizedException.ts @@ -6,7 +6,7 @@ import { Exception } from './exception.ts'; export class UnauthorizedException extends Exception { constructor(message?: string, details?: Record) { super( - 'UNAUTHORIZED_EXCEPTION', + 'UNAUTHORIZED', message ?? 
'Unauthorized', details, 401, diff --git a/packages/core/src/lib/message/command.ts b/packages/core/src/lib/message/command.ts index 78130f8..ff2ed55 100644 --- a/packages/core/src/lib/message/command.ts +++ b/packages/core/src/lib/message/command.ts @@ -1,3 +1,6 @@ +import { ulid } from '@std/ulid'; +import { z } from 'zod'; + /** * A command is a message that is sent to tell the system * to perform an action. Typically commands come in via an API @@ -49,75 +52,71 @@ export type Command = { dataschema?: string; }; -export const commandSchema = { - $id: 'https://nimbus.overlap.at/schemas/command/v1', - type: 'object', - required: [ - 'specversion', - 'id', - 'correlationid', - 'time', - 'source', - 'type', - 'data', - ], - properties: { - specversion: { - const: '1.0', - }, - id: { - type: 'string', - minLength: 1, - }, - correlationid: { - type: 'string', - minLength: 1, - }, - time: { - type: 'string', - format: 'date-time', - minLength: 1, - }, - source: { - type: 'string', - format: 'uri-reference', - minLength: 1, - }, - type: { - type: 'string', - minLength: 1, - }, - subject: { - type: 'string', - minLength: 1, - }, - data: { - anyOf: [ - { - type: 'object', - }, - { - type: 'string', - }, - { - type: 'number', - }, - { - type: 'array', - }, - { - type: 'boolean', - }, - ], - }, - datacontenttype: { - type: 'string', - minLength: 1, - }, - dataschema: { - type: 'string', - format: 'uri', - minLength: 1, - }, - }, +/** + * The Zod schema matching the Command type. + * + * Zod is the default for validating incomming messages. + * + * We do not infer the Command type from this schema because of + * slow type issues see https://jsr.io/docs/about-slow-types for more details. + */ +export const commandSchema = z.object({ + specversion: z.literal('1.0'), + id: z.string(), + correlationid: z.string(), + time: z.iso.datetime(), + source: z.string(), + type: z.string(), + subject: z.string().optional(), + data: z.union([ + z.record(z.string(), z.unknown()), + z.string(), + z.number(), + z.array(z.unknown()), + z.boolean(), + ]), + datacontenttype: z.string().optional(), + dataschema: z.url().optional(), +}); + +/** + * Input for creating a command. + */ +export type CreateCommandInput = Partial> & { + type: string; + source: string; + data: unknown; +}; + +/** + * Creates a command based on input data with the convenience + * to skip properties and use the defaults for the rest. + */ +export const createCommand = ( + { + id, + correlationid, + time, + source, + type, + subject, + data, + datacontenttype, + dataschema, + }: CreateCommandInput, +): TCommand => { + const command = { + specversion: '1.0', + id: id ?? ulid(), + correlationid: correlationid ?? ulid(), + time: time ?? new Date().toISOString(), + source, + type, + ...(subject && { subject }), + data, + datacontenttype: datacontenttype ?? 'application/json', + ...(dataschema && { dataschema }), + } as TCommand; + + return command; }; diff --git a/packages/core/src/lib/message/event.ts b/packages/core/src/lib/message/event.ts index b7a113c..d91feb3 100644 --- a/packages/core/src/lib/message/event.ts +++ b/packages/core/src/lib/message/event.ts @@ -1,3 +1,6 @@ +import { ulid } from '@std/ulid'; +import { z } from 'zod'; + /** * An event is a message that is emitted by the system to notify * subscribers that something has happened. 
Typically events are @@ -52,76 +55,71 @@ export type Event = { dataschema?: string; }; -export const eventSchema = { - $id: 'https://nimbus.overlap.at/schemas/event/v1', - type: 'object', - required: [ - 'specversion', - 'id', - 'correlationid', - 'time', - 'source', - 'type', - 'subject', - 'data', - ], - properties: { - specversion: { - const: '1.0', - }, - id: { - type: 'string', - minLength: 1, - }, - correlationid: { - type: 'string', - minLength: 1, - }, - time: { - type: 'string', - format: 'date-time', - minLength: 1, - }, - source: { - type: 'string', - format: 'uri-reference', - minLength: 1, - }, - type: { - type: 'string', - minLength: 1, - }, - subject: { - type: 'string', - minLength: 1, - }, - data: { - anyOf: [ - { - type: 'object', - }, - { - type: 'string', - }, - { - type: 'number', - }, - { - type: 'array', - }, - { - type: 'boolean', - }, - ], - }, - datacontenttype: { - type: 'string', - minLength: 1, - }, - dataschema: { - type: 'string', - format: 'uri', - minLength: 1, - }, - }, +/** + * The Zod schema matching the Event type. + * + * Zod is the default for validating incomming messages. + * + * We do not infer the Event type from this schema because of + * slow type issues see https://jsr.io/docs/about-slow-types for more details. + */ +export const eventSchema = z.object({ + specversion: z.literal('1.0'), + id: z.string(), + correlationid: z.string(), + time: z.iso.datetime(), + source: z.string(), + type: z.string(), + subject: z.string(), + data: z.union([ + z.record(z.string(), z.unknown()), + z.string(), + z.number(), + z.array(z.unknown()), + z.boolean(), + ]), + datacontenttype: z.string().optional(), + dataschema: z.url().optional(), +}); + +/** + * Input for creating an event. + */ +export type CreateEventInput = Partial> & { + type: string; + source: string; + data: unknown; +}; + +/** + * Creates an event based on input data with the convenience + * to skip properties and use the defaults for the rest. + */ +export const createEvent = ( + { + id, + correlationid, + time, + source, + type, + subject, + data, + datacontenttype, + dataschema, + }: CreateEventInput, +): TEvent => { + const event = { + specversion: '1.0', + id: id ?? ulid(), + correlationid: correlationid ?? ulid(), + time: time ?? new Date().toISOString(), + source, + type, + ...(subject && { subject }), + data, + datacontenttype: datacontenttype ?? 'application/json', + ...(dataschema && { dataschema }), + } as TEvent; + + return event; }; diff --git a/packages/core/src/lib/message/query.ts b/packages/core/src/lib/message/query.ts index 9e76253..01d71a6 100644 --- a/packages/core/src/lib/message/query.ts +++ b/packages/core/src/lib/message/query.ts @@ -1,3 +1,6 @@ +import { ulid } from '@std/ulid'; +import { z } from 'zod'; + /** * A query is a message that is sent to the system to request * information. 
@@ -45,56 +48,68 @@ export type Query = { dataschema?: string; }; -export const querySchema = { - $id: 'https://nimbus.overlap.at/schemas/query/v1', - type: 'object', - required: [ - 'specversion', - 'id', - 'correlationid', - 'time', - 'source', - 'type', - 'data', - ], - properties: { - specversion: { - const: '1.0', - }, - id: { - type: 'string', - minLength: 1, - }, - correlationid: { - type: 'string', - minLength: 1, - }, - time: { - type: 'string', - format: 'date-time', - minLength: 1, - }, - source: { - type: 'string', - format: 'uri-reference', - minLength: 1, - }, - type: { - type: 'string', - minLength: 1, - }, - data: { - type: 'object', - additionalProperties: true, - }, - datacontenttype: { - type: 'string', - minLength: 1, - }, - dataschema: { - type: 'string', - format: 'uri', - minLength: 1, - }, - }, +/** + * The Zod schema matching the Query type. + * + * Zod is the default for validating incomming messages. + * + * We do not infer the Query type from this schema because of + * slow type issues see https://jsr.io/docs/about-slow-types for more details. + */ +export const querySchema = z.object({ + specversion: z.literal('1.0'), + id: z.string(), + correlationid: z.string(), + time: z.iso.datetime(), + source: z.string(), + type: z.string(), + data: z.union([ + z.record(z.string(), z.unknown()), + z.string(), + z.number(), + z.array(z.unknown()), + z.boolean(), + ]), + datacontenttype: z.string().optional(), + dataschema: z.url().optional(), +}); + +/** + * Input for creating a query. + */ +export type CreateQueryInput = Partial> & { + type: string; + source: string; + data: unknown; +}; + +/** + * Creates a query based on input data with the convenience + * to skip properties and use the defaults for the rest. + */ +export const createQuery = ( + { + id, + correlationid, + time, + source, + type, + data, + datacontenttype, + dataschema, + }: CreateQueryInput, +): TQuery => { + const query = { + specversion: '1.0', + id: id ?? ulid(), + correlationid: correlationid ?? ulid(), + time: time ?? new Date().toISOString(), + source, + type, + data, + datacontenttype: datacontenttype ?? 'application/json', + ...(dataschema && { dataschema }), + } as TQuery; + + return query; }; diff --git a/packages/core/src/lib/message/router.ts b/packages/core/src/lib/message/router.ts new file mode 100644 index 0000000..6c733fc --- /dev/null +++ b/packages/core/src/lib/message/router.ts @@ -0,0 +1,173 @@ +import { getLogger, InvalidInputException } from '@nimbus/core'; +import type { z } from 'zod'; +import { NotFoundException } from '../exception/notFoundException.ts'; +import type { Message } from './message.ts'; + +/** + * The message handler type - transport-agnostic, just returns domain data. + * + * @template TInput - The type of the input message. + * @template TOutput - The type of the data returned by the handler. + */ +export type MessageHandler< + TInput extends Message = Message, + TOutput = unknown, +> = ( + input: TInput, +) => Promise; + +/** + * Options for creating a MessageRouter. + */ +export type MessageRouterOptions = { + logInput?: (input: any) => void; + logOutput?: (output: any) => void; +}; + +type ZodSchema = z.ZodType; + +/** + * Internal handler registration. + */ +type HandlerRegistration = { + handler: MessageHandler; + schema: ZodSchema; +}; + +/** + * The MessageRouter routes messages to their handlers + * based on the type value of the message. 
+ * + * @example + * ```ts + * import { MessageRouter } from "@nimbus/core"; + * + * const messageRouter = new MessageRouter(); + * + * messageRouter.register( + * 'at.overlap.nimbus.add-recipe', + * addRecipeHandler, + * addRecipeSchema, + * ); + * + * messageRouter.register( + * 'at.overlap.nimbus.recipe-added', + * recipeAddedHandler, + * recipeAddedSchema, + * ); + * + * messageRouter.register( + * 'at.overlap.nimbus.get-recipe', + * getRecipeHandler, + * getRecipeSchema, + * ); + * + * const result = await messageRouter.route(someInput); + * ``` + */ +export class MessageRouter { + private readonly _handlers: Map; + private readonly _logInput?: (input: any) => void; + private readonly _logOutput?: (output: any) => void; + + constructor( + options?: MessageRouterOptions, + ) { + this._handlers = new Map(); + this._logInput = options?.logInput; + this._logOutput = options?.logOutput; + } + + /** + * Register a handler for a specific message type. + * + * @param {string} messageType - The messages type as defined in the CloudEvents specification (e.g., 'at.overlap.nimbus.add-recipe') + * @param {MessageHandler} handler - The handler function + * @param {ZodSchema} schema - The schema to validate the command + * + * @example + * ```ts + * router.register( + * 'at.overlap.nimbus.add-recipe', + * addRecipeHandler, + * addRecipeSchema, + * ); + * + * router.register( + * 'at.overlap.nimbus.get-recipe', + * getRecipeHandler, + * getRecipeSchema, + * ); + * ``` + */ + public register( + messageType: string, + handler: MessageHandler, + schema: ZodSchema, + ): void { + this._handlers.set(messageType, { + handler, + schema, + }); + + getLogger().debug({ + category: 'Nimbus', + message: `Registered handler for: ${messageType}`, + }); + } + + /** + * Route a message to its handler. 
+ * + * @param {unknown} input - The raw input to route + * + * @returns {Promise} The result from the handler + * + * @throws {NotFoundException} - If no handler is registered for the message type + * @throws {InvalidInputException} - If the message is invalid + * @throws {GenericException} - If an error occurs during routing + */ + public async route(input: any): Promise { + if (this._logInput) { + this._logInput(input); + } + + if (!input?.type) { + throw new InvalidInputException( + 'The provided input has no type attribute', + ); + } + + const registration = this._handlers.get(input.type); + if (!registration) { + throw new NotFoundException( + 'Message handler not found', + { + reason: + `Could not find a handler for message type: "${input.type}"`, + }, + ); + } + + const { handler, schema } = registration; + + const validationResult = schema.safeParse(input); + + if (!validationResult.success) { + throw new InvalidInputException( + 'The provided input is invalid', + { + issues: validationResult.error.issues, + }, + ); + } + + const result = await handler(validationResult.data); + + if (this._logOutput) { + this._logOutput(result); + } + + return result; + } +} diff --git a/packages/core/src/lib/router/messageRouter.test.ts b/packages/core/src/lib/router/messageRouter.test.ts deleted file mode 100644 index 29e84d6..0000000 --- a/packages/core/src/lib/router/messageRouter.test.ts +++ /dev/null @@ -1,256 +0,0 @@ -import { assertEquals, assertRejects } from '@std/assert'; -import { GenericException } from '../exception/genericException.ts'; -import { InvalidInputException } from '../exception/invalidInputException.ts'; -import { NotFoundException } from '../exception/notFoundException.ts'; -import type { Command } from '../message/command.ts'; -import { getValidator } from '../validator/validator.ts'; -import { MessageRouter } from './router.ts'; -import { - testCommand, - testCommandHandler, - testEvent, - testEventHandler, - testEventSchema, - testEventWithException, - testEventWithInvalidData, - testQuery, - testQueryHandler, -} from './testFixtures.ts'; - -Deno.test('MessageRouter - registers command handler', () => { - const router = new MessageRouter('command'); - router.register( - 'at.overlap.nimbus.test-command', - testCommandHandler, - ); - - // No error means success - registration is synchronous - assertEquals(true, true); -}); - -Deno.test('MessageRouter - routes valid command to handler', async () => { - const router = new MessageRouter('command'); - router.register( - 'at.overlap.nimbus.test-command', - testCommandHandler, - { allowUnsafeInput: true }, - ); - - const result = await router.route(testCommand); - - assertEquals(result, { - aNumber: 42, - }); -}); - -Deno.test('MessageRouter - routes valid query to handler', async () => { - const router = new MessageRouter('query'); - router.register( - 'at.overlap.nimbus.test-query', - testQueryHandler, - { allowUnsafeInput: true }, - ); - - const result = await router.route(testQuery); - - assertEquals(result, { - foo: 'bar', - }); -}); - -Deno.test('MessageRouter - routes valid event to handler with schema validation', async () => { - const validator = getValidator(); - validator.addSchema(testEventSchema); - - const router = new MessageRouter('event'); - router.register( - 'at.overlap.nimbus.test-event', - testEventHandler, - ); - - const result = await router.route(testEvent); - - assertEquals(result, { - testException: false, - aNumber: 42, - }); -}); - -Deno.test('MessageRouter - throws NotFoundException when handler 
not registered', async () => { - const router = new MessageRouter('command'); - - const unknownCommand: Command = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.unknown-command', - data: {}, - }; - - await assertRejects( - async () => { - await router.route(unknownCommand); - }, - NotFoundException, - 'Message handler not found', - ); -}); - -Deno.test('MessageRouter - throws InvalidInputException when command envelope is invalid', async () => { - const router = new MessageRouter('command'); - - const invalidCommand = { - specversion: '1.0', - id: '123', - // Missing correlationid - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-command', - data: {}, - }; - - await assertRejects( - async () => { - await router.route(invalidCommand); - }, - InvalidInputException, - 'The provided input is invalid', - ); -}); - -Deno.test('MessageRouter - throws InvalidInputException when dataschema is missing and allowUnsafeInput is false', async () => { - const router = new MessageRouter('event'); - router.register( - 'at.overlap.nimbus.test-event', - testEventHandler, - // allowUnsafeInput defaults to false - ); - - const eventWithoutSchema = { - ...testEvent, - dataschema: undefined, - }; - - await assertRejects( - async () => { - await router.route(eventWithoutSchema); - }, - InvalidInputException, - 'No dataschema provided for message', - ); -}); - -Deno.test('MessageRouter - allows missing dataschema when allowUnsafeInput is true', async () => { - const router = new MessageRouter('command'); - router.register( - 'at.overlap.nimbus.test-command', - testCommandHandler, - { allowUnsafeInput: true }, - ); - - const commandWithoutSchema = { - ...testCommand, - dataschema: undefined, - }; - - const result = await router.route(commandWithoutSchema); - - assertEquals(result, { - aNumber: 42, - }); -}); - -Deno.test('MessageRouter - validates data against dataschema when provided', async () => { - const router = new MessageRouter('event'); - router.register( - 'at.overlap.nimbus.test-event', - testEventHandler, - ); - - await assertRejects( - async () => { - await router.route(testEventWithInvalidData); - }, - InvalidInputException, - 'The provided input is invalid', - ); -}); - -Deno.test('MessageRouter - handler exceptions propagate correctly', async () => { - const router = new MessageRouter('event'); - router.register( - 'at.overlap.nimbus.test-event', - testEventHandler, - ); - - await assertRejects( - async () => { - await router.route(testEventWithException); - }, - GenericException, - 'Test exception thrown', - ); -}); - -Deno.test('MessageRouter - calls logInput hook when provided', async () => { - let loggedInput: any = null; - - const router = new MessageRouter('command', { - logInput: (input) => { - loggedInput = input; - }, - }); - - router.register( - 'at.overlap.nimbus.test-command', - testCommandHandler, - { allowUnsafeInput: true }, - ); - - await router.route(testCommand); - - assertEquals(loggedInput, testCommand); -}); - -Deno.test('MessageRouter - calls logOutput hook when provided', async () => { - let loggedOutput: any = null; - - const router = new MessageRouter('command', { - logOutput: (output) => { - loggedOutput = output; - }, - }); - - router.register( - 'at.overlap.nimbus.test-command', - testCommandHandler, - { allowUnsafeInput: true }, - ); - - await router.route(testCommand); - - assertEquals(loggedOutput, { - 
aNumber: 42, - }); -}); - -Deno.test('MessageRouter - throws GenericException for invalid router type', async () => { - // This test verifies internal type checking - // We can't actually create an invalid router type through the constructor - // due to TypeScript, but the runtime check exists for JavaScript users - - // Create a router with a valid type - const router = new MessageRouter('command'); - router.register( - 'at.overlap.nimbus.test-command', - testCommandHandler, - { allowUnsafeInput: true }, - ); - - // The type check happens internally during route() - // This test just confirms routing works normally - const result = await router.route(testCommand); - assertEquals(result, { aNumber: 42 }); -}); diff --git a/packages/core/src/lib/router/router.ts b/packages/core/src/lib/router/router.ts deleted file mode 100644 index 7375735..0000000 --- a/packages/core/src/lib/router/router.ts +++ /dev/null @@ -1,210 +0,0 @@ -import { getLogger, InvalidInputException } from '@nimbus/core'; -import { GenericException } from '../exception/genericException.ts'; -import { NotFoundException } from '../exception/notFoundException.ts'; -import { type Command, commandSchema } from '../message/command.ts'; -import { type Event, eventSchema } from '../message/event.ts'; -import type { Message } from '../message/message.ts'; -import { type Query, querySchema } from '../message/query.ts'; -import { getValidator } from '../validator/validator.ts'; - -/** - * The message handler type - transport-agnostic, just returns domain data. - * - * @template TInput - The type of the input message. - * @template TOutput - The type of the data returned by the handler. - */ -export type MessageHandler< - TInput extends Message = Message, - TOutput = unknown, -> = ( - input: TInput, -) => Promise; - -/** - * Options for registering a message handler. - */ -export type RegisterHandlerOptions = { - allowUnsafeInput?: boolean; -}; - -/** - * Options for creating a MessageRouter. - */ -export type MessageRouterOptions = { - logInput?: (input: unknown) => void; - logOutput?: (output: unknown) => void; -}; - -/** - * Internal handler registration. - */ -type HandlerRegistration = { - handler: MessageHandler; - allowUnsafeInput: boolean; -}; - -/** - * The MessageRouter routes messages to their handlers. - * - * @example - * ```ts - * import { MessageRouter } from "@nimbus/core"; - * - * const commandRouter = new MessageRouter('command'); - * - * commandRouter.register( - * 'at.overlap.nimbus.add-recipe', - * addRecipeHandler, - * ); - * - * const result = await commandRouter.route(someCommand); - * ``` - */ -export class MessageRouter { - private readonly _type: 'command' | 'query' | 'event'; - private readonly _handlers: Map; - private readonly _logInput?: (input: unknown) => void; - private readonly _logOutput?: (output: unknown) => void; - - constructor( - type: 'command' | 'query' | 'event', - options?: MessageRouterOptions, - ) { - this._type = type; - this._handlers = new Map(); - this._logInput = options?.logInput; - this._logOutput = options?.logOutput; - } - - /** - * Register a message handler for a specific message type. 
- * - * @param {string} messageType - The message type (e.g., 'at.overlap.nimbus.add-recipe') - * @param {MessageHandler} handler - The handler function - * @param {RegisterHandlerOptions} options - Optional configuration - * - * @example - * ```ts - * router.register( - * 'at.overlap.nimbus.add-recipe', - * addRecipeHandler, - * { allowUnsafeInput: true } - * ); - * ``` - */ - public register( - messageType: string, - handler: MessageHandler, - options?: RegisterHandlerOptions, - ): void { - this._handlers.set(messageType, { - handler, - allowUnsafeInput: options?.allowUnsafeInput ?? false, - }); - - getLogger().debug({ - category: 'Nimbus', - message: `Registered ${this._type} handler for: ${messageType}`, - }); - } - - /** - * Route a message to its handler. - * - * @param {unknown} input - The raw input to route - * - * @returns {Promise} The result from the handler - * - * @throws {NotFoundException} - If no handler is registered for the message type - * @throws {InvalidInputException} - If the message is invalid - * @throws {GenericException} - If an error occurs during routing - */ - public async route(input: unknown): Promise { - if (this._logInput) { - this._logInput(input); - } - - const validator = getValidator(); - - // Validate message envelope - let parseResult; - if (this._type === 'command') { - parseResult = validator.validate(commandSchema.$id, input); - } else if (this._type === 'query') { - parseResult = validator.validate(querySchema.$id, input); - } else if (this._type === 'event') { - parseResult = validator.validate(eventSchema.$id, input); - } else { - throw new GenericException( - 'Invalid router type', - { - reason: - `The router type must be either "command", "query" or "event"`, - }, - ); - } - - if (parseResult.error) { - throw parseResult.error; - } - - const message = parseResult.data; - - // Find handler - const registration = this._handlers.get(message.type); - if (!registration) { - throw new NotFoundException( - 'Message handler not found', - { - reason: - `Could not find a handler for message type: "${message.type}"`, - }, - ); - } - - const { handler, allowUnsafeInput } = registration; - - // Validate message data if dataschema is provided - let validMessage: Message; - if (message.dataschema) { - const { data, error } = validator.validate( - message.dataschema, - message, - ); - - if (error) { - throw error; - } else { - validMessage = data; - } - } else { - if (allowUnsafeInput) { - getLogger().warn({ - category: 'Nimbus', - message: 'No dataschema found for message', - }); - } else { - throw new InvalidInputException( - 'No dataschema provided for message', - { - errorCode: 'MISSING_DATASCHEMA', - reason: `The dataschema is missing on the message - and "allowUnsafeInput" is not enabled for the message type. - It is recommended to always provide a dataschema - for input validation. 
Otherwise set "allowUnsafeInput" - to true when registering the handler.`, - }, - ); - } - validMessage = message; - } - - const result = await handler(validMessage); - - if (this._logOutput) { - this._logOutput(result); - } - - return result; - } -} diff --git a/packages/core/src/lib/router/testFixtures.ts b/packages/core/src/lib/router/testFixtures.ts deleted file mode 100644 index d1e934c..0000000 --- a/packages/core/src/lib/router/testFixtures.ts +++ /dev/null @@ -1,155 +0,0 @@ -import type { SchemaObject } from 'ajv'; -import { GenericException } from '../exception/genericException.ts'; -import type { Command } from '../message/command.ts'; -import { type Event, eventSchema } from '../message/event.ts'; -import type { Query } from '../message/query.ts'; -import type { MessageHandler } from './router.ts'; - -// ============================================================================= -// Command Fixtures -// ============================================================================= - -export type TestCommandData = { - aNumber: number; -}; - -export const testCommand: Command = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-command', - data: { - aNumber: 42, - }, - datacontenttype: 'application/json', -}; - -export const testCommandHandler: MessageHandler< - Command, - TestCommandData -> = async (command) => { - return command.data; -}; - -// ============================================================================= -// Query Fixtures -// ============================================================================= - -export type TestQueryData = { - filter: string; -}; - -export type TestQueryResult = { - foo: string; -}; - -export const testQuery: Query = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-query', - data: { - filter: '42', - }, - datacontenttype: 'application/json', -}; - -export const testQueryHandler: MessageHandler< - Query, - TestQueryResult -> = async () => { - return { - foo: 'bar', - }; -}; - -// ============================================================================= -// Event Fixtures -// ============================================================================= - -export type TestEventData = { - testException: boolean; - aNumber: number; -}; - -export const testEventSchema: SchemaObject = { - ...eventSchema, - $id: 'https://api.nimbus.overlap.at/schemas/event/test/v1', - properties: { - ...eventSchema.properties, - data: { - type: 'object', - required: ['testException', 'aNumber'], - properties: { - testException: { type: 'boolean' }, - aNumber: { type: 'number' }, - }, - }, - dataschema: { - const: 'https://api.nimbus.overlap.at/schemas/event/test/v1', - }, - }, -}; - -export const testEvent: Event = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-event', - subject: '/test', - data: { - testException: false, - aNumber: 42, - }, - datacontenttype: 'application/json', - dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', -}; - -export const testEventWithException: Event = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-event', - subject: '/test', - data: { - testException: 
true, - aNumber: 42, - }, - datacontenttype: 'application/json', - dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', -}; - -export const testEventWithInvalidData: Event = { - specversion: '1.0', - id: '123', - correlationid: '456', - time: '2025-01-01T00:00:00Z', - source: 'https://nimbus.overlap.at', - type: 'at.overlap.nimbus.test-event', - subject: '/test', - data: { - testException: true, - aNumber: '42', // This should trigger a validation error - }, - datacontenttype: 'application/json', - dataschema: 'https://api.nimbus.overlap.at/schemas/event/test/v1', -}; - -export const testEventHandler: MessageHandler< - Event, - TestEventData -> = async (event) => { - if (event.data?.testException) { - throw new GenericException('Test exception thrown'); - } - - return event.data; -}; diff --git a/packages/core/src/lib/validator/validator.ts b/packages/core/src/lib/validator/validator.ts deleted file mode 100644 index 1dec137..0000000 --- a/packages/core/src/lib/validator/validator.ts +++ /dev/null @@ -1,228 +0,0 @@ -import type { SchemaObject, ValidateFunction } from 'ajv'; -import { Ajv } from 'ajv'; -import { GenericException } from '../exception/genericException.ts'; -import { InvalidInputException } from '../exception/invalidInputException.ts'; -import { commandSchema } from '../message/command.ts'; -import { eventSchema } from '../message/event.ts'; -import { querySchema } from '../message/query.ts'; - -// Some import shenanigans to make it work in here... -import _addFormats from 'ajv-formats'; -const addFormats = _addFormats as unknown as typeof _addFormats.default; - -export type ValidatorOptions = { - ajv?: Ajv; -}; - -export type ValidationResult = { - data: TData; - error: undefined; -} | { - data: undefined; - error: InvalidInputException | GenericException; -}; - -/** - * The Validator is used to validate data against a schema. - * This way e.g. the Nimbus router validates the the route inputs. - * - * Use the `setupValidator` function to configure the Validator. - * It is possible to pass a custom Ajv instance to the Validator if needed. - * - * Use the `getValidator` function to get the instance of the Validator. - * - * Use the validators `addSchema` method to add schemas to the Validator. - * - * Use the validators `validate` method to validate data against a schema. - * Pass in a schemaId to use a precompiled schema added to the validator before - * or pass in a schema object to compile the schema on the fly. - * - * @example - * ```ts - * import { setupValidator, getValidator } from "@nimbus/core"; - * - * setupValidator({ - * ajv: myCustomAjvInstance, - * }); - * - * const validator = getValidator(); - * - * validator.addSchema(mySchema); - * - * const result1 = validator.validate('theSchemaId', myData); - * const result2 = validator.validate(mySchema, myData); - * ``` - */ -export class Validator { - private static _instance: Validator; - - private readonly _ajv: Ajv; - - constructor(options?: ValidatorOptions) { - if (options?.ajv) { - this._ajv = options.ajv; - } else { - this._ajv = new Ajv(); - addFormats(this._ajv); - } - } - - /** - * Configure the Validator. - */ - public static configure(options?: ValidatorOptions): void { - const validator = new Validator(options); - - validator.addSchema(querySchema); - validator.addSchema(eventSchema); - validator.addSchema(commandSchema); - - Validator._instance = validator; - } - - /** - * Get the Validator instance. 
- * - * @returns {Validator} The Validator instance - */ - public static getInstance(): Validator { - if (!Validator._instance) { - this.configure(); - } - - return Validator._instance; - } - - /** - * Validate data against a schema. - * - * @param {string | SchemaObject} schema - Either a schema id to use a precompiled schema or a schema object. - * @param {unknown} data - The data to validate. - * - * @returns {ValidationResult} The validation result with either the data, an GenericException if the schema is not found, or an InvalidInputException if the data is invalid. - * - * @template TData - The type of the valid data. - * - * @example - * ```ts - * import { getValidator } from "@nimbus/core"; - * - * getValidator().validate( - * 'https://api.nimbus.overlap.at/schemas/command/v1', - * { - * foo: 'bar', - * }, - * ); - * ``` - */ - public validate( - schema: string | SchemaObject, - data: unknown, - ): ValidationResult { - let validateFunc: ValidateFunction; - - if (typeof schema === 'string') { - const func = this._ajv.getSchema(schema); - - if (!func) { - return { - data: undefined, - error: new GenericException('Schema not found', { - reason: `A string was provided for the schema - but no schema with id "${schema}" added to the validator. - Use the addSchema method to add a schema to the validator.`, - }), - }; - } - - validateFunc = func; - } else { - validateFunc = this._ajv.compile(schema); - } - - const isValid = validateFunc(data); - - if (isValid) { - return { data: data as TData, error: undefined }; - } else { - return { - data: undefined, - error: new InvalidInputException( - 'The provided input is invalid', - { - issues: validateFunc.errors, - }, - ), - }; - } - } - - /** - * Add a schema to the validator. - * - * @param {SchemaObject} schema - The JSON Schema to add. - * - * @throws {GenericException} If the schema does not have a $id property. - * - * @example - * ```ts - * import { getValidator } from "@nimbus/core"; - * - * getValidator().addSchema({ - * $id: "https://api.nimbus.overlap.at/schemas/recipe/v1", - * type: "object", - * properties: { - * foo: { - * type: "string", - * }, - * }, - * }); - * ``` - */ - public addSchema(schema: SchemaObject): void { - if (!schema.$id) { - throw new GenericException('Schema must have a $id', { - reason: - 'The schema must have a $id to be added to the validator.', - }); - } - - this._ajv.addSchema(schema, schema.$id); - } -} - -/** - * Configure the Validator. - * - * @param {ValidatorOptions} options - The options for the Validator - * - * @example - * ```ts - * import { - * setupValidator, - * } from "@nimbus/core"; - * - * setupValidator({ - * ajv: myCustomAjvInstance, - * }); - * ``` - */ -export const setupValidator = (options?: ValidatorOptions): void => { - Validator.configure(options); -}; - -/** - * Get the Validator instance. - * - * @returns {Validator} The Validator instance - * - * @example - * ```ts - * import { getValidator } from "@nimbus/core"; - * - * const validator = getValidator(); - * ``` - */ -export const getValidator = (): Validator => { - return Validator.getInstance(); -}; diff --git a/packages/eventsourcing/README.md b/packages/eventsourcing/README.md deleted file mode 100644 index 8db202e..0000000 --- a/packages/eventsourcing/README.md +++ /dev/null @@ -1,10 +0,0 @@ -Nimbus - -# Nimbus Event Sourcing - -The event sourcing package of the Nimbus framework. 
- -Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. diff --git a/packages/eventsourcing/deno.json b/packages/eventsourcing/deno.json deleted file mode 100644 index ec3ee0f..0000000 --- a/packages/eventsourcing/deno.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "@nimbus/eventsourcing", - "version": "0.17.0", - "license": "MIT", - "author": "Daniel Gördes (https://overlap.at)", - "repository": { - "type": "git", - "url": "https://github.com/overlap-dev/Nimbus" - }, - "bugs": { - "url": "https://github.com/overlap-dev/Nimbus/issues" - }, - "homepage": "https://nimbus.overlap.at", - "exports": "./src/index.ts", - "fmt": { - "include": [ - "src/" - ], - "useTabs": false, - "lineWidth": 80, - "indentWidth": 4, - "semiColons": true, - "singleQuote": true, - "proseWrap": "always" - }, - "lint": { - "include": [ - "src/" - ], - "rules": { - "exclude": [ - "no-explicit-any", - "no-slow-types" - ] - } - }, - "test": { - "include": [ - "src/" - ] - }, - "imports": {} -} \ No newline at end of file diff --git a/packages/eventsourcing/src/index.ts b/packages/eventsourcing/src/index.ts deleted file mode 100644 index 31b1de8..0000000 --- a/packages/eventsourcing/src/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './lib/aggregate.ts'; -export * from './lib/eventStore.ts'; diff --git a/packages/eventsourcing/src/lib/aggregate.ts b/packages/eventsourcing/src/lib/aggregate.ts deleted file mode 100644 index 38e593b..0000000 --- a/packages/eventsourcing/src/lib/aggregate.ts +++ /dev/null @@ -1,167 +0,0 @@ -import type { Event } from '@nimbus/core'; -import type { - EventStore, - EventStoreReadOptions, - EventWithMetadata, -} from './eventStore.ts'; - -/** - * Reducer function that applies an event to aggregate state. - * - * @template TState - The type of the aggregate state - */ -export type EventReducer = ( - state: TState, - event: Event, -) => TState; - -/** - * Options for loading an aggregate. - */ -export type LoadAggregateOptions = EventStoreReadOptions; - -/** - * Result of loading an aggregate from the event store. - * - * @template TState - The type of the aggregate state - */ -export type AggregateSnapshot = { - state: TState; - events: EventWithMetadata[]; - lastEventId?: string; -}; - -/** - * Load an aggregate from the event store by replaying events. - * - * This function reads all events for a given subject and applies them - * sequentially using the provided reducer to reconstruct the current state. - * - * @param eventStore - The event store to read from - * @param subject - The subject (aggregate ID) to load - * @param initialState - The initial state before any events - * @param reducer - Function to apply events to state - * @param options - Optional read options (recursive, bounds, etc.) 
- * - * @returns The reconstructed aggregate state with metadata - * - * @example - * ```ts - * const snapshot = await loadAggregate( - * eventStore, - * '/recipes/carbonara', - * null, - * recipeReducer, - * { order: 'chronological' } - * ); - * - * console.log(snapshot.state); // Current recipe state - * console.log(snapshot.version); // Number of events applied - * ``` - */ -export async function loadAggregate( - eventStore: EventStore, - subject: string, - initialState: TState, - reducer: EventReducer, - options?: LoadAggregateOptions, -): Promise> { - const events = await eventStore.readEvents(subject, { - ...options, - order: 'chronological', // Always apply events in order - }); - - return { - state: events.reduce(reducer, initialState), - events, - lastEventId: events.length > 0 - ? events[events.length - 1].eventstoremetadata.id - : undefined, - }; -} - -/** - * Check if an aggregate exists (has any events). - * - * @param eventStore - The event store to check - * @param subject - The subject (aggregate ID) to check - * - * @returns True if the aggregate has events, false otherwise - */ -export async function aggregateExists( - eventStore: EventStore, - subject: string, -): Promise { - const events = await eventStore.readEvents(subject, { - recursive: false, - }); - - return events.length > 0; -} - -/** - * Load multiple aggregates by reading events recursively. - * - * This enables dynamic aggregate boundaries - you can read events for - * a parent subject and reconstruct multiple child aggregates. - * - * @param eventStore - The event store to read from - * @param parentSubject - The parent subject to read recursively - * @param initialState - The initial state for each aggregate - * @param reducer - Function to apply events to state - * @param groupBy - Function to extract subject from event - * - * @returns Map of subject to aggregate snapshot - * - * @example - * ```ts - * // Load all recipes - * const recipes = await loadAggregates( - * eventStore, - * '/recipes', - * null, - * recipeReducer, - * (event) => event.subject - * ); - * - * for (const [subject, snapshot] of recipes) { - * console.log(`${subject}: ${snapshot.state?.name}`); - * } - * ``` - */ -export async function loadAggregates( - eventStore: EventStore, - parentSubject: string, - initialState: TState, - reducer: EventReducer, - groupBy: (event: EventWithMetadata) => string, -): Promise>> { - const events = await eventStore.readEvents(parentSubject, { - recursive: true, - order: 'chronological', - }); - - // Group events by subject - const eventsBySubject = new Map(); - for (const event of events) { - const subject = groupBy(event); - if (!eventsBySubject.has(subject)) { - eventsBySubject.set(subject, []); - } - eventsBySubject.get(subject)!.push(event); - } - - // Reduce each subject's events to build aggregates - const aggregates = new Map>(); - for (const [subject, subjectEvents] of eventsBySubject) { - aggregates.set(subject, { - state: subjectEvents.reduce(reducer, initialState), - events: subjectEvents, - lastEventId: subjectEvents.length > 0 - ? subjectEvents[subjectEvents.length - 1].eventstoremetadata.id - : undefined, - }); - } - - return aggregates; -} diff --git a/packages/eventsourcing/src/lib/eventStore.ts b/packages/eventsourcing/src/lib/eventStore.ts deleted file mode 100644 index 94c3690..0000000 --- a/packages/eventsourcing/src/lib/eventStore.ts +++ /dev/null @@ -1,258 +0,0 @@ -import type { Event } from '@nimbus/core'; - -/** - * Precondition to ensure a subject is on a specific event ID. 
- * Enables optimistic concurrency control. - * - * @example - * ```ts - * { - * type: 'isSubjectOnEventId', - * payload: { - * subject: '/recipes/carbonara', - * eventId: '42' // Last known event ID - * } - * } - * ``` - */ -export type IsSubjectOnEventIdPrecondition = { - type: 'isSubjectOnEventId'; - payload: { - subject: string; - eventId: string; - }; -}; - -/** - * Precondition to ensure a subject has no existing events. - * Used when creating a new aggregate to prevent duplicates. - * - * @example - * ```ts - * { - * type: 'isSubjectPristine', - * payload: { - * subject: '/recipes/carbonara' - * } - * } - * ``` - */ -export type IsSubjectPristinePrecondition = { - type: 'isSubjectPristine'; - payload: { - subject: string; - }; -}; - -/** - * Sometimes, you want to ensure that an event is only written if a more complex condition holds - * for example, if no similar event has ever been recorded before. - * The isEventQlQueryTrue precondition lets you define such conditions using EventQL. - * - * @example - * ```ts - * { - * type: 'isEventQlQueryTrue', - * payload: { - * query: 'FROM e IN events WHERE e.data.title == "2001 – A Space Odyssey" PROJECT INTO COUNT() == 0' - * } - * ``` - */ -export type IsEventQlQueryTruePrecondition = { - type: 'isEventQlQueryTrue'; - payload: { - query: string; - }; -}; - -/** - * Preconditions for writing events. - * Used for optimistic concurrency control and validation. - */ -export type EventStorePrecondition = - | IsSubjectOnEventIdPrecondition - | IsSubjectPristinePrecondition - | IsEventQlQueryTruePrecondition; - -/** - * Options for writing events. - */ -export type EventStoreWriteOptions = { - preconditions?: EventStorePrecondition[]; -}; - -export type EventStoreReadOptions = { - recursive?: boolean; - order?: 'chronological' | 'antichronological'; - lowerBound?: EventStoreBound; - upperBound?: EventStoreBound; - fromLatestEvent?: EventStoreMarker; -}; - -export type EventStoreBound = { - id: number; - type: 'inclusive' | 'exclusive'; -}; - -export type EventStoreMarker = { - subject: string; - type: string; - ifEventIsMissing: 'read-everything' | 'read-nothing'; -}; - -/** - * Metadata from the event store. - * Includes the database-assigned event ID for concurrency control. - */ -export type EventStoreMetadata = { - id: string; - hash?: string; - predecessorhash?: string; - signature?: string | null; -}; - -/** - * Event with metadata from the event store. - */ -export type EventWithMetadata = Event & { - eventstoremetadata: EventStoreMetadata; -}; - -/** - * Configuration for observing events from the event store. - * - * @example - * ```ts - * { - * subject: '/recipes', - * recursive: true, - * sinceEventId: '42', - * handler: async (event) => { - * console.log('New event:', event); - * }, - * onError: (error, event) => { - * console.error('Failed to process event:', error); - * } - * } - * ``` - */ -export type EventStoreObserveConfig = { - /** - * The subject to observe events for. - */ - subject: string; - - /** - * Whether to observe events recursively for all child subjects. - * @default false - */ - recursive?: boolean; - - /** - * Resume observing from a specific event ID. - * Useful for checkpointing and recovery after restarts. - */ - sinceEventId?: string; - - /** - * Handler called for each new event. - */ - handler: (event: EventWithMetadata) => Promise; - - /** - * Error handler called when event processing fails. - * If not provided, errors will be thrown. 
- */ - onError?: (error: Error, event?: EventWithMetadata) => void; -}; - -/** - * Status of an event store subscription. - */ -export type EventStoreSubscriptionStatus = - | 'active' - | 'paused' - | 'closed' - | 'error'; - -/** - * Represents an active subscription to events from the event store. - * Returned by EventStore.observe(). - */ -export interface EventStoreSubscription { - /** - * Unsubscribe from the event stream and clean up resources. - */ - unsubscribe(): Promise; - - /** - * Get the current status of the subscription. - */ - getStatus(): EventStoreSubscriptionStatus; - - /** - * Get the last processed event ID. - * Useful for checkpointing. - */ - getLastEventId(): string | undefined; -} - -/** - * Event store interface. - */ -export interface EventStore { - /** - * Write events to the event store. - * - * @param events - Events to write - * @param options - Write options including preconditions - * @returns The written events with metadata - */ - writeEvents: ( - events: Event[], - options?: EventStoreWriteOptions, - ) => Promise; - - /** - * Read events from the event store. - * - * @param subject - The subject to read events for - * @param options - Read options (recursive, order, bounds, etc.) - * @returns Array of events matching the criteria with metadata - */ - readEvents: ( - subject: string, - options?: EventStoreReadOptions, - ) => Promise; - - /** - * Observe events from the event store in real-time. - * - * Creates a subscription that calls the handler for each new event. - * The implementation may use SSE, WebSockets, polling, or other mechanisms. - * - * @param config - Configuration for observing events - * @returns A subscription object that can be used to unsubscribe - * - * @example - * ```ts - * const subscription = await eventStore.observe({ - * subject: '/recipes', - * recursive: true, - * handler: async (event) => { - * // Update read model - * await updateRecipeProjection(event); - * }, - * onError: (error, event) => { - * console.error('Failed to process event:', error); - * } - * }); - * - * // Later, when shutting down: - * await subscription.unsubscribe(); - * ``` - */ - observe: ( - config: EventStoreObserveConfig, - ) => Promise; -} diff --git a/packages/eventsourcingdb/README.md b/packages/eventsourcingdb/README.md deleted file mode 100644 index fb12e25..0000000 --- a/packages/eventsourcingdb/README.md +++ /dev/null @@ -1,10 +0,0 @@ -Nimbus - -# Nimbus EventSourcingDB - -The EventSourcingDB package of the Nimbus framework. - -Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. 
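For reference, a minimal, non-authoritative sketch of how the `EventStore`, precondition, and `loadAggregate` APIs removed above were used together. The `Recipe` type, `recipeReducer`, the event payload, and the subject are illustrative placeholders, not part of the packages.

```ts
// Sketch only — assumes the Event type from '@nimbus/core' and the
// EventStore / loadAggregate APIs removed above. All concrete values,
// the Recipe type and recipeReducer are hypothetical placeholders.
import type { Event } from '@nimbus/core';
import { type EventStore, loadAggregate } from '@nimbus/eventsourcing';

type Recipe = { title: string } | null;

// Hypothetical reducer that rebuilds the aggregate state from its events.
const recipeReducer = (state: Recipe, event: Event): Recipe => {
    if (event.type === 'at.overlap.nimbus.recipe-added') {
        return { title: (event.data as { title: string }).title };
    }
    return state;
};

export const addAndLoadRecipe = async (eventStore: EventStore) => {
    // Write the first event, guarded by a precondition so the aggregate
    // can only be created once (optimistic concurrency control).
    await eventStore.writeEvents(
        [{
            specversion: '1.0',
            id: '123',
            correlationid: '456',
            time: new Date().toISOString(),
            source: 'https://nimbus.overlap.at',
            type: 'at.overlap.nimbus.recipe-added',
            subject: '/recipes/carbonara',
            data: { title: 'Carbonara' },
            datacontenttype: 'application/json',
        }],
        {
            preconditions: [{
                type: 'isSubjectPristine',
                payload: { subject: '/recipes/carbonara' },
            }],
        },
    );

    // Replay the subject's events to reconstruct the current state.
    // snapshot.lastEventId can later feed an 'isSubjectOnEventId' precondition.
    const snapshot = await loadAggregate<Recipe>(
        eventStore,
        '/recipes/carbonara',
        null,
        recipeReducer,
    );

    return snapshot; // { state, events, lastEventId }
};
```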
diff --git a/packages/eventsourcingdb/deno.json b/packages/eventsourcingdb/deno.json deleted file mode 100644 index 5be4a9a..0000000 --- a/packages/eventsourcingdb/deno.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "@nimbus/eventsourcingdb", - "version": "0.17.0", - "license": "MIT", - "author": "Daniel Gördes (https://overlap.at)", - "repository": { - "type": "git", - "url": "https://github.com/overlap-dev/Nimbus" - }, - "bugs": { - "url": "https://github.com/overlap-dev/Nimbus/issues" - }, - "homepage": "https://nimbus.overlap.at", - "exports": "./src/index.ts", - "fmt": { - "include": [ - "src/" - ], - "useTabs": false, - "lineWidth": 80, - "indentWidth": 4, - "semiColons": true, - "singleQuote": true, - "proseWrap": "always" - }, - "lint": { - "include": [ - "src/" - ], - "rules": { - "exclude": [ - "no-explicit-any", - "no-slow-types" - ] - } - }, - "test": { - "include": [ - "src/" - ] - }, - "imports": {} -} \ No newline at end of file diff --git a/packages/eventsourcingdb/src/index.ts b/packages/eventsourcingdb/src/index.ts deleted file mode 100644 index fa745f3..0000000 --- a/packages/eventsourcingdb/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './lib/eventSourcingDb.ts'; diff --git a/packages/eventsourcingdb/src/lib/eventSourcingDb.ts b/packages/eventsourcingdb/src/lib/eventSourcingDb.ts deleted file mode 100644 index 502cd5b..0000000 --- a/packages/eventsourcingdb/src/lib/eventSourcingDb.ts +++ /dev/null @@ -1,309 +0,0 @@ -import { - ConcurrencyException, - type Event, - GenericException, - getLogger, -} from '@nimbus/core'; -import type { - EventStore, - EventStoreObserveConfig, - EventStoreReadOptions, - EventStoreSubscription, - EventStoreWriteOptions, - EventWithMetadata, -} from '@nimbus/eventsourcing'; -import { EventSourcingDBSubscription } from './eventSourcingDbSubscription.ts'; - -export type MappingEnvelope = { - nimbusData: { - id: string; - correlationid: string; - dataschema?: string; - }; - data: any; -}; - -export type EventSourcingDbInput = { - source: string; - subject: string; - type: string; - data: MappingEnvelope; -}; - -export type EventSourcingDbEvent = { - source: string; - subject: string; - type: string; - specversion: '1.0'; - id: string; - time: string; - datacontenttype: string; - data: any; - hash: string; - predecessorhash: string; - signature: string | null; -}; - -/** - * Options for EventSourcingDBStore. - */ -export type EventSourcingDBStoreOptions = { - apiUrl: string; - secret: string; -}; - -const defaultReadOptions: EventStoreReadOptions = { - recursive: false, -}; - -/** - * EventSourcingDB adapter for the EventStore interface. - * - * Connects to EventSourcingDB API to write and read events. - * - * @example - * ```ts - * const eventStore = new EventSourcingDBStore({ - * apiUrl: process.env.EVENTSOURCINGDB_API, - * secret: process.env.EVENTSOURCINGDB_SECRET, - * }); - * - * await eventStore.writeEvents([{ - * source: 'my-app', - * subject: '/recipes/carbonara', - * type: 'recipe-added', - * data: { title: 'Carbonara' }, - * }]); - * - * const events = await eventStore.readEvents('/recipes/carbonara'); - * ``` - */ -export class EventSourcingDBStore implements EventStore { - private readonly _apiUrl: string; - private readonly _secret: string; - - constructor(options: EventSourcingDBStoreOptions) { - this._apiUrl = options.apiUrl; - this._secret = options.secret; - } - - /** - * Write events to EventSourcingDB. 
- * - * @param events - Events to write - * @param options - Write options including preconditions for optimistic concurrency - * @returns The written events with metadata - * @throws ConcurrencyException if preconditions fail (409 status) - */ - async writeEvents( - events: Event[], - options?: EventStoreWriteOptions, - ): Promise { - const payload = JSON.stringify({ - events: events.map(this._mapNimbusEventToEventSourcingDbInput), - ...(options?.preconditions && { - preconditions: options.preconditions, - }), - }); - - const response = await fetch(`${this._apiUrl}/write-events`, { - method: 'POST', - headers: { - 'authorization': `Bearer ${this._secret}`, - 'content-type': 'application/json', - }, - body: payload, - }); - - const body = await response.text(); - - if (response.status === 409) { - throw new ConcurrencyException( - `Concurrency conflict. At least one precondition failed.`, - { - ...(options?.preconditions && - { preconditions: options.preconditions }), - response: body || 'Precondition check failed', - }, - ); - } - - if (!response.ok) { - throw new GenericException('Failed to write events', { - status: response.status, - statusText: response.statusText, - url: response.url, - body, - }); - } - - if (body.startsWith('[')) { - let items: any[]; - - try { - items = JSON.parse(body); - } catch (error: any) { - throw new GenericException('Failed to parse events', { - reason: error.message, - }); - } - - getLogger().debug({ - category: 'Nimbus', - message: 'EventSourcingDBStore :: Events written', - data: { count: items.length }, - }); - - return items.map(this._mapEventSourcingDbEventToNimbusEvent); - } else { - throw new GenericException('Failed to parse events', { - reason: 'Response was not an array of events', - }); - } - } - - /** - * Read events from EventSourcingDB. - * - * @param subject - The subject to read events for - * @param options - Read options (recursive, order, bounds, etc.) - * @returns Array of events matching the criteria with metadata - */ - async readEvents( - subject: string, - options: EventStoreReadOptions = defaultReadOptions, - ): Promise { - const response = await fetch(`${this._apiUrl}/read-events`, { - method: 'POST', - headers: { - 'authorization': `Bearer ${this._secret}`, - 'content-type': 'application/json', - }, - body: JSON.stringify({ - subject, - options: { - recursive: options.recursive ?? false, - ...(options.order && { order: options.order }), - ...(options.lowerBound && { - lowerBound: options.lowerBound, - }), - ...(options.upperBound && { - upperBound: options.upperBound, - }), - ...(options.fromLatestEvent && { - fromLatestEvent: options.fromLatestEvent, - }), - }, - }), - }); - - const body = await response.text(); - - if (!response.ok) { - throw new GenericException('Failed to read events', { - status: response.status, - statusText: response.statusText, - url: response.url, - body, - }); - } - - let items: any[] = []; - - try { - // We return an empty array if there are no events at all. - if (body.length === 0) { - return []; - } - - // Otherwise we turn the NDJSON response into an array of items. 
- // https://docs.eventsourcingdb.io/getting-started/reading-events - items = body - .split('\n') - .filter((item) => item.startsWith('{')) - .map((item) => JSON.parse(item)); - } catch (error: any) { - throw new GenericException('Failed to parse events', { - reason: error.message, - }); - } - - const events: EventWithMetadata[] = items.map((item) => - this._mapEventSourcingDbEventToNimbusEvent(item.payload) - ); - - getLogger().debug({ - category: 'Nimbus', - message: 'EventSourcingDBStore :: Events read', - data: { subject, count: events.length }, - }); - - return events; - } - - /** - * Observe events from EventSourcingDB in real-time. - * - * Creates a streaming connection to EventSourcingDB and calls the handler - * for each new event. Supports resuming from a specific event ID. - * - * @param config - Configuration for observing events - * @returns A subscription object that can be used to unsubscribe - */ - async observe( - config: EventStoreObserveConfig, - ): Promise { - const subscription = new EventSourcingDBSubscription( - this._apiUrl, - this._secret, - config, - this._mapEventSourcingDbEventToNimbusEvent.bind(this), - ); - - await subscription.start(); - - return subscription; - } - - private _mapNimbusEventToEventSourcingDbInput( - event: Event, - ): EventSourcingDbInput { - return { - source: event.source, - subject: event.subject, - type: event.type, - data: { - nimbusData: { - id: event.id, - correlationid: event.correlationid, - ...(event.dataschema && { dataschema: event.dataschema }), - }, - data: event.data, - }, - }; - } - - private _mapEventSourcingDbEventToNimbusEvent( - dbEvent: EventSourcingDbEvent, - ): EventWithMetadata { - return { - specversion: '1.0', - id: dbEvent.data.nimbusData.id, - correlationid: dbEvent.data.nimbusData.correlationId, - time: dbEvent.time, - source: dbEvent.source, - type: dbEvent.type, - subject: dbEvent.subject, - data: dbEvent.data.data, - datacontenttype: dbEvent.datacontenttype, - ...(dbEvent.data.nimbusData.dataschema && - { dataschema: dbEvent.data.nimbusData.dataschema }), - eventstoremetadata: { - id: dbEvent.id, - hash: dbEvent.hash, - predecessorhash: dbEvent.predecessorhash, - signature: dbEvent.signature, - }, - }; - } -} diff --git a/packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts b/packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts deleted file mode 100644 index 6c436a4..0000000 --- a/packages/eventsourcingdb/src/lib/eventSourcingDbSubscription.ts +++ /dev/null @@ -1,180 +0,0 @@ -import { GenericException, getLogger } from '@nimbus/core'; -import type { - EventStoreObserveConfig, - EventStoreSubscription, - EventStoreSubscriptionStatus, - EventWithMetadata, -} from '@nimbus/eventsourcing'; -import type { EventSourcingDbEvent } from './eventSourcingDb.ts'; - -/** - * Subscription implementation for EventSourcingDB. - * Manages the streaming connection and processes incoming events. 
- */ -export class EventSourcingDBSubscription implements EventStoreSubscription { - private _status: EventStoreSubscriptionStatus = 'active'; - private _lastEventId?: string; - private _abortController?: AbortController; - private readonly _apiUrl: string; - private readonly _secret: string; - private readonly _config: EventStoreObserveConfig; - private readonly _mapper: ( - dbEvent: EventSourcingDbEvent, - ) => EventWithMetadata; - - constructor( - apiUrl: string, - secret: string, - config: EventStoreObserveConfig, - mapper: (dbEvent: EventSourcingDbEvent) => EventWithMetadata, - ) { - this._apiUrl = apiUrl; - this._secret = secret; - this._config = config; - this._mapper = mapper; - this._lastEventId = config.sinceEventId; - } - - start(): Promise { - this._abortController = new AbortController(); - this._status = 'active'; - - // Start streaming in background - this._stream().catch((error) => { - this._status = 'error'; - if (this._config.onError) { - this._config.onError(error); - } else { - getLogger().error({ - category: 'Nimbus', - message: 'EventSourcingDB observation stream error', - error, - }); - } - }); - - return Promise.resolve(); - } - - unsubscribe(): Promise { - if (this._abortController) { - this._abortController.abort(); - } - this._status = 'closed'; - - getLogger().debug({ - category: 'Nimbus', - message: 'EventSourcingDB subscription closed', - data: { - subject: this._config.subject, - lastEventId: this._lastEventId, - }, - }); - - return Promise.resolve(); - } - - getStatus(): EventStoreSubscriptionStatus { - return this._status; - } - - getLastEventId(): string | undefined { - return this._lastEventId; - } - - private async _stream(): Promise { - const body = JSON.stringify({ - subject: this._config.subject, - options: { - recursive: this._config.recursive ?? 
false, - ...(this._lastEventId && { - lowerBound: { - id: parseInt(this._lastEventId, 10), - type: 'exclusive', - }, - }), - }, - }); - - const response = await fetch(`${this._apiUrl}/observe-events`, { - method: 'POST', - headers: { - 'authorization': `Bearer ${this._secret}`, - 'content-type': 'application/json', - }, - body, - signal: this._abortController?.signal, - }); - - if (!response.ok) { - throw new GenericException('Failed to start observing events', { - status: response.status, - statusText: response.statusText, - url: response.url, - }); - } - - if (!response.body) { - throw new GenericException('Response body is null'); - } - - getLogger().info({ - category: 'Nimbus', - message: 'EventSourcingDB observation started', - data: { - subject: this._config.subject, - recursive: this._config.recursive, - sinceEventId: this._lastEventId, - }, - }); - - // Process NDJSON stream - const reader = response.body.getReader(); - const decoder = new TextDecoder(); - let buffer = ''; - - try { - while (true) { - const { done, value } = await reader.read(); - - if (done) { - break; - } - - buffer += decoder.decode(value, { stream: true }); - - // Process complete lines - const lines = buffer.split('\n'); - buffer = lines.pop() || ''; // Keep incomplete line in buffer - - for (const line of lines) { - if (line.trim() === '') continue; - - try { - const item = JSON.parse(line); - - // Skip heartbeat messages - if (item.type === 'heartbeat') { - continue; - } - - const dbEvent = item.payload as EventSourcingDbEvent; - const nimbusEvent = this._mapper(dbEvent); - - this._lastEventId = nimbusEvent.eventstoremetadata.id; - - await this._config.handler(nimbusEvent); - } catch (error: any) { - if (this._config.onError) { - this._config.onError(error); - } else { - throw error; - } - } - } - } - } finally { - reader.releaseLock(); - } - } -} diff --git a/packages/hono/src/index.ts b/packages/hono/src/index.ts index 5034518..078dec9 100644 --- a/packages/hono/src/index.ts +++ b/packages/hono/src/index.ts @@ -1,2 +1,3 @@ -export * from './lib/correlationId.ts'; -export * from './lib/logger.ts'; +export * from './lib/middleware/correlationId.ts'; +export * from './lib/middleware/logger.ts'; +export * from './lib/onError.ts'; diff --git a/packages/hono/src/lib/correlationId.ts b/packages/hono/src/lib/middleware/correlationId.ts similarity index 97% rename from packages/hono/src/lib/correlationId.ts rename to packages/hono/src/lib/middleware/correlationId.ts index c900157..fbcf344 100644 --- a/packages/hono/src/lib/correlationId.ts +++ b/packages/hono/src/lib/middleware/correlationId.ts @@ -102,6 +102,6 @@ export const correlationId = ( */ export const getCorrelationId = (c: { get: (key: typeof CORRELATION_ID_KEY) => string | undefined; -}): string | undefined => { - return c.get(CORRELATION_ID_KEY); +}): string => { + return c.get(CORRELATION_ID_KEY) ?? 
''; }; diff --git a/packages/hono/src/lib/logger.ts b/packages/hono/src/lib/middleware/logger.ts similarity index 98% rename from packages/hono/src/lib/logger.ts rename to packages/hono/src/lib/middleware/logger.ts index b545d14..d736e5a 100644 --- a/packages/hono/src/lib/logger.ts +++ b/packages/hono/src/lib/middleware/logger.ts @@ -1,4 +1,3 @@ -import type { MiddlewareHandler } from 'hono'; import { getLogger } from '@nimbus/core'; import { context, @@ -7,7 +6,8 @@ import { SpanStatusCode, trace, } from '@opentelemetry/api'; -import { getCorrelationId } from './correlationId.ts'; +import type { MiddlewareHandler } from 'hono'; +import { getCorrelationId } from '../middleware/correlationId.ts'; /** * Options for configuring the hono logger middleware. diff --git a/packages/hono/src/lib/onError.ts b/packages/hono/src/lib/onError.ts new file mode 100644 index 0000000..a43a6d1 --- /dev/null +++ b/packages/hono/src/lib/onError.ts @@ -0,0 +1,59 @@ +import { Exception, getLogger } from '@nimbus/core'; +import type { Context } from 'hono'; +import type { HTTPResponseError } from 'hono/types'; + +/** + * An error handler for Hono applications that maps + * Nimbus exceptions to HTTP responses and handles + * other unhandled errors. + * + * @param error - The error to handle. + * @param c - The Hono context. + * + * @example + * ```ts + * import { handleError } from '@nimbus/hono'; + * + * const app = new Hono(); + * app.onError(handleError); + * ``` + */ +export const handleError = (error: Error | HTTPResponseError, c: Context) => { + let statusCode = 500; + let response: Record = { + error: 'INTERNAL_SERVER_ERROR', + }; + + const isNimbusException = error instanceof Exception; + + if (isNimbusException) { + statusCode = error.statusCode ?? 500; + response = { + error: error.name, + message: error.message, + ...(error.details && { details: error.details }), + }; + + if (statusCode >= 500) { + getLogger().error({ + category: 'Nimbus', + message: error.message, + error, + }); + } else { + getLogger().debug({ + category: 'Nimbus', + message: error.message, + error, + }); + } + } else { + getLogger().critical({ + category: 'Nimbus', + message: 'An unhandled error occurred', + error, + }); + } + + return c.json(response, statusCode as any); +}; From b3160eb5200296ffb76727735dca3d9ef04aee46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Sat, 17 Jan 2026 20:24:52 +0100 Subject: [PATCH 18/37] better observability for mongodb operations by adding traces --- .claude/settings.local.json | 3 +- deno.lock | 111 ++++++++++++--- examples/hono-demo/deno.json | 26 +++- .../users/core/commands/addUser.command.ts | 5 +- .../src/iam/users/core/domain/user.ts | 20 +-- .../users/shell/messages/addUser.command.ts | 17 ++- .../users/shell/mongodb/user.collection.ts | 43 ++++++ .../users/shell/mongodb/user.repository.ts | 47 +++++++ examples/hono-demo/src/main.ts | 9 +- examples/hono-demo/src/seedCollections.ts | 31 +++++ .../hono-demo/src/shared/shell/mongodb.ts | 37 +++++ packages/core/deno.json | 1 + packages/core/src/index.ts | 3 +- .../lib/{event => eventBus}/eventBus.test.ts | 0 .../src/lib/{event => eventBus}/eventBus.ts | 34 +---- packages/core/src/lib/tracing/withSpan.ts | 128 ++++++++++++++++++ packages/hono/src/lib/middleware/logger.ts | 4 +- packages/mongodb/deno.json | 23 +++- packages/mongodb/src/lib/crud/aggregate.ts | 35 ++--- packages/mongodb/src/lib/crud/bulkWrite.ts | 16 ++- .../mongodb/src/lib/crud/countDocuments.ts | 16 ++- packages/mongodb/src/lib/crud/deleteMany.ts | 16 ++- 
packages/mongodb/src/lib/crud/deleteOne.ts | 16 ++- packages/mongodb/src/lib/crud/find.ts | 59 ++++---- packages/mongodb/src/lib/crud/findOne.ts | 39 +++--- .../mongodb/src/lib/crud/findOneAndDelete.ts | 45 +++--- .../mongodb/src/lib/crud/findOneAndReplace.ts | 53 ++++---- .../mongodb/src/lib/crud/findOneAndUpdate.ts | 45 +++--- packages/mongodb/src/lib/crud/insertMany.ts | 16 ++- packages/mongodb/src/lib/crud/insertOne.ts | 16 ++- packages/mongodb/src/lib/crud/replaceOne.ts | 16 ++- packages/mongodb/src/lib/crud/updateMany.ts | 16 ++- packages/mongodb/src/lib/crud/updateOne.ts | 16 ++- packages/mongodb/src/lib/tracing.ts | 50 +++++++ 34 files changed, 742 insertions(+), 270 deletions(-) create mode 100644 examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts create mode 100644 examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts create mode 100644 examples/hono-demo/src/seedCollections.ts create mode 100644 examples/hono-demo/src/shared/shell/mongodb.ts rename packages/core/src/lib/{event => eventBus}/eventBus.test.ts (100%) rename packages/core/src/lib/{event => eventBus}/eventBus.ts (86%) create mode 100644 packages/core/src/lib/tracing/withSpan.ts create mode 100644 packages/mongodb/src/lib/tracing.ts diff --git a/.claude/settings.local.json b/.claude/settings.local.json index c4fed74..36ded71 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -9,7 +9,8 @@ "Bash(deno doc:*)", "WebFetch(domain:docs.eventsourcingdb.io)", "Bash(cat:*)", - "WebSearch" + "WebSearch", + "Bash(ls:*)" ], "deny": [], "ask": [] diff --git a/deno.lock b/deno.lock index 8331a80..3157030 100644 --- a/deno.lock +++ b/deno.lock @@ -1,21 +1,70 @@ { "version": "5", "specifiers": { + "jsr:@oak/commons@1": "1.0.1", + "jsr:@oak/oak@^17.1.6": "17.2.0", + "jsr:@std/assert@1": "1.0.15", + "jsr:@std/assert@^1.0.10": "1.0.15", + "jsr:@std/bytes@1": "1.0.6", + "jsr:@std/crypto@1": "1.0.5", "jsr:@std/dotenv@*": "0.225.3", "jsr:@std/dotenv@~0.225.6": "0.225.6", + "jsr:@std/encoding@1": "1.0.10", + "jsr:@std/encoding@^1.0.10": "1.0.10", "jsr:@std/fmt@^1.0.4": "1.0.5", "jsr:@std/fmt@^1.0.5": "1.0.5", + "jsr:@std/http@1": "1.0.21", + "jsr:@std/internal@^1.0.10": "1.0.12", + "jsr:@std/internal@^1.0.12": "1.0.12", + "jsr:@std/media-types@1": "1.1.0", + "jsr:@std/path@1": "1.1.2", "jsr:@std/text@^1.0.10": "1.0.10", "jsr:@std/ulid@1": "1.0.0", "npm:@opentelemetry/api@^1.9.0": "1.9.0", "npm:@types/node@*": "22.5.4", "npm:ajv@^8.17.1": "8.17.1", "npm:hono@^4.11.4": "4.11.4", - "npm:mongodb@^6.12.0": "6.12.0", + "npm:mongodb@7": "7.0.0", + "npm:path-to-regexp@^6.3.0": "6.3.0", "npm:zod@^3.24.1": "3.25.76", "npm:zod@^4.3.5": "4.3.5" }, "jsr": { + "@oak/commons@1.0.1": { + "integrity": "889ff210f0b4292591721be07244ecb1b5c118742f5273c70cf30d7cd4184d0c", + "dependencies": [ + "jsr:@std/assert@1", + "jsr:@std/bytes", + "jsr:@std/crypto", + "jsr:@std/encoding@1", + "jsr:@std/http", + "jsr:@std/media-types" + ] + }, + "@oak/oak@17.2.0": { + "integrity": "938537a92fc7922a46a9984696c65fb189c9baad164416ac3e336768a9ff0cd1", + "dependencies": [ + "jsr:@oak/commons", + "jsr:@std/assert@1", + "jsr:@std/bytes", + "jsr:@std/http", + "jsr:@std/media-types", + "jsr:@std/path", + "npm:path-to-regexp" + ] + }, + "@std/assert@1.0.15": { + "integrity": "d64018e951dbdfab9777335ecdb000c0b4e3df036984083be219ce5941e4703b", + "dependencies": [ + "jsr:@std/internal@^1.0.12" + ] + }, + "@std/bytes@1.0.6": { + "integrity": "f6ac6adbd8ccd99314045f5703e23af0a68d7f7e58364b47d2c7f408aeb5820a" + }, + "@std/crypto@1.0.5": { 
+ "integrity": "0dcfbb319fe0bba1bd3af904ceb4f948cde1b92979ec1614528380ed308a3b40" + }, "@std/dotenv@0.225.3": { "integrity": "a95e5b812c27b0854c52acbae215856d9cce9d4bbf774d938c51d212711e8d4a" }, @@ -25,12 +74,33 @@ "@std/dotenv@0.225.6": { "integrity": "1d6f9db72f565bd26790fa034c26e45ecb260b5245417be76c2279e5734c421b" }, + "@std/encoding@1.0.10": { + "integrity": "8783c6384a2d13abd5e9e87a7ae0520a30e9f56aeeaa3bdf910a3eaaf5c811a1" + }, "@std/fmt@1.0.4": { "integrity": "e14fe5bedee26f80877e6705a97a79c7eed599e81bb1669127ef9e8bc1e29a74" }, "@std/fmt@1.0.5": { "integrity": "0cfab43364bc36650d83c425cd6d99910fc20c4576631149f0f987eddede1a4d" }, + "@std/http@1.0.21": { + "integrity": "abb5c747651ee6e3ea6139858fd9b1810d2c97f53a5e6722f3b6d27a6d263edc", + "dependencies": [ + "jsr:@std/encoding@^1.0.10" + ] + }, + "@std/internal@1.0.12": { + "integrity": "972a634fd5bc34b242024402972cd5143eac68d8dffaca5eaa4dba30ce17b027" + }, + "@std/media-types@1.1.0": { + "integrity": "c9d093f0c05c3512932b330e3cc1fe1d627b301db33a4c2c2185c02471d6eaa4" + }, + "@std/path@1.1.2": { + "integrity": "c0b13b97dfe06546d5e16bf3966b1cadf92e1cc83e56ba5476ad8b498d9e3038", + "dependencies": [ + "jsr:@std/internal@^1.0.10" + ] + }, "@std/text@1.0.10": { "integrity": "9dcab377450253c0efa9a9a0c731040bfd4e1c03f8303b5934381467b7954338" }, @@ -39,8 +109,8 @@ } }, "npm": { - "@mongodb-js/saslprep@1.1.9": { - "integrity": "sha512-tVkljjeEaAhCqTzajSdgbQ6gE6f3oneVwa3iXR6csiEwXXOFsiC6Uh9iAjAhXPtqa/XMDHWjjeNH/77m/Yq2dw==", + "@mongodb-js/saslprep@1.4.4": { + "integrity": "sha512-p7X/ytJDIdwUfFL/CLOhKgdfJe1Fa8uw9seJYvdOmnP9JBWGWHW69HkOixXS6Wy9yvGf1MbhcS6lVmrhy4jm2g==", "dependencies": [ "sparse-bitfield" ] @@ -57,8 +127,8 @@ "@types/webidl-conversions@7.0.3": { "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==" }, - "@types/whatwg-url@11.0.5": { - "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==", + "@types/whatwg-url@13.0.0": { + "integrity": "sha512-N8WXpbE6Wgri7KUSvrmQcqrMllKZ9uxkYWMt+mCSGwNc0Hsw9VQTW7ApqI4XNrx6/SaM2QQJCzMPDEXE058s+Q==", "dependencies": [ "@types/webidl-conversions" ] @@ -72,9 +142,8 @@ "require-from-string" ] }, - "bson@6.10.1": { - "integrity": "sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==", - "deprecated": true + "bson@7.0.0": { + "integrity": "sha512-Kwc6Wh4lQ5OmkqqKhYGKIuELXl+EPYSCObVE6bWsp1T/cGkOCBN0I8wF/T44BiuhHyNi1mmKVPXk60d41xZ7kw==" }, "fast-deep-equal@3.1.3": { "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" @@ -91,21 +160,24 @@ "memory-pager@1.5.0": { "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, - "mongodb-connection-string-url@3.0.1": { - "integrity": "sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==", + "mongodb-connection-string-url@7.0.0": { + "integrity": "sha512-irhhjRVLE20hbkRl4zpAYLnDMM+zIZnp0IDB9akAFFUZp/3XdOfwwddc7y6cNvF2WCEtfTYRwYbIfYa2kVY0og==", "dependencies": [ "@types/whatwg-url", "whatwg-url" ] }, - "mongodb@6.12.0": { - "integrity": "sha512-RM7AHlvYfS7jv7+BXund/kR64DryVI+cHbVAy9P61fnb1RcWZqOW1/Wj2YhqMCx+MuYhqTRGv7AwHBzmsCKBfA==", + "mongodb@7.0.0": { + "integrity": "sha512-vG/A5cQrvGGvZm2mTnCSz1LUcbOPl83hfB6bxULKQ8oFZauyox/2xbZOoGNl+64m8VBrETkdGCDBdOsCr3F3jg==", "dependencies": [ "@mongodb-js/saslprep", "bson", "mongodb-connection-string-url" ] }, + 
"path-to-regexp@6.3.0": { + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==" + }, "punycode@2.3.1": { "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, @@ -118,8 +190,8 @@ "memory-pager" ] }, - "tr46@4.1.1": { - "integrity": "sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==", + "tr46@5.1.1": { + "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", "dependencies": [ "punycode" ] @@ -130,8 +202,8 @@ "webidl-conversions@7.0.0": { "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==" }, - "whatwg-url@13.0.0": { - "integrity": "sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==", + "whatwg-url@14.2.0": { + "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", "dependencies": [ "tr46", "webidl-conversions" @@ -190,6 +262,7 @@ "jsr:@std/dotenv@~0.225.6", "jsr:@std/ulid@1", "npm:hono@^4.11.4", + "npm:mongodb@7", "npm:zod@^4.3.5" ] }, @@ -198,6 +271,7 @@ "jsr:@std/assert@^1.0.10", "jsr:@std/fmt@^1.0.5", "jsr:@std/ulid@1", + "npm:@opentelemetry/api@^1.9.0", "npm:zod@^4.3.5" ] }, @@ -212,8 +286,9 @@ "dependencies": [ "jsr:@std/assert@^1.0.10", "jsr:@std/text@^1.0.10", - "npm:mongodb@^6.12.0", - "npm:zod@^3.24.1" + "npm:@opentelemetry/api@^1.9.0", + "npm:mongodb@7", + "npm:zod@^4.3.5" ] }, "packages/oak": { diff --git a/examples/hono-demo/deno.json b/examples/hono-demo/deno.json index 85ceb24..fa54ead 100644 --- a/examples/hono-demo/deno.json +++ b/examples/hono-demo/deno.json @@ -2,19 +2,28 @@ "tasks": { "dev": "deno run -A --watch src/main.ts", "dev:otel": "sh start-with-otel.sh", - "test": "deno test -A" + "test": "deno test -A", + "database:seed": "deno run -A src/seedCollections.ts" }, "lint": { - "include": ["src/"], + "include": [ + "src/" + ], "exclude": [], "rules": { - "tags": ["recommended"], + "tags": [ + "recommended" + ], "include": [], - "exclude": ["no-explicit-any"] + "exclude": [ + "no-explicit-any" + ] } }, "fmt": { - "include": ["src/"], + "include": [ + "src/" + ], "exclude": [], "useTabs": false, "lineWidth": 80, @@ -24,12 +33,15 @@ "proseWrap": "always" }, "test": { - "include": ["src/"] + "include": [ + "src/" + ] }, "imports": { "@std/dotenv": "jsr:@std/dotenv@^0.225.6", "@std/ulid": "jsr:@std/ulid@^1.0.0", "hono": "npm:hono@^4.11.4", + "mongodb": "npm:mongodb@^7.0.0", "zod": "npm:zod@^4.3.5" } -} +} \ No newline at end of file diff --git a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts index 564fde4..0219a23 100644 --- a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts +++ b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts @@ -1,5 +1,6 @@ -import { z } from 'zod'; import { commandSchema, InvalidInputException } from '@nimbus/core'; +import { ObjectId } from 'mongodb'; +import { z } from 'zod'; import { UserState } from '../domain/user.ts'; export const ADD_USER_COMMAND_TYPE = 'at.overlap.nimbus.add-user'; @@ -28,7 +29,7 @@ export const addUser = ( } return { - id: crypto.randomUUID(), + _id: new ObjectId().toString(), email: email, firstName: command.data.firstName, lastName: command.data.lastName, diff --git a/examples/hono-demo/src/iam/users/core/domain/user.ts 
b/examples/hono-demo/src/iam/users/core/domain/user.ts index 03c0560..f064353 100644 --- a/examples/hono-demo/src/iam/users/core/domain/user.ts +++ b/examples/hono-demo/src/iam/users/core/domain/user.ts @@ -1,10 +1,14 @@ -export type User = { - id: string; - email: string; - firstName: string; - lastName: string; - createdAt: string; - updatedAt: string; -}; +import { z } from 'zod'; + +export const User = z.object({ + _id: z.string().length(24), + email: z.email(), + firstName: z.string(), + lastName: z.string(), + createdAt: z.iso.datetime(), + updatedAt: z.iso.datetime(), +}); + +export type User = z.infer; export type UserState = User | null; diff --git a/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts index ff4cbe7..429bf7a 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts @@ -3,11 +3,26 @@ import { AddUserCommand, } from '../../core/commands/addUser.command.ts'; import { UserState } from '../../core/domain/user.ts'; +import { userRepository } from '../mongodb/user.repository.ts'; export const addUserCommandHandler = async (command: AddUserCommand) => { - let state: UserState = await Promise.resolve(null); + let state: UserState = null; + + try { + state = await userRepository.findOne({ + filter: { email: command.data.email }, + }); + } catch (_error) { + state = null; + } state = addUser(state, command); + if (state) { + state = await userRepository.insertOne({ + item: state, + }); + } + return state; }; diff --git a/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts b/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts new file mode 100644 index 0000000..7206616 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts @@ -0,0 +1,43 @@ +import { MongoCollectionDefinition } from '@nimbus/mongodb'; + +export const USERS_COLLECTION: MongoCollectionDefinition = { + name: 'users', + options: { + validator: { + $jsonSchema: { + bsonType: 'object', + required: [ + 'email', + 'firstName', + 'lastName', + 'createdAt', + 'updatedAt', + ], + properties: { + email: { + bsonType: 'string', + }, + firstName: { + bsonType: 'string', + }, + lastName: { + bsonType: 'string', + }, + createdAt: { + bsonType: 'date', + }, + updatedAt: { + bsonType: 'date', + }, + }, + }, + }, + }, + indexes: [ + { key: { email: 1 }, unique: true }, + { key: { firstName: 1 } }, + { key: { lastName: 1 } }, + { key: { createdAt: 1 } }, + { key: { updatedAt: 1 } }, + ], +}; diff --git a/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts b/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts new file mode 100644 index 0000000..e972e3f --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts @@ -0,0 +1,47 @@ +import { MongoDBRepository } from '@nimbus/mongodb'; +import { getEnv } from '@nimbus/utils'; +import { Document, ObjectId } from 'mongodb'; +import { mongoManager } from '../../../../shared/shell/mongodb.ts'; +import { User } from '../../core/domain/user.ts'; +import { USERS_COLLECTION } from './user.collection.ts'; + +class UserRepository extends MongoDBRepository { + constructor() { + const env = getEnv({ variables: ['MONGO_DB'] }); + + super( + () => { + return mongoManager.getCollection( + env.MONGO_DB, + USERS_COLLECTION.name, + ); + }, + User, + 'User', + ); + } + + override _mapDocumentToEntity(doc: Document): 
User { + return User.parse({ + _id: doc._id.toString(), + email: doc.email, + firstName: doc.firstName, + lastName: doc.lastName, + createdAt: doc.createdAt.toISOString(), + updatedAt: doc.updatedAt.toISOString(), + }); + } + + override _mapEntityToDocument(user: User): Document { + return { + _id: new ObjectId(user._id), + email: user.email, + firstName: user.firstName, + lastName: user.lastName, + createdAt: new Date(user.createdAt), + updatedAt: new Date(user.updatedAt), + }; + } +} + +export const userRepository = new UserRepository(); diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts index c105b95..31502b8 100644 --- a/examples/hono-demo/src/main.ts +++ b/examples/hono-demo/src/main.ts @@ -1,5 +1,3 @@ -import '@std/dotenv/load'; -import process from 'node:process'; import { getLogger, jsonLogFormatter, @@ -7,8 +5,11 @@ import { prettyLogFormatter, setupLogger, } from '@nimbus/core'; -import { initMessages } from './shared/shell/messageRouter.ts'; +import '@std/dotenv/load'; +import process from 'node:process'; import { app } from './shared/shell/http.ts'; +import { initMessages } from './shared/shell/messageRouter.ts'; +import { initMongoConnectionManager } from './shared/shell/mongodb.ts'; setupLogger({ logLevel: parseLogLevel(process.env.LOG_LEVEL), @@ -18,6 +19,8 @@ setupLogger({ useConsoleColors: process.env.LOG_FORMAT === 'pretty', }); +initMongoConnectionManager(); + initMessages(); if (process.env.PORT) { diff --git a/examples/hono-demo/src/seedCollections.ts b/examples/hono-demo/src/seedCollections.ts new file mode 100644 index 0000000..0c96e02 --- /dev/null +++ b/examples/hono-demo/src/seedCollections.ts @@ -0,0 +1,31 @@ +import { deployMongoCollection } from '@nimbus/mongodb'; +import '@std/dotenv/load'; +import process from 'node:process'; +import { USERS_COLLECTION } from './iam/users/shell/mongodb/user.collection.ts'; +import { + initMongoConnectionManager, + mongoManager, +} from './shared/shell/mongodb.ts'; + +const { MONGO_DB } = process.env; + +try { + initMongoConnectionManager(); + + const mongoClient = await mongoManager.getClient(); + + const result = await Promise.allSettled([ + deployMongoCollection({ + mongoClient: mongoClient, + dbName: MONGO_DB ?? '', + collectionDefinition: USERS_COLLECTION, + allowUpdateIndexes: true, + }), + ]); + + console.log('\nDeployed collections', JSON.stringify(result, null, 2)); + process.exit(0); +} catch (error) { + console.error(error); + process.exit(1); +} diff --git a/examples/hono-demo/src/shared/shell/mongodb.ts b/examples/hono-demo/src/shared/shell/mongodb.ts new file mode 100644 index 0000000..1dddf94 --- /dev/null +++ b/examples/hono-demo/src/shared/shell/mongodb.ts @@ -0,0 +1,37 @@ +import { getLogger } from '@nimbus/core'; +import { MongoConnectionManager } from '@nimbus/mongodb'; +import { ServerApiVersion } from 'mongodb'; +import process from 'node:process'; + +export const mongoManager = MongoConnectionManager.getInstance( + process.env['MONGO_URL'] ?? 
'',
+    {
+        connectionTimeout: 1000 * 60 * 5,
+        mongoClientOptions: {
+            appName: 'overtools',
+            serverApi: {
+                version: ServerApiVersion.v1,
+                strict: false,
+                deprecationErrors: true,
+            },
+            maxPoolSize: 10,
+            minPoolSize: 0,
+            maxIdleTimeMS: 1000 * 60 * 1, // 1 minute idle timeout
+            connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout
+            socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout
+        },
+    },
+);
+
+export const initMongoConnectionManager = () => {
+    // Check to see if the MongoDB connection can be cleaned up
+    // This is to prevent the MongoDB connection from being left open for too long
+    setInterval(() => {
+        mongoManager.cleanup().catch((error) => {
+            getLogger().error({
+                message: error.message,
+                error,
+            });
+        });
+    }, 1000 * 60); // Check every minute
+};
diff --git a/packages/core/deno.json b/packages/core/deno.json
index ce38fa6..1ef8d6e 100644
--- a/packages/core/deno.json
+++ b/packages/core/deno.json
@@ -40,6 +40,7 @@
         ]
     },
     "imports": {
+        "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0",
         "@std/assert": "jsr:@std/assert@^1.0.10",
         "@std/fmt": "jsr:@std/fmt@^1.0.5",
         "@std/ulid": "jsr:@std/ulid@^1.0.0",
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index 68d53f4..1f5c229 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -1,4 +1,4 @@
-export * from './lib/event/eventBus.ts';
+export * from './lib/eventBus/eventBus.ts';
 export * from './lib/exception/exception.ts';
 export * from './lib/exception/forbiddenException.ts';
 export * from './lib/exception/genericException.ts';
@@ -14,3 +14,4 @@ export * from './lib/message/event.ts';
 export * from './lib/message/message.ts';
 export * from './lib/message/query.ts';
 export * from './lib/message/router.ts';
+export * from './lib/tracing/withSpan.ts';
diff --git a/packages/core/src/lib/event/eventBus.test.ts b/packages/core/src/lib/eventBus/eventBus.test.ts
similarity index 100%
rename from packages/core/src/lib/event/eventBus.test.ts
rename to packages/core/src/lib/eventBus/eventBus.test.ts
diff --git a/packages/core/src/lib/event/eventBus.ts b/packages/core/src/lib/eventBus/eventBus.ts
similarity index 86%
rename from packages/core/src/lib/event/eventBus.ts
rename to packages/core/src/lib/eventBus/eventBus.ts
index b5bc7bc..de5d54f 100644
--- a/packages/core/src/lib/event/eventBus.ts
+++ b/packages/core/src/lib/eventBus/eventBus.ts
@@ -2,15 +2,13 @@ import EventEmitter from 'node:events';
 import { GenericException } from '../exception/genericException.ts';
 import { getLogger } from '../log/logger.ts';
 import type { Event } from '../message/event.ts';
-import { type MessageHandler, MessageRouter } from '../message/router.ts';
 
 /**
  * The input type for subscribing to an event.
  */
 export type SubscribeEventInput = {
     type: string;
-    handler: MessageHandler;
-    allowUnsafeInput?: boolean;
+    handler: (event: Event) => Promise;
     onError?: (error: any, event: Event) => void;
     options?: NimbusEventBusOptions;
 };
@@ -36,7 +34,7 @@ export type NimbusEventBusOptions = {
  *
  * eventBus.subscribeEvent({
  *     type: 'at.overlap.nimbus.account-added',
- *     handler: accountAddedHandler,
+ *     handler: (event) => { ... },
  * });
  *
  * eventBus.putEvent({
@@ -115,7 +113,7 @@ export class NimbusEventBus {
      * Subscribe to an event.
      *
      * @param {string} eventType - The type of event to subscribe to.
-     * @param {MessageHandler} handler - The handler to call when the event got published.
+     * @param {Function} handler - The handler to process the event.
* @param {Function} [onError] - The function to call when the event could not be handled after the maximum number of retries. * @param {NimbusEventBusOptions} [options] - The options for the event bus. * @param {number} [options.maxRetries] - The maximum number of retries for handling the event in case of an error. @@ -133,7 +131,6 @@ export class NimbusEventBus { public subscribeEvent({ type, handler, - allowUnsafeInput, onError, options, }: SubscribeEventInput): void { @@ -145,18 +142,10 @@ export class NimbusEventBus { const maxRetries = options?.maxRetries ?? this._maxRetries; const retryDelay = options?.retryDelay ?? this._retryDelay; - const nimbusRouter = new MessageRouter('event', { - logInput: this._logInput, - }); - - nimbusRouter.register(type, handler, { - allowUnsafeInput: allowUnsafeInput ?? false, - }); - const handleEvent = async (event: Event) => { try { await this._processEvent( - nimbusRouter, + handler, event, maxRetries, retryDelay, @@ -177,19 +166,8 @@ export class NimbusEventBus { this._eventEmitter.on(type, handleEvent); } - private _logInput(input: any) { - getLogger().info({ - category: 'Nimbus', - ...(input?.data?.correlationId && { - correlationId: input?.data?.correlationId, - }), - message: - `${input?.data?.correlationId} - [Event] ${input?.type} from ${input?.source}`, - }); - } - private async _processEvent( - nimbusRouter: MessageRouter, + handler: (event: Event) => Promise, event: Event, maxRetries: number, retryDelay: number, @@ -198,7 +176,7 @@ export class NimbusEventBus { while (attempt < maxRetries) { try { - await nimbusRouter.route(event); + await handler(event); break; } catch (error: any) { attempt++; diff --git a/packages/core/src/lib/tracing/withSpan.ts b/packages/core/src/lib/tracing/withSpan.ts new file mode 100644 index 0000000..c43587c --- /dev/null +++ b/packages/core/src/lib/tracing/withSpan.ts @@ -0,0 +1,128 @@ +import { + type Attributes, + context, + type Span, + SpanKind, + SpanStatusCode, + trace, +} from '@opentelemetry/api'; + +/** + * Options for configuring a span created by withSpan. + */ +export type WithSpanOptions = { + /** + * The name of the span. This will be displayed in your tracing UI. + */ + name: string; + /** + * The name of the tracer. Defaults to "nimbus". + */ + tracerName?: string; + /** + * The kind of span. Defaults to SpanKind.INTERNAL. + */ + kind?: SpanKind; + /** + * Initial attributes to set on the span. + */ + attributes?: Attributes; +}; + +/** + * Higher-order function that wraps a function with OpenTelemetry tracing. + * + * Creates a child span within the current trace context. 
The span automatically: + * - Inherits the parent span from the active context + * - Records the function's execution time + * - Sets error status and records exceptions on failure + * - Ends when the function completes (success or failure) + * + * @example + * ```ts + * import { withSpan } from '@nimbus/core'; + * + * const fetchUser = withSpan( + * { + * name: 'fetchUser', + * attributes: { + * 'user.source': 'database' + * } + * }, + * async (userId: string) => { + * return await db.users.findById(userId); + * } + * ); + * + * const user = await fetchUser('123'); + * ``` + * + * @example + * ```ts + * const processOrder = withSpan( + * { name: 'processOrder' }, + * async (orderId: string, span: Span) => { + * const order = await db.orders.findById(orderId); + * + * span.setAttribute('order.total', order.total); + * span.setAttribute('order.items', order.items.length); + * + * return await processPayment(order); + * } + * ); + * ``` + */ +export const withSpan = ( + options: WithSpanOptions, + fn: (...args: [...TArgs, Span]) => TReturn, +): (...args: TArgs) => TReturn => { + const tracerName = options.tracerName ?? 'nimbus'; + const tracer = trace.getTracer(tracerName); + + return (...args: TArgs): TReturn => { + const parentContext = context.active(); + + return tracer.startActiveSpan( + options.name, + { + kind: options.kind ?? SpanKind.INTERNAL, + attributes: options.attributes, + }, + parentContext, + (span) => { + try { + const result = fn(...args, span); + + // Handle promises + if (result instanceof Promise) { + return result + .then((value) => { + span.end(); + return value; + }) + .catch((err) => { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: (err as Error).message, + }); + span.recordException(err as Error); + span.end(); + throw err; + }) as TReturn; + } + + span.end(); + return result; + } catch (err) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: (err as Error).message, + }); + span.recordException(err as Error); + span.end(); + throw err; + } + }, + ) as TReturn; + }; +}; diff --git a/packages/hono/src/lib/middleware/logger.ts b/packages/hono/src/lib/middleware/logger.ts index d736e5a..182ae86 100644 --- a/packages/hono/src/lib/middleware/logger.ts +++ b/packages/hono/src/lib/middleware/logger.ts @@ -21,7 +21,7 @@ export type LoggerOptions = { enableTracing?: boolean; /** * Optionally change the name of the tracer. - * Defaults to "nimbus-hono". + * Defaults to "nimbus". */ tracerName?: string; }; @@ -62,7 +62,7 @@ const time = (start: number) => { * ``` */ export const logger = (options?: LoggerOptions): MiddlewareHandler => { - const tracerName = options?.tracerName ?? 'nimbus-hono'; + const tracerName = options?.tracerName ?? 
'nimbus'; const tracer = trace.getTracer(tracerName); return async (c, next) => { diff --git a/packages/mongodb/deno.json b/packages/mongodb/deno.json index 290f697..9963238 100644 --- a/packages/mongodb/deno.json +++ b/packages/mongodb/deno.json @@ -13,7 +13,9 @@ "homepage": "https://nimbus.overlap.at", "exports": "./src/index.ts", "fmt": { - "include": ["src/"], + "include": [ + "src/" + ], "useTabs": false, "lineWidth": 80, "indentWidth": 4, @@ -22,18 +24,25 @@ "proseWrap": "always" }, "lint": { - "include": ["src/"], + "include": [ + "src/" + ], "rules": { - "exclude": ["no-explicit-any"] + "exclude": [ + "no-explicit-any" + ] } }, "test": { - "include": ["src/"] + "include": [ + "src/" + ] }, "imports": { + "@opentelemetry/api": "npm:@opentelemetry/api@^1.9.0", "@std/assert": "jsr:@std/assert@^1.0.10", "@std/text": "jsr:@std/text@^1.0.10", - "mongodb": "npm:mongodb@^6.12.0", - "zod": "npm:zod@^3.24.1" + "mongodb": "npm:mongodb@^7.0.0", + "zod": "npm:zod@^4.3.5" } -} +} \ No newline at end of file diff --git a/packages/mongodb/src/lib/crud/aggregate.ts b/packages/mongodb/src/lib/crud/aggregate.ts index e242d87..c6d502c 100644 --- a/packages/mongodb/src/lib/crud/aggregate.ts +++ b/packages/mongodb/src/lib/crud/aggregate.ts @@ -2,6 +2,7 @@ import { GenericException } from '@nimbus/core'; import type { AggregateOptions, Collection, Document } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the aggregate function. @@ -34,29 +35,31 @@ export type Aggregate = ( * * @returns {Promise} The aggregated documents. */ -export const aggregate: Aggregate = async ({ +export const aggregate: Aggregate = ({ collection, aggregation, mapDocument, outputType, options, }) => { - let res: Document[] = []; + return withSpan('aggregate', collection, async () => { + let res: Document[] = []; - try { - const aggregationRes = collection.aggregate(aggregation, options); - res = await aggregationRes.toArray(); - } catch (error) { - throw handleMongoError(error); - } + try { + const aggregationRes = collection.aggregate(aggregation, options); + res = await aggregationRes.toArray(); + } catch (error) { + throw handleMongoError(error); + } - try { - return res.map((item) => outputType.parse(mapDocument(item))); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return res.map((item) => outputType.parse(mapDocument(item))); + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/bulkWrite.ts b/packages/mongodb/src/lib/crud/bulkWrite.ts index c25c40f..3bb429e 100644 --- a/packages/mongodb/src/lib/crud/bulkWrite.ts +++ b/packages/mongodb/src/lib/crud/bulkWrite.ts @@ -6,6 +6,7 @@ import type { Document, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the bulkWrite function. @@ -33,15 +34,16 @@ export type BulkWrite = ( * * @returns {Promise} The result of the bulk write operation. 
*/ -export const bulkWrite: BulkWrite = async ({ +export const bulkWrite: BulkWrite = ({ collection, operations, options, }) => { - try { - const res = await collection.bulkWrite(operations, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('bulkWrite', collection, async () => { + try { + return await collection.bulkWrite(operations, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/countDocuments.ts b/packages/mongodb/src/lib/crud/countDocuments.ts index 83e5399..6b4f32a 100644 --- a/packages/mongodb/src/lib/crud/countDocuments.ts +++ b/packages/mongodb/src/lib/crud/countDocuments.ts @@ -5,6 +5,7 @@ import type { Filter, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the countDocuments function. @@ -32,15 +33,16 @@ export type CountDocuments = ( * * @returns {Promise} The number of documents. */ -export const countDocuments: CountDocuments = async ({ +export const countDocuments: CountDocuments = ({ collection, filter, options, }) => { - try { - const res = await collection.countDocuments(filter, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('countDocuments', collection, async () => { + try { + return await collection.countDocuments(filter, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/deleteMany.ts b/packages/mongodb/src/lib/crud/deleteMany.ts index bed6604..f5fa17b 100644 --- a/packages/mongodb/src/lib/crud/deleteMany.ts +++ b/packages/mongodb/src/lib/crud/deleteMany.ts @@ -6,6 +6,7 @@ import type { Filter, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the deleteMany function. @@ -33,15 +34,16 @@ export type DeleteMany = ( * * @returns {Promise} The result of the delete operation. */ -export const deleteMany: DeleteMany = async ({ +export const deleteMany: DeleteMany = ({ collection, filter, options, }) => { - try { - const res = await collection.deleteMany(filter, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('deleteMany', collection, async () => { + try { + return await collection.deleteMany(filter, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/deleteOne.ts b/packages/mongodb/src/lib/crud/deleteOne.ts index e77a576..eb809db 100644 --- a/packages/mongodb/src/lib/crud/deleteOne.ts +++ b/packages/mongodb/src/lib/crud/deleteOne.ts @@ -6,6 +6,7 @@ import type { Filter, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the deleteOne function. @@ -33,15 +34,16 @@ export type DeleteOne = ( * * @returns {Promise} The result of the delete operation. 
*/ -export const deleteOne: DeleteOne = async ({ +export const deleteOne: DeleteOne = ({ collection, filter, options, }) => { - try { - const res = await collection.deleteOne(filter, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('deleteOne', collection, async () => { + try { + return await collection.deleteOne(filter, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/find.ts b/packages/mongodb/src/lib/crud/find.ts index 5f4a43d..b094320 100644 --- a/packages/mongodb/src/lib/crud/find.ts +++ b/packages/mongodb/src/lib/crud/find.ts @@ -9,6 +9,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the find function. @@ -49,7 +50,7 @@ export type Find = ( * * @returns {Promise} The found documents. */ -export const find: Find = async ({ +export const find: Find = ({ collection, filter, limit, @@ -60,39 +61,41 @@ export const find: Find = async ({ outputType, options, }) => { - let res: WithId[] = []; + return withSpan('find', collection, async () => { + let res: WithId[] = []; - try { - const findRes = collection.find(filter, options); + try { + const findRes = collection.find(filter, options); - if (typeof limit !== 'undefined') { - findRes.limit(limit); - } + if (typeof limit !== 'undefined') { + findRes.limit(limit); + } - if (typeof skip !== 'undefined') { - findRes.skip(skip); - } + if (typeof skip !== 'undefined') { + findRes.skip(skip); + } - if (typeof sort !== 'undefined') { - findRes.sort(sort); - } + if (typeof sort !== 'undefined') { + findRes.sort(sort); + } - if (typeof project !== 'undefined') { - findRes.project(project); - } + if (typeof project !== 'undefined') { + findRes.project(project); + } - res = await findRes.toArray(); - } catch (error) { - throw handleMongoError(error); - } + res = await findRes.toArray(); + } catch (error) { + throw handleMongoError(error); + } - try { - return res.map((item) => outputType.parse(mapDocument(item))); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return res.map((item) => outputType.parse(mapDocument(item))); + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOne.ts b/packages/mongodb/src/lib/crud/findOne.ts index 4596259..3a489a9 100644 --- a/packages/mongodb/src/lib/crud/findOne.ts +++ b/packages/mongodb/src/lib/crud/findOne.ts @@ -2,6 +2,7 @@ import { GenericException, NotFoundException } from '@nimbus/core'; import type { Collection, Document, Filter, WithId } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOne function. @@ -32,31 +33,33 @@ export type FindOne = ( * * @returns {Promise} The found document. 
*/ -export const findOne: FindOne = async ({ +export const findOne: FindOne = ({ collection, filter, mapDocument, outputType, }) => { - let res: WithId | null = null; + return withSpan('findOne', collection, async () => { + let res: WithId | null = null; - try { - res = await collection.findOne(filter); - } catch (error) { - throw handleMongoError(error); - } + try { + res = await collection.findOne(filter); + } catch (error) { + throw handleMongoError(error); + } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)); + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOneAndDelete.ts b/packages/mongodb/src/lib/crud/findOneAndDelete.ts index df6bb74..41d9393 100644 --- a/packages/mongodb/src/lib/crud/findOneAndDelete.ts +++ b/packages/mongodb/src/lib/crud/findOneAndDelete.ts @@ -8,6 +8,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOneAndDelete function. @@ -40,36 +41,38 @@ export type FindOneAndDelete = ( * * @returns {Promise} The found and deleted document. */ -export const findOneAndDelete: FindOneAndDelete = async ({ +export const findOneAndDelete: FindOneAndDelete = ({ collection, filter, mapDocument, outputType, options, }) => { - let res: WithId | null = null; + return withSpan('findOneAndDelete', collection, async () => { + let res: WithId | null = null; - try { - if (options) { - res = await collection.findOneAndDelete(filter, options); - } else { - res = await collection.findOneAndDelete(filter); + try { + if (options) { + res = await collection.findOneAndDelete(filter, options); + } else { + res = await collection.findOneAndDelete(filter); + } + } catch (error) { + throw handleMongoError(error); } - } catch (error) { - throw handleMongoError(error); - } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)); + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOneAndReplace.ts b/packages/mongodb/src/lib/crud/findOneAndReplace.ts index f928023..8be2283 100644 --- a/packages/mongodb/src/lib/crud/findOneAndReplace.ts +++ b/packages/mongodb/src/lib/crud/findOneAndReplace.ts @@ -9,6 +9,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOneAndReplace function. 
@@ -43,7 +44,7 @@ export type FindOneAndReplace = ( * * @returns {Promise} The found and replaced document. */ -export const findOneAndReplace: FindOneAndReplace = async ({ +export const findOneAndReplace: FindOneAndReplace = ({ collection, filter, replacement, @@ -51,33 +52,35 @@ export const findOneAndReplace: FindOneAndReplace = async ({ outputType, options, }) => { - let res: WithId | null = null; + return withSpan('findOneAndReplace', collection, async () => { + let res: WithId | null = null; - try { - if (options) { - res = await collection.findOneAndReplace( - filter, - replacement, - options, - ); - } else { - res = await collection.findOneAndReplace(filter, replacement); + try { + if (options) { + res = await collection.findOneAndReplace( + filter, + replacement, + options, + ); + } else { + res = await collection.findOneAndReplace(filter, replacement); + } + } catch (error) { + throw handleMongoError(error); } - } catch (error) { - throw handleMongoError(error); - } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)); + } catch (error) { + const exception = error instanceof Error + ? new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts index 1e312ff..5381c66 100644 --- a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts +++ b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts @@ -9,6 +9,7 @@ import type { } from 'mongodb'; import type { ZodType } from 'zod'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the findOneAndUpdate function. @@ -43,7 +44,7 @@ export type FindOneAndUpdate = ( * * @returns {Promise} The found and updated document. */ -export const findOneAndUpdate: FindOneAndUpdate = async ({ +export const findOneAndUpdate: FindOneAndUpdate = ({ collection, filter, update, @@ -51,29 +52,31 @@ export const findOneAndUpdate: FindOneAndUpdate = async ({ outputType, options, }) => { - let res: WithId | null = null; + return withSpan('findOneAndUpdate', collection, async () => { + let res: WithId | null = null; - try { - if (options) { - res = await collection.findOneAndUpdate(filter, update, options); - } else { - res = await collection.findOneAndUpdate(filter, update); + try { + if (options) { + res = await collection.findOneAndUpdate(filter, update, options); + } else { + res = await collection.findOneAndUpdate(filter, update); + } + } catch (error) { + throw handleMongoError(error); } - } catch (error) { - throw handleMongoError(error); - } - if (!res) { - throw new NotFoundException('Document not found'); - } + if (!res) { + throw new NotFoundException('Document not found'); + } - try { - return outputType.parse(mapDocument(res)); - } catch (error) { - const exception = error instanceof Error - ? new GenericException().fromError(error) - : new GenericException(); + try { + return outputType.parse(mapDocument(res)); + } catch (error) { + const exception = error instanceof Error + ? 
new GenericException().fromError(error) + : new GenericException(); - throw exception; - } + throw exception; + } + }); }; diff --git a/packages/mongodb/src/lib/crud/insertMany.ts b/packages/mongodb/src/lib/crud/insertMany.ts index 5f545fc..7522cb4 100644 --- a/packages/mongodb/src/lib/crud/insertMany.ts +++ b/packages/mongodb/src/lib/crud/insertMany.ts @@ -6,6 +6,7 @@ import type { OptionalUnlessRequiredId, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the insertMany function. @@ -33,15 +34,16 @@ export type InsertMany = ( * * @returns {Promise} The result of the insert operation. */ -export const insertMany: InsertMany = async ({ +export const insertMany: InsertMany = ({ collection, documents, options, }) => { - try { - const res = await collection.insertMany(documents, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('insertMany', collection, async () => { + try { + return await collection.insertMany(documents, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/insertOne.ts b/packages/mongodb/src/lib/crud/insertOne.ts index cfedb80..78a23e6 100644 --- a/packages/mongodb/src/lib/crud/insertOne.ts +++ b/packages/mongodb/src/lib/crud/insertOne.ts @@ -6,6 +6,7 @@ import type { OptionalUnlessRequiredId, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the insertOne function. @@ -33,15 +34,16 @@ export type InsertOne = ( * * @returns {Promise} The result of the insert operation. */ -export const insertOne: InsertOne = async ({ +export const insertOne: InsertOne = ({ collection, document, options, }) => { - try { - const res = await collection.insertOne(document, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('insertOne', collection, async () => { + try { + return await collection.insertOne(document, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/replaceOne.ts b/packages/mongodb/src/lib/crud/replaceOne.ts index 63e560f..9f15cff 100644 --- a/packages/mongodb/src/lib/crud/replaceOne.ts +++ b/packages/mongodb/src/lib/crud/replaceOne.ts @@ -7,6 +7,7 @@ import type { WithoutId, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the replaceOne function. @@ -36,16 +37,17 @@ export type ReplaceOne = ( * * @returns {Promise} The result of the replace operation. 
*/ -export const replaceOne: ReplaceOne = async ({ +export const replaceOne: ReplaceOne = ({ collection, filter, replacement, options, }) => { - try { - const res = await collection.replaceOne(filter, replacement, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('replaceOne', collection, async () => { + try { + return await collection.replaceOne(filter, replacement, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/updateMany.ts b/packages/mongodb/src/lib/crud/updateMany.ts index cd880c6..8125871 100644 --- a/packages/mongodb/src/lib/crud/updateMany.ts +++ b/packages/mongodb/src/lib/crud/updateMany.ts @@ -7,6 +7,7 @@ import type { UpdateResult, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the updateMany function. @@ -36,16 +37,17 @@ export type UpdateMany = ( * * @returns {Promise} The result of the update operation. */ -export const updateMany: UpdateMany = async ({ +export const updateMany: UpdateMany = ({ collection, filter, update, options, }) => { - try { - const res = await collection.updateMany(filter, update, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('updateMany', collection, async () => { + try { + return await collection.updateMany(filter, update, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/crud/updateOne.ts b/packages/mongodb/src/lib/crud/updateOne.ts index 2d938e0..e773469 100644 --- a/packages/mongodb/src/lib/crud/updateOne.ts +++ b/packages/mongodb/src/lib/crud/updateOne.ts @@ -7,6 +7,7 @@ import type { UpdateResult, } from 'mongodb'; import { handleMongoError } from '../handleMongoError.ts'; +import { withSpan } from '../tracing.ts'; /** * Type to define the input for the updateOne function. @@ -36,16 +37,17 @@ export type UpdateOne = ( * * @returns {Promise} The result of the update operation. */ -export const updateOne: UpdateOne = async ({ +export const updateOne: UpdateOne = ({ collection, filter, update, options, }) => { - try { - const res = await collection.updateOne(filter, update, options); - return res; - } catch (error) { - throw handleMongoError(error); - } + return withSpan('updateOne', collection, async () => { + try { + return await collection.updateOne(filter, update, options); + } catch (error) { + throw handleMongoError(error); + } + }); }; diff --git a/packages/mongodb/src/lib/tracing.ts b/packages/mongodb/src/lib/tracing.ts new file mode 100644 index 0000000..32ced6a --- /dev/null +++ b/packages/mongodb/src/lib/tracing.ts @@ -0,0 +1,50 @@ +import { SpanKind, SpanStatusCode, trace } from '@opentelemetry/api'; +import type { Collection, Document } from 'mongodb'; + +export const tracer = trace.getTracer('nimbus'); + +export const DB_SYSTEM = 'mongodb'; + +/** + * Wraps an async function with OpenTelemetry tracing. 
+ *
+ * @param operation - The MongoDB operation name (e.g., 'findOne', 'insertMany')
+ * @param collection - The MongoDB collection being operated on
+ * @param fn - The async function to execute within the span
+ * @returns The result of the async function
+ */
+export const withSpan = (
+    operation: string,
+    collection: Collection,
+    fn: () => Promise,
+): Promise => {
+    return tracer.startActiveSpan(
+        `mongodb.${operation}`,
+        {
+            kind: SpanKind.CLIENT,
+            attributes: {
+                'db.system': DB_SYSTEM,
+                'db.operation': operation,
+                'db.mongodb.collection': collection.collectionName,
+            },
+        },
+        async (span) => {
+            try {
+                return await fn();
+            } catch (error) {
+                span.setStatus({
+                    code: SpanStatusCode.ERROR,
+                    message: error instanceof Error
+                        ? error.message
+                        : 'Unknown error',
+                });
+                span.recordException(
+                    error instanceof Error ? error : new Error('Unknown error'),
+                );
+                throw error;
+            } finally {
+                span.end();
+            }
+        },
+    );
+};

From d49d3ec53f043df6807e20e2fee811870f5e655e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Go=CC=88rdes?=
Date: Thu, 22 Jan 2026 13:30:55 +0100
Subject: [PATCH 19/37] feat(mongodb): add metrics for mongodb operations

---
 packages/mongodb/src/lib/tracing.ts | 46 +++++++++++++++++++++++++++--
 1 file changed, 43 insertions(+), 3 deletions(-)

diff --git a/packages/mongodb/src/lib/tracing.ts b/packages/mongodb/src/lib/tracing.ts
index 32ced6a..d8e7091 100644
--- a/packages/mongodb/src/lib/tracing.ts
+++ b/packages/mongodb/src/lib/tracing.ts
@@ -1,12 +1,30 @@
-import { SpanKind, SpanStatusCode, trace } from '@opentelemetry/api';
+import { metrics, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api';
 import type { Collection, Document } from 'mongodb';
 
 export const tracer = trace.getTracer('nimbus');
 
 export const DB_SYSTEM = 'mongodb';
 
+const meter = metrics.getMeter('nimbus');
+
+const operationCounter = meter.createCounter('mongodb_operation_total', {
+    description: 'Total number of MongoDB operations',
+});
+
+const operationDuration = meter.createHistogram(
+    'mongodb_operation_duration_seconds',
+    {
+        description: 'Duration of MongoDB operations in seconds',
+        unit: 's',
+    },
+);
+
 /**
- * Wraps an async function with OpenTelemetry tracing.
+ * Wraps an async function with OpenTelemetry tracing and metrics.
+ *
+ * Records:
+ * - `mongodb_operation_total` counter with operation, collection, and status labels
+ * - `mongodb_operation_duration_seconds` histogram with operation and collection labels
  *
  * @param operation - The MongoDB operation name (e.g., 'findOne', 'insertMany')
  * @param collection - The MongoDB collection being operated on
  * @param fn - The async function to execute within the span
  * @returns The result of the async function
@@ -18,6 +36,12 @@ export const withSpan = (
     collection: Collection,
     fn: () => Promise,
 ): Promise => {
+    const startTime = performance.now();
+    const metricLabels = {
+        operation,
+        collection: collection.collectionName,
+    };
+
     return tracer.startActiveSpan(
         `mongodb.${operation}`,
         {
@@ -30,8 +54,24 @@
         },
         async (span) => {
             try {
-                return await fn();
+                const result = await fn();
+
+                // Record success metrics
+                operationCounter.add(1, { ...metricLabels, status: 'success' });
+                operationDuration.record(
+                    (performance.now() - startTime) / 1000,
+                    metricLabels,
+                );
+
+                return result;
             } catch (error) {
+                // Record error metrics
+                operationCounter.add(1, { ...metricLabels, status: 'error' });
+                operationDuration.record(
+                    (performance.now() - startTime) / 1000,
+                    metricLabels,
+                );
+
                 span.setStatus({
                     code: SpanStatusCode.ERROR,
                     message: error instanceof Error

From b0cf2c3214b78b96c38297a95f9515f7a72baf93 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Go=CC=88rdes?=
Date: Thu, 22 Jan 2026 16:00:54 +0100
Subject: [PATCH 20/37] feat(core): reworks eventBus to include observability and use exponential backoff

---
 .../iam/users/core/events/userAdded.event.ts | 8 +
 .../iam/users/core/queries/getUser.query.ts | 12 +
 .../src/iam/users/shell/http/router.ts | 27 +-
 .../users/shell/messages/addUser.command.ts | 28 -
 .../messages/commands/addUser.command.ts | 43 ++
 .../shell/messages/events/userAdded.event.ts | 11 +
 .../shell/messages/queries/getUser.query.ts | 11 +
 .../shell/messages/registerUserMessages.ts | 23 +-
 examples/hono-demo/src/main.ts | 22 +-
 examples/hono-demo/src/shared/shell/http.ts | 8 +-
 .../core/src/lib/eventBus/eventBus.test.ts | 293 +++++++++-
 packages/core/src/lib/eventBus/eventBus.ts | 512 ++++++++++++++----
 12 files changed, 864 insertions(+), 134 deletions(-)
 create mode 100644 examples/hono-demo/src/iam/users/core/events/userAdded.event.ts
 delete mode 100644 examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts
 create mode 100644 examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts
 create mode 100644 examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts
 create mode 100644 examples/hono-demo/src/iam/users/shell/messages/queries/getUser.query.ts

diff --git a/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts b/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts
new file mode 100644
index 0000000..99050d1
--- /dev/null
+++ b/examples/hono-demo/src/iam/users/core/events/userAdded.event.ts
@@ -0,0 +1,8 @@
+import { Event } from '@nimbus/core';
+import { UserState } from '../domain/user.ts';
+
+export const USER_ADDED_EVENT_TYPE = 'at.overlap.nimbus.user-added';
+
+export type UserAddedEvent = Event & {
+    type: typeof USER_ADDED_EVENT_TYPE;
+};
diff --git a/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts b/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts
index e69de29..429c57b 100644
--- a/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts
+++ b/examples/hono-demo/src/iam/users/core/queries/getUser.query.ts
@@ -0,0 +1,12 @@
+import { querySchema } from '@nimbus/core';
+import { z } from 'zod';
+
+export
const GET_USER_QUERY_TYPE = 'at.overlap.nimbus.get-user'; + +export const getUserQuerySchema = querySchema.extend({ + type: z.literal(GET_USER_QUERY_TYPE), + data: z.object({ + id: z.string().length(24), + }), +}); +export type GetUserQuery = z.infer; diff --git a/examples/hono-demo/src/iam/users/shell/http/router.ts b/examples/hono-demo/src/iam/users/shell/http/router.ts index f284c1c..9b05e3c 100644 --- a/examples/hono-demo/src/iam/users/shell/http/router.ts +++ b/examples/hono-demo/src/iam/users/shell/http/router.ts @@ -1,4 +1,4 @@ -import { createCommand } from '@nimbus/core'; +import { createCommand, createQuery } from '@nimbus/core'; import { getCorrelationId } from '@nimbus/hono'; import { Hono } from 'hono'; import { messageRouter } from '../../../../shared/shell/messageRouter.ts'; @@ -6,6 +6,10 @@ import { ADD_USER_COMMAND_TYPE, AddUserCommand, } from '../../core/commands/addUser.command.ts'; +import { + GET_USER_QUERY_TYPE, + GetUserQuery, +} from '../../core/queries/getUser.query.ts'; const usersRouter = new Hono(); @@ -28,4 +32,25 @@ usersRouter.post( }, ); +usersRouter.get( + '/:id', + async (c) => { + const id = c.req.param('id'); + const correlationId = getCorrelationId(c); + + const query = createQuery({ + type: GET_USER_QUERY_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: { + id: id, + }, + }); + + const result = await messageRouter.route(query); + + return c.json(result); + }, +); + export default usersRouter; diff --git a/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts deleted file mode 100644 index 429bf7a..0000000 --- a/examples/hono-demo/src/iam/users/shell/messages/addUser.command.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { - addUser, - AddUserCommand, -} from '../../core/commands/addUser.command.ts'; -import { UserState } from '../../core/domain/user.ts'; -import { userRepository } from '../mongodb/user.repository.ts'; - -export const addUserCommandHandler = async (command: AddUserCommand) => { - let state: UserState = null; - - try { - state = await userRepository.findOne({ - filter: { email: command.data.email }, - }); - } catch (_error) { - state = null; - } - - state = addUser(state, command); - - if (state) { - state = await userRepository.insertOne({ - item: state, - }); - } - - return state; -}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts new file mode 100644 index 0000000..6f544d4 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts @@ -0,0 +1,43 @@ +import { createEvent, getEventBus } from '@nimbus/core'; +import { + addUser, + AddUserCommand, +} from '../../../core/commands/addUser.command.ts'; +import { UserState } from '../../../core/domain/user.ts'; +import { + USER_ADDED_EVENT_TYPE, + UserAddedEvent, +} from '../../../core/events/userAdded.event.ts'; +import { userRepository } from '../../mongodb/user.repository.ts'; + +export const addUserCommandHandler = async (command: AddUserCommand) => { + const eventBus = getEventBus('default'); + let state: UserState = null; + + try { + state = await userRepository.findOne({ + filter: { email: command.data.email }, + }); + } catch (_error) { + state = null; + } + + state = addUser(state, command); + + if (state) { + state = await userRepository.insertOne({ + item: state, + }); + } + + const event = createEvent({ + type: 
USER_ADDED_EVENT_TYPE, + source: 'nimbus.overlap.at', + correlationid: command.correlationid, + data: state, + }); + + eventBus.putEvent(event); + + return state; +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts b/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts new file mode 100644 index 0000000..0961729 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/events/userAdded.event.ts @@ -0,0 +1,11 @@ +import { getLogger } from '@nimbus/core'; +import { UserAddedEvent } from '../../../core/events/userAdded.event.ts'; + +export const userAddedEventHandler = async (event: UserAddedEvent) => { + await Promise.resolve(); + + getLogger().info({ + message: 'User added', + data: event.data ?? {}, + }); +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/queries/getUser.query.ts b/examples/hono-demo/src/iam/users/shell/messages/queries/getUser.query.ts new file mode 100644 index 0000000..c74d1a1 --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/queries/getUser.query.ts @@ -0,0 +1,11 @@ +import { ObjectId } from 'mongodb'; +import { GetUserQuery } from '../../../core/queries/getUser.query.ts'; +import { userRepository } from '../../mongodb/user.repository.ts'; + +export const getUserQueryHandler = async (query: GetUserQuery) => { + const state = await userRepository.findOne({ + filter: { _id: new ObjectId(query.data.id) }, + }); + + return state; +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts index e48a3b0..0fd6d39 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts @@ -1,14 +1,35 @@ +import { getEventBus } from '@nimbus/core'; import { messageRouter } from '../../../../shared/shell/messageRouter.ts'; import { ADD_USER_COMMAND_TYPE, addUserCommandSchema, } from '../../core/commands/addUser.command.ts'; -import { addUserCommandHandler } from './addUser.command.ts'; +import { USER_ADDED_EVENT_TYPE } from '../../core/events/userAdded.event.ts'; +import { + GET_USER_QUERY_TYPE, + getUserQuerySchema, +} from '../../core/queries/getUser.query.ts'; +import { addUserCommandHandler } from './commands/addUser.command.ts'; +import { userAddedEventHandler } from './events/userAdded.event.ts'; +import { getUserQueryHandler } from './queries/getUser.query.ts'; export const registerUserMessages = () => { + const eventBus = getEventBus('default'); + + eventBus.subscribeEvent({ + type: USER_ADDED_EVENT_TYPE, + handler: userAddedEventHandler, + }); + messageRouter.register( ADD_USER_COMMAND_TYPE, addUserCommandHandler, addUserCommandSchema, ); + + messageRouter.register( + GET_USER_QUERY_TYPE, + getUserQueryHandler, + getUserQuerySchema, + ); }; diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts index 31502b8..536c3f3 100644 --- a/examples/hono-demo/src/main.ts +++ b/examples/hono-demo/src/main.ts @@ -3,6 +3,7 @@ import { jsonLogFormatter, parseLogLevel, prettyLogFormatter, + setupEventBus, setupLogger, } from '@nimbus/core'; import '@std/dotenv/load'; @@ -19,10 +20,27 @@ setupLogger({ useConsoleColors: process.env.LOG_FORMAT === 'pretty', }); -initMongoConnectionManager(); +setupEventBus('default', { + maxRetries: 3, + baseDelay: 1000, + maxDelay: 30000, + useJitter: true, + logPublish: (event) => { + getLogger().debug({ + category: 'EventBus', + message: 
'Published event',
+            data: { event },
+            ...(event?.correlationid
+                ? { correlationId: event.correlationid }
+                : {}),
+        });
+    },
+});
 
 initMessages();
 
+initMongoConnectionManager();
+
 if (process.env.PORT) {
     const port = parseInt(process.env.PORT);
@@ -36,6 +54,6 @@ if (process.env.PORT) {
     getLogger().critical({
         category: 'API',
         message:
-            `Could not start the application! Please define a valid port vienvironment variable.`,
+            `Could not start the application! Please define a valid port environment variable.`,
     });
 }
diff --git a/examples/hono-demo/src/shared/shell/http.ts b/examples/hono-demo/src/shared/shell/http.ts
index b418e76..95a9d00 100644
--- a/examples/hono-demo/src/shared/shell/http.ts
+++ b/examples/hono-demo/src/shared/shell/http.ts
@@ -1,16 +1,20 @@
 import { correlationId, handleError, logger } from '@nimbus/hono';
 import { Hono } from 'hono';
+import { compress } from 'hono/compress';
 import { cors } from 'hono/cors';
 import { secureHeaders } from 'hono/secure-headers';
-import { compress } from 'hono/compress';
 import usersRouter from '../../iam/users/shell/http/router.ts';
 
 export const app = new Hono();
 
+// Next Steps:
+// - Finish example and get the documentation completed
+// - Try out PostgreSQL
+
 // TODO: document correlationId middleware
 app.use(correlationId());
 
-// TODO: document logger middleware and observability in general
+// TODO: document logger middleware and observability in general (now also added to mongodb)
 app.use(logger({
     enableTracing: true,
     tracerName: 'api',
diff --git a/packages/core/src/lib/eventBus/eventBus.test.ts b/packages/core/src/lib/eventBus/eventBus.test.ts
index ff84121..ccf2aec 100644
--- a/packages/core/src/lib/eventBus/eventBus.test.ts
+++ b/packages/core/src/lib/eventBus/eventBus.test.ts
@@ -1,10 +1,28 @@
 import { assertEquals, assertInstanceOf } from '@std/assert';
 import { GenericException } from '../exception/genericException.ts';
 import type { Event } from '../message/event.ts';
-import { NimbusEventBus } from './eventBus.ts';
+import { getEventBus, NimbusEventBus, setupEventBus } from './eventBus.ts';
+
+/**
+ * Helper function to create a valid test event.
+ */
+const createTestEvent = (
+    type: string,
+    data?: Record,
+): Event => ({
+    specversion: '1.0',
+    id: crypto.randomUUID(),
+    correlationid: crypto.randomUUID(),
+    time: new Date().toISOString(),
+    source: 'https://test.nimbus.overlap.at',
+    type,
+    subject: '/test',
+    data: data ??
{ testData: 'value' }, +}); Deno.test('EventBus rejects event that exceeds the 64KB size limit', () => { const eventBus = new NimbusEventBus({ + name: 'size-test', maxRetries: 3, }); @@ -31,3 +49,276 @@ Deno.test('EventBus rejects event that exceeds the 64KB size limit', () => { assertInstanceOf(exception, GenericException); assertEquals(exception.message, 'Event size exceeds the limit of 64KB'); }); + +Deno.test('EventBus delivers event to subscriber', async () => { + const eventBus = new NimbusEventBus({ name: 'deliver-test' }); + let receivedEvent: Event | undefined; + + eventBus.subscribeEvent({ + type: 'test.event.deliver', + handler: (event) => { + receivedEvent = event; + return Promise.resolve(); + }, + }); + + const testEvent = createTestEvent('test.event.deliver', { + message: 'hello', + }); + eventBus.putEvent(testEvent); + + // Wait for async handler to complete + await new Promise((r) => setTimeout(r, 50)); + + assertEquals(receivedEvent!.type, 'test.event.deliver'); + assertEquals( + (receivedEvent!.data as Record).message, + 'hello', + ); +}); + +Deno.test('EventBus delivers event to multiple subscribers', async () => { + const eventBus = new NimbusEventBus({ name: 'multi-sub-test' }); + const receivedEvents: Event[] = []; + + // First subscriber + eventBus.subscribeEvent({ + type: 'test.event.multi', + handler: (event) => { + receivedEvents.push(event); + return Promise.resolve(); + }, + }); + + // Second subscriber + eventBus.subscribeEvent({ + type: 'test.event.multi', + handler: (event) => { + receivedEvents.push(event); + return Promise.resolve(); + }, + }); + + const testEvent = createTestEvent('test.event.multi'); + eventBus.putEvent(testEvent); + + // Wait for async handlers to complete + await new Promise((r) => setTimeout(r, 50)); + + assertEquals(receivedEvents.length, 2); + assertEquals(receivedEvents[0].id, testEvent.id); + assertEquals(receivedEvents[1].id, testEvent.id); +}); + +Deno.test('EventBus retries on handler failure and eventually succeeds', async () => { + const eventBus = new NimbusEventBus({ + name: 'retry-success-test', + maxRetries: 3, + baseDelay: 10, + maxDelay: 100, + useJitter: false, + }); + let attempts = 0; + + eventBus.subscribeEvent({ + type: 'test.event.retry-success', + handler: () => { + attempts++; + if (attempts < 3) { + return Promise.reject(new Error('Temporary failure')); + } + // Succeeds on 3rd attempt + return Promise.resolve(); + }, + }); + + const testEvent = createTestEvent('test.event.retry-success'); + eventBus.putEvent(testEvent); + + // Wait for retries to complete (base delay * 2^0 + base delay * 2^1 = 10 + 20 = 30ms + buffer) + await new Promise((r) => setTimeout(r, 200)); + + assertEquals(attempts, 3); +}); + +Deno.test('EventBus exhausts retries and invokes onError callback', async () => { + const eventBus = new NimbusEventBus({ + name: 'retry-exhausted-test', + maxRetries: 2, + baseDelay: 10, + maxDelay: 100, + useJitter: false, + }); + let attempts = 0; + let errorReceived: Error | undefined; + let eventReceived: Event | undefined; + + eventBus.subscribeEvent({ + type: 'test.event.retry-exhausted', + handler: () => { + attempts++; + return Promise.reject(new Error('Always fails')); + }, + onError: (error, event) => { + errorReceived = error; + eventReceived = event; + }, + }); + + const testEvent = createTestEvent('test.event.retry-exhausted'); + eventBus.putEvent(testEvent); + + // Wait for all retries to exhaust + await new Promise((r) => setTimeout(r, 200)); + + // Initial attempt + 2 retries = 3 total attempts 
+ assertEquals(attempts, 3); + assertInstanceOf(errorReceived, GenericException); + assertEquals(eventReceived!.id, testEvent.id); +}); + +Deno.test('EventBus onError callback receives error and event', async () => { + const eventBus = new NimbusEventBus({ + name: 'onerror-test', + maxRetries: 0, + baseDelay: 10, + }); + let errorMessage: string | undefined; + let eventType: string | undefined; + + eventBus.subscribeEvent({ + type: 'test.event.onerror', + handler: () => { + return Promise.reject(new Error('Handler error')); + }, + onError: (error, event) => { + errorMessage = error.message; + eventType = event.type; + }, + }); + + const testEvent = createTestEvent('test.event.onerror'); + eventBus.putEvent(testEvent); + + // Wait for handler to fail + await new Promise((r) => setTimeout(r, 50)); + + assertEquals( + errorMessage, + 'Failed to handle event: test.event.onerror from https://test.nimbus.overlap.at', + ); + assertEquals(eventType, 'test.event.onerror'); +}); + +Deno.test('setupEventBus creates and registers an EventBus instance', () => { + setupEventBus('test-setup-bus', { maxRetries: 5 }); + const bus = getEventBus('test-setup-bus'); + + assertInstanceOf(bus, NimbusEventBus); +}); + +Deno.test('getEventBus creates default instance if not found', () => { + const bus = getEventBus('test-new-bus-' + crypto.randomUUID()); + + assertInstanceOf(bus, NimbusEventBus); +}); + +Deno.test('getEventBus returns same instance on repeated calls', () => { + const busName = 'test-same-instance-' + crypto.randomUUID(); + const bus1 = getEventBus(busName); + const bus2 = getEventBus(busName); + + assertEquals(bus1, bus2); +}); + +Deno.test('Multiple named EventBus instances are independent', async () => { + const ordersBus = new NimbusEventBus({ name: 'orders-independent' }); + const notificationsBus = new NimbusEventBus({ + name: 'notifications-independent', + }); + + let ordersReceived = 0; + let notificationsReceived = 0; + + ordersBus.subscribeEvent({ + type: 'test.event.independent', + handler: () => { + ordersReceived++; + return Promise.resolve(); + }, + }); + + notificationsBus.subscribeEvent({ + type: 'test.event.independent', + handler: () => { + notificationsReceived++; + return Promise.resolve(); + }, + }); + + // Publish to orders bus only + ordersBus.putEvent(createTestEvent('test.event.independent')); + + await new Promise((r) => setTimeout(r, 50)); + + assertEquals(ordersReceived, 1); + assertEquals(notificationsReceived, 0); +}); + +Deno.test('EventBus logPublish callback is invoked when publishing an event', () => { + const loggedEvents: Event[] = []; + + const eventBus = new NimbusEventBus({ + name: 'log-publish-test', + logPublish: (event) => { + loggedEvents.push(event); + }, + }); + + const testEvent = createTestEvent('test.event.log-publish', { + message: 'logged', + }); + eventBus.putEvent(testEvent); + + assertEquals(loggedEvents.length, 1); + assertEquals(loggedEvents[0].id, testEvent.id); + assertEquals(loggedEvents[0].type, 'test.event.log-publish'); + assertEquals( + (loggedEvents[0].data as Record).message, + 'logged', + ); +}); + +Deno.test('EventBus logPublish callback receives correct event for each publish', () => { + const loggedEvents: Event[] = []; + + const eventBus = new NimbusEventBus({ + name: 'log-publish-multi-test', + logPublish: (event) => { + loggedEvents.push(event); + }, + }); + + const event1 = createTestEvent('test.event.first'); + const event2 = createTestEvent('test.event.second'); + const event3 = createTestEvent('test.event.third'); + + 
eventBus.putEvent(event1); + eventBus.putEvent(event2); + eventBus.putEvent(event3); + + assertEquals(loggedEvents.length, 3); + assertEquals(loggedEvents[0].type, 'test.event.first'); + assertEquals(loggedEvents[1].type, 'test.event.second'); + assertEquals(loggedEvents[2].type, 'test.event.third'); +}); + +Deno.test('EventBus works without logPublish callback', () => { + const eventBus = new NimbusEventBus({ + name: 'no-log-publish-test', + }); + + // Should not throw when logPublish is not provided + const testEvent = createTestEvent('test.event.no-log'); + eventBus.putEvent(testEvent); +}); diff --git a/packages/core/src/lib/eventBus/eventBus.ts b/packages/core/src/lib/eventBus/eventBus.ts index de5d54f..d64e9a5 100644 --- a/packages/core/src/lib/eventBus/eventBus.ts +++ b/packages/core/src/lib/eventBus/eventBus.ts @@ -1,154 +1,312 @@ +import { metrics, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api'; import EventEmitter from 'node:events'; import { GenericException } from '../exception/genericException.ts'; import { getLogger } from '../log/logger.ts'; import type { Event } from '../message/event.ts'; -/** - * The input type for subscribing to an event. - */ -export type SubscribeEventInput = { - type: string; - handler: (event: Event) => Promise; - onError?: (error: any, event: Event) => void; - options?: NimbusEventBusOptions; -}; +const tracer = trace.getTracer('nimbus'); +const meter = metrics.getMeter('nimbus'); + +const eventsPublishedCounter = meter.createCounter( + 'eventbus_events_published_total', + { + description: 'Total number of events published to the event bus', + }, +); + +const eventsDeliveredCounter = meter.createCounter( + 'eventbus_events_delivered_total', + { + description: 'Total number of events delivered to handlers', + }, +); + +const handlingDuration = meter.createHistogram( + 'eventbus_event_handling_duration_seconds', + { + description: 'Duration of event handler execution in seconds', + unit: 's', + }, +); + +const retryAttemptsCounter = meter.createCounter( + 'eventbus_retry_attempts_total', + { + description: 'Total number of retry attempts for event handling', + }, +); + +const eventSizeBytes = meter.createHistogram( + 'eventbus_event_size_bytes', + { + description: 'Size of events published to the event bus in bytes', + unit: 'By', + }, +); /** * The type for the NimbusEventBus options. */ export type NimbusEventBusOptions = { + /** + * The name of the event bus instance for metrics and traces. + * Defaults to 'default'. + */ + name?: string; + /** + * The maximum number of retries for handling the event in case of an error. + * Defaults to 2. + */ maxRetries?: number; - retryDelay?: number; + /** + * The base delay for exponential backoff in milliseconds. + * Defaults to 1000ms. + */ + baseDelay?: number; + /** + * The maximum delay cap for exponential backoff in milliseconds. + * Defaults to 30000ms (30 seconds). + */ + maxDelay?: number; + /** + * Whether to add jitter to the retry delay to prevent thundering herd issues. + * Defaults to true. + */ + useJitter?: boolean; + /** + * Optional callback invoked when an event is published. + * Useful for custom logging or debugging. + */ + logPublish?: (event: Event) => void; +}; + +/** + * The input type for subscribing to an event. + */ +export type SubscribeEventInput = { + /** + * The CloudEvents event type to subscribe to (e.g., 'at.overlap.nimbus.order-created'). + */ + type: string; + /** + * The async handler function that processes received events. 
+ */ + handler: (event: TEvent) => Promise; + /** + * Optional error callback invoked when event handling fails after all retries. + * If not provided, errors are logged using the default logger. + */ + onError?: (error: Error, event: TEvent) => void; + /** + * Optional retry options that override the EventBus defaults for this subscription. + */ + options?: Omit; }; /** - * The NimbusEventBus is used to publish and - * subscribe to events within the application. + * The NimbusEventBus is used to publish and subscribe to events within the application. + * + * Events are delivered asynchronously to all registered handlers. If a handler fails, + * it will be retried using exponential backoff until it succeeds or the maximum retry + * count is reached. + * + * All operations are instrumented with OpenTelemetry tracing and metrics for observability. * * @example * ```ts - * export const eventBus = new NimbusEventBus({ + * import { NimbusEventBus } from '@nimbus/core'; + * + * const eventBus = new NimbusEventBus({ + * name: 'orders', * maxRetries: 3, - * retryDelay: 3000, + * baseDelay: 1000, * }); * + * // Subscribe to events * eventBus.subscribeEvent({ - * type: 'at.overlap.nimbus.account-added', - * handler: (event) => { ... }, + * type: 'at.overlap.nimbus.order-created', + * handler: async (event) => { + * console.log('Order created:', event.data); + * }, * }); * - * eventBus.putEvent({ + * // Publish an event + * eventBus.putEvent({ * specversion: '1.0', - * id: '123', - * correlationid: '456', - * time: '2025-01-01T00:00:00Z', - * source: 'https://nimbus.overlap.at', - * type: 'at.overlap.nimbus.account-added', - * subject: '/accounts/123', - * data: { - * accountId: '123', - * }, - * datacontenttype: 'application/json', - * dataschema: 'https://nimbus.overlap.at/schemas/events/account-added/v1', + * id: crypto.randomUUID(), + * type: 'at.overlap.nimbus.order-created', + * source: 'https://api.example.com', + * time: new Date().toISOString(), + * data: { orderId: '12345' }, * }); * ``` */ export class NimbusEventBus { private _eventEmitter: EventEmitter; + private _name: string; private _maxRetries: number; - private _retryDelay: number; + private _baseDelay: number; + private _maxDelay: number; + private _useJitter: boolean; + private _logPublish?: (event: Event) => void; /** * Create a new NimbusEventBus instance. * * @param {NimbusEventBusOptions} [options] - The options for the event bus. + * @param {string} [options.name] - The name of the event bus instance for metrics and traces. * @param {number} [options.maxRetries] - The maximum number of retries for handling the event in case of an error. - * @param {number} [options.retryDelay] - The delay between retries in milliseconds. + * @param {number} [options.baseDelay] - The base delay for exponential backoff in milliseconds. + * @param {number} [options.maxDelay] - The maximum delay cap for exponential backoff in milliseconds. + * @param {boolean} [options.useJitter] - Whether to add jitter to the retry delay. * * @example * ```ts * const eventBus = new NimbusEventBus({ + * name: 'orders', * maxRetries: 3, - * retryDelay: 3000, + * baseDelay: 1000, + * maxDelay: 30000, + * useJitter: true, * }); * ``` */ constructor(options?: NimbusEventBusOptions) { this._eventEmitter = new EventEmitter(); - + this._name = options?.name ?? 'default'; this._maxRetries = options?.maxRetries ?? 2; - this._retryDelay = options?.retryDelay ?? 1000; + this._baseDelay = options?.baseDelay ?? 1000; + this._maxDelay = options?.maxDelay ?? 
30000; + this._useJitter = options?.useJitter ?? true; + this._logPublish = options?.logPublish; } /** * Publish an event to the event bus. * - * @param event - The event to send to the event bus. + * The event is validated against the CloudEvents 64KB size limit before publishing. + * All subscribers registered for this event type will receive the event asynchronously. + * + * @param event - The CloudEvents-compliant event to publish. + * @throws {GenericException} If the event size exceeds 64KB. * * @example * ```ts - * eventBus.putEvent({ + * eventBus.putEvent({ * specversion: '1.0', - * id: '123', - * correlationid: '456', - * time: '2025-01-01T00:00:00Z', - * source: 'https://nimbus.overlap.at', - * type: 'at.overlap.nimbus.account-added', - * subject: '/accounts/123', - * data: { - * accountId: '123', - * }, - * datacontenttype: 'application/json', - * dataschema: 'https://nimbus.overlap.at/schemas/events/account-added/v1', + * id: crypto.randomUUID(), + * type: 'at.overlap.nimbus.order-created', + * source: 'https://api.example.com', + * time: new Date().toISOString(), + * data: { orderId: '12345' }, * }); * ``` */ - public putEvent(event: Event): void { - this._validateEventSize(event); + public putEvent(event: TEvent): void { + const eventSize = this._validateEventSize(event); + const metricLabels = { + eventbus_name: this._name, + event_type: event.type, + }; + + tracer.startActiveSpan( + 'eventbus.publish', + { + kind: SpanKind.PRODUCER, + attributes: { + 'messaging.system': 'nimbusEventBus', + 'messaging.eventbus_name': this._name, + 'messaging.operation': 'publish', + 'messaging.destination': event.type, + 'cloudevents.event_id': event.id, + 'cloudevents.event_source': event.source, + ...(event.correlationid && { + correlation_id: event.correlationid, + }), + }, + }, + (span) => { + try { + eventsPublishedCounter.add(1, metricLabels); + eventSizeBytes.record(eventSize, metricLabels); + + if (this._logPublish) { + this._logPublish(event); + } - this._eventEmitter.emit(event.type, event); + this._eventEmitter.emit(event.type, event); + } catch (error) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error + ? error.message + : 'Unknown error', + }); + span.recordException( + error instanceof Error + ? error + : new Error('Unknown error'), + ); + throw error; + } finally { + span.end(); + } + }, + ); } /** - * Subscribe to an event. + * Subscribe to an event type with a handler function. * - * @param {string} eventType - The type of event to subscribe to. - * @param {Function} handler - The handler to process the event. - * @param {Function} [onError] - The function to call when the event could not be handled after the maximum number of retries. - * @param {NimbusEventBusOptions} [options] - The options for the event bus. - * @param {number} [options.maxRetries] - The maximum number of retries for handling the event in case of an error. - * @param {number} [options.retryDelay] - The delay between retries in milliseconds. + * When an event matching the specified type is published, the handler is invoked. + * If the handler throws an error, it will be retried using exponential backoff + * (delay doubles with each attempt) until either it succeeds or the maximum retry + * count is reached. + * + * @param input - The subscription configuration. + * @param input.type - The CloudEvents event type to subscribe to. + * @param input.handler - The async handler function to process events. 
+ * @param input.onError - Optional callback invoked when all retries are exhausted. + * @param input.options - Optional retry options to override EventBus defaults. * * @example * ```ts * eventBus.subscribeEvent({ - * type: 'at.overlap.nimbus.account-added', - * handler: accountAddedHandler, - * allowUnsafeInput: true, // Disables input validation (not recommended) + * type: 'at.overlap.nimbus.order-created', + * handler: async (event) => { + * console.log('Order created:', event.data); + * }, + * onError: (error, event) => { + * console.error('Failed to process order:', error); + * }, * }); * ``` */ - public subscribeEvent({ + public subscribeEvent({ type, handler, onError, options, - }: SubscribeEventInput): void { + }: SubscribeEventInput): void { getLogger().info({ category: 'Nimbus', message: `Subscribed to ${type} event`, }); const maxRetries = options?.maxRetries ?? this._maxRetries; - const retryDelay = options?.retryDelay ?? this._retryDelay; + const baseDelay = options?.baseDelay ?? this._baseDelay; + const maxDelay = options?.maxDelay ?? this._maxDelay; + const useJitter = options?.useJitter ?? this._useJitter; - const handleEvent = async (event: Event) => { + const handleEvent = async (event: TEvent) => { try { - await this._processEvent( + await this._processEvent( handler, event, maxRetries, - retryDelay, + baseDelay, + maxDelay, + useJitter, ); } catch (error: any) { if (onError) { @@ -166,65 +324,221 @@ export class NimbusEventBus { this._eventEmitter.on(type, handleEvent); } - private async _processEvent( - handler: (event: Event) => Promise, - event: Event, + private _processEvent( + handler: (event: TEvent) => Promise, + event: TEvent, maxRetries: number, - retryDelay: number, - ) { - let attempt = -1; + baseDelay: number, + maxDelay: number, + useJitter: boolean, + ): Promise { + const startTime = performance.now(); + const metricLabels = { + eventbus_name: this._name, + event_type: event.type, + }; - while (attempt < maxRetries) { - try { - await handler(event); - break; - } catch (error: any) { - attempt++; - - if (attempt >= maxRetries) { - const exception = new GenericException( - `Failed to handle event: ${event.type} from ${event.source}`, - { - retryAttempts: maxRetries, - retryDelay: retryDelay, - }, - ); + return tracer.startActiveSpan( + 'eventbus.handle', + { + kind: SpanKind.CONSUMER, + attributes: { + 'messaging.system': 'nimbusEventBus', + 'messaging.eventbus_name': this._name, + 'messaging.operation': 'process', + 'messaging.destination': event.type, + 'cloudevents.event_id': event.id, + 'cloudevents.event_source': event.source, + ...(event.correlationid && { + correlation_id: event.correlationid, + }), + }, + }, + async (span) => { + let attempt = 0; - if (error.stack) { - exception.stack = error.stack; - } + while (attempt <= maxRetries) { + try { + await handler(event); - throw exception; - } + // Record success metrics + eventsDeliveredCounter.add(1, { + ...metricLabels, + status: 'success', + }); + handlingDuration.record( + (performance.now() - startTime) / 1000, + metricLabels, + ); + span.end(); + return; + } catch (error: any) { + attempt++; - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - } - } + if (attempt > maxRetries) { + // Record error metrics + eventsDeliveredCounter.add(1, { + ...metricLabels, + status: 'error', + }); + handlingDuration.record( + (performance.now() - startTime) / 1000, + metricLabels, + ); + + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error + ? 
error.message + : 'Unknown error', + }); + span.recordException( + error instanceof Error + ? error + : new Error('Unknown error'), + ); + span.end(); + + const exception = new GenericException( + `Failed to handle event: ${event.type} from ${event.source}`, + { + retryAttempts: maxRetries, + baseDelay, + maxDelay, + }, + ); + + if (error.stack) { + exception.stack = error.stack; + } + + throw exception; + } + + // Record retry metric + retryAttemptsCounter.add(1, metricLabels); + + // Exponential backoff with optional jitter + const delay = Math.min( + baseDelay * Math.pow(2, attempt - 1), + maxDelay, + ); + const jitter = useJitter + ? Math.random() * delay * 0.1 + : 0; + + span.addEvent('retry', { + attempt, + delay_ms: delay + jitter, + }); + + await new Promise((resolve) => + setTimeout(resolve, delay + jitter) + ); + } + } + }, + ); } /** - * Validate the size of the event. + * Validate the size of the event and return the size in bytes. * * To comply with the CloudEvent spec a transmitted event * can only have a maximum size of 64KB. * * @param event - The event to validate. + * @returns The size of the event in bytes. */ - private _validateEventSize(event: Event): void { + private _validateEventSize(event: Event): number { const eventJson = JSON.stringify(event); - const eventSizeBytes = new TextEncoder().encode(eventJson).length; + const size = new TextEncoder().encode(eventJson).length; const maxSizeBytes = 64 * 1024; // 64KB - if (eventSizeBytes > maxSizeBytes) { + if (size > maxSizeBytes) { throw new GenericException( `Event size exceeds the limit of 64KB`, { eventType: event.type, eventSource: event.source, - eventSizeBytes, + eventSizeBytes: size, maxSizeBytes, }, ); } + + return size; } } + +/** + * Registry to store named EventBus instances. + */ +const eventBusRegistry = new Map(); + +/** + * Setup a named EventBus instance and register it for later retrieval. + * + * Use this function to configure an EventBus with specific options at application + * startup, then retrieve it later using {@link getEventBus}. + * + * @param name - The unique name for this EventBus instance. + * @param options - Optional configuration options for the EventBus. + * + * @example + * ```ts + * import { setupEventBus } from '@nimbus/core'; + * + * // At application startup + * setupEventBus('orders', { + * maxRetries: 5, + * baseDelay: 500, + * }); + * + * setupEventBus('notifications', { + * maxRetries: 3, + * baseDelay: 1000, + * }); + * ``` + */ +export const setupEventBus = ( + name: string, + options?: Omit, +): void => { + eventBusRegistry.set(name, new NimbusEventBus({ ...options, name })); +}; + +/** + * Get a named EventBus instance. + * + * If an EventBus with the given name has been configured via {@link setupEventBus}, + * that instance is returned. Otherwise, a new EventBus with default options is created + * and registered. + * + * @param name - The name of the EventBus instance to retrieve. Defaults to 'default'. + * @returns The NimbusEventBus instance. 
+ * + * @example + * ```ts + * import { getEventBus } from '@nimbus/core'; + * + * // Get the orders EventBus (configured earlier with setupEventBus) + * const ordersEventBus = getEventBus('orders'); + * + * ordersEventBus.subscribeEvent({ + * type: 'order.created', + * handler: async (event) => { + * console.log('Order created:', event.data); + * }, + * }); + * + * // Get the default EventBus + * const defaultEventBus = getEventBus(); + * ``` + */ +export const getEventBus = (name: string = 'default'): NimbusEventBus => { + if (!eventBusRegistry.has(name)) { + eventBusRegistry.set(name, new NimbusEventBus({ name })); + } + return eventBusRegistry.get(name)!; +}; From 65a2e0133ac1c78fdede967b7773d512a436984d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 22 Jan 2026 16:32:06 +0100 Subject: [PATCH 21/37] feat(core): rework router to implement observability and use the same patter as eventBus --- .../src/iam/users/shell/http/router.ts | 7 +- .../shell/messages/registerUserMessages.ts | 8 +- examples/hono-demo/src/main.ts | 26 ++- examples/hono-demo/src/shared/shell/http.ts | 4 - .../src/shared/shell/messageRouter.ts | 29 --- .../hono-demo/src/shared/shell/messages.ts | 5 + packages/core/src/lib/message/router.ts | 221 +++++++++++++++--- 7 files changed, 223 insertions(+), 77 deletions(-) delete mode 100644 examples/hono-demo/src/shared/shell/messageRouter.ts create mode 100644 examples/hono-demo/src/shared/shell/messages.ts diff --git a/examples/hono-demo/src/iam/users/shell/http/router.ts b/examples/hono-demo/src/iam/users/shell/http/router.ts index 9b05e3c..8e50c72 100644 --- a/examples/hono-demo/src/iam/users/shell/http/router.ts +++ b/examples/hono-demo/src/iam/users/shell/http/router.ts @@ -1,7 +1,6 @@ -import { createCommand, createQuery } from '@nimbus/core'; +import { createCommand, createQuery, getRouter } from '@nimbus/core'; import { getCorrelationId } from '@nimbus/hono'; import { Hono } from 'hono'; -import { messageRouter } from '../../../../shared/shell/messageRouter.ts'; import { ADD_USER_COMMAND_TYPE, AddUserCommand, @@ -26,7 +25,7 @@ usersRouter.post( data: body, }); - const result = await messageRouter.route(command); + const result = await getRouter('default').route(command); return c.json(result); }, @@ -47,7 +46,7 @@ usersRouter.get( }, }); - const result = await messageRouter.route(query); + const result = await getRouter('default').route(query); return c.json(result); }, diff --git a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts index 0fd6d39..5367481 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts @@ -1,5 +1,4 @@ -import { getEventBus } from '@nimbus/core'; -import { messageRouter } from '../../../../shared/shell/messageRouter.ts'; +import { getEventBus, getRouter } from '@nimbus/core'; import { ADD_USER_COMMAND_TYPE, addUserCommandSchema, @@ -15,19 +14,20 @@ import { getUserQueryHandler } from './queries/getUser.query.ts'; export const registerUserMessages = () => { const eventBus = getEventBus('default'); + const router = getRouter('default'); eventBus.subscribeEvent({ type: USER_ADDED_EVENT_TYPE, handler: userAddedEventHandler, }); - messageRouter.register( + router.register( ADD_USER_COMMAND_TYPE, addUserCommandHandler, addUserCommandSchema, ); - messageRouter.register( + router.register( GET_USER_QUERY_TYPE, 
getUserQueryHandler, getUserQuerySchema, diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts index 536c3f3..14c03ee 100644 --- a/examples/hono-demo/src/main.ts +++ b/examples/hono-demo/src/main.ts @@ -5,11 +5,12 @@ import { prettyLogFormatter, setupEventBus, setupLogger, + setupRouter, } from '@nimbus/core'; import '@std/dotenv/load'; import process from 'node:process'; import { app } from './shared/shell/http.ts'; -import { initMessages } from './shared/shell/messageRouter.ts'; +import { initMessages } from './shared/shell/messages.ts'; import { initMongoConnectionManager } from './shared/shell/mongodb.ts'; setupLogger({ @@ -37,6 +38,29 @@ setupEventBus('default', { }, }); +setupRouter('default', { + logInput: (input) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Received input', + data: { input }, + ...(input?.correlationid + ? { correlationId: input.correlationid } + : {}), + }); + }, + logOutput: (output) => { + getLogger().debug({ + category: 'MessageRouter', + message: 'Output', + data: { output }, + ...(output?.correlationid + ? { correlationId: output.correlationid } + : {}), + }); + }, +}); + initMessages(); initMongoConnectionManager(); diff --git a/examples/hono-demo/src/shared/shell/http.ts b/examples/hono-demo/src/shared/shell/http.ts index 95a9d00..bcf0594 100644 --- a/examples/hono-demo/src/shared/shell/http.ts +++ b/examples/hono-demo/src/shared/shell/http.ts @@ -7,10 +7,6 @@ import usersRouter from '../../iam/users/shell/http/router.ts'; export const app = new Hono(); -// Next Steps: -// - Finish example and get the documentation completed -// - Try out PostgresSQL - // TODO: document correlationId middleware app.use(correlationId()); diff --git a/examples/hono-demo/src/shared/shell/messageRouter.ts b/examples/hono-demo/src/shared/shell/messageRouter.ts deleted file mode 100644 index 714a02f..0000000 --- a/examples/hono-demo/src/shared/shell/messageRouter.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { getLogger, MessageRouter } from '@nimbus/core'; -import { registerUserMessages } from '../../iam/users/shell/messages/registerUserMessages.ts'; - -export const messageRouter = new MessageRouter({ - logInput: (input) => { - getLogger().debug({ - category: 'MessageRouter', - message: 'Received input', - data: { input }, - ...(input?.correlationid - ? { correlationId: input.correlationid } - : {}), - }); - }, - logOutput: (output) => { - getLogger().debug({ - category: 'MessageRouter', - message: 'Output', - data: { output }, - ...(output?.correlationid - ? 
{ correlationId: output.correlationid } - : {}), - }); - }, -}); - -export const initMessages = () => { - registerUserMessages(); -}; diff --git a/examples/hono-demo/src/shared/shell/messages.ts b/examples/hono-demo/src/shared/shell/messages.ts new file mode 100644 index 0000000..59879a5 --- /dev/null +++ b/examples/hono-demo/src/shared/shell/messages.ts @@ -0,0 +1,5 @@ +import { registerUserMessages } from '../../iam/users/shell/messages/registerUserMessages.ts'; + +export const initMessages = () => { + registerUserMessages(); +}; diff --git a/packages/core/src/lib/message/router.ts b/packages/core/src/lib/message/router.ts index 6c733fc..14090da 100644 --- a/packages/core/src/lib/message/router.ts +++ b/packages/core/src/lib/message/router.ts @@ -1,8 +1,28 @@ -import { getLogger, InvalidInputException } from '@nimbus/core'; +import { metrics, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api'; import type { z } from 'zod'; +import { InvalidInputException } from '../exception/invalidInputException.ts'; import { NotFoundException } from '../exception/notFoundException.ts'; +import { getLogger } from '../log/logger.ts'; import type { Message } from './message.ts'; +const tracer = trace.getTracer('nimbus'); +const meter = metrics.getMeter('nimbus'); + +const messagesRoutedCounter = meter.createCounter( + 'router_messages_routed_total', + { + description: 'Total number of messages routed', + }, +); + +const routingDuration = meter.createHistogram( + 'router_routing_duration_seconds', + { + description: 'Duration of message routing in seconds', + unit: 's', + }, +); + /** * The message handler type - transport-agnostic, just returns domain data. * @@ -20,6 +40,11 @@ export type MessageHandler< * Options for creating a MessageRouter. */ export type MessageRouterOptions = { + /** + * The name of the router instance for metrics and traces. + * Defaults to 'default'. + */ + name?: string; logInput?: (input: any) => void; logOutput?: (output: any) => void; }; @@ -67,6 +92,7 @@ type HandlerRegistration = { */ export class MessageRouter { private readonly _handlers: Map; + private readonly _name: string; private readonly _logInput?: (input: any) => void; private readonly _logOutput?: (output: any) => void; @@ -74,6 +100,7 @@ export class MessageRouter { options?: MessageRouterOptions, ) { this._handlers = new Map(); + this._name = options?.name ?? 'default'; this._logInput = options?.logInput; this._logOutput = options?.logOutput; } @@ -128,46 +155,170 @@ export class MessageRouter { * @throws {GenericException} - If an error occurs during routing */ public async route(input: any): Promise { - if (this._logInput) { - this._logInput(input); - } - - if (!input?.type) { - throw new InvalidInputException( - 'The provided input has no type attribute', - ); - } - - const registration = this._handlers.get(input.type); - if (!registration) { - throw new NotFoundException( - 'Message handler not found', - { - reason: - `Could not find a handler for message type: "${input.type}"`, + const startTime = performance.now(); + const messageType = input?.type ?? 
'unknown'; + + return await tracer.startActiveSpan( + 'router.route', + { + kind: SpanKind.INTERNAL, + attributes: { + 'messaging.system': 'nimbusRouter', + 'messaging.router_name': this._name, + 'messaging.operation': 'route', + 'messaging.destination': messageType, + ...(input?.correlationid && { + correlation_id: input.correlationid, + }), }, - ); - } + }, + async (span) => { + try { + if (this._logInput) { + this._logInput(input); + } - const { handler, schema } = registration; + if (!input?.type) { + throw new InvalidInputException( + 'The provided input has no type attribute', + ); + } - const validationResult = schema.safeParse(input); + const registration = this._handlers.get(input.type); + if (!registration) { + throw new NotFoundException( + 'Message handler not found', + { + reason: + `Could not find a handler for message type: "${input.type}"`, + }, + ); + } - if (!validationResult.success) { - throw new InvalidInputException( - 'The provided input is invalid', - { - issues: validationResult.error.issues, - }, - ); - } + const { handler, schema } = registration; + + const validationResult = schema.safeParse(input); + + if (!validationResult.success) { + throw new InvalidInputException( + 'The provided input is invalid', + { + issues: validationResult.error.issues, + }, + ); + } + + const result = await handler(validationResult.data); + + if (this._logOutput) { + this._logOutput(result); + } - const result = await handler(validationResult.data); + messagesRoutedCounter.add(1, { + router_name: this._name, + message_type: input.type, + status: 'success', + }); + routingDuration.record( + (performance.now() - startTime) / 1000, + { router_name: this._name, message_type: input.type }, + ); - if (this._logOutput) { - this._logOutput(result); - } + return result; + } catch (error: any) { + messagesRoutedCounter.add(1, { + router_name: this._name, + message_type: messageType, + status: 'error', + }); + routingDuration.record( + (performance.now() - startTime) / 1000, + { router_name: this._name, message_type: messageType }, + ); - return result; + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error + ? error.message + : 'Unknown error', + }); + span.recordException( + error instanceof Error + ? error + : new Error('Unknown error'), + ); + + throw error; + } finally { + span.end(); + } + }, + ); } } + +/** + * Registry to store named MessageRouter instances. + */ +const routerRegistry = new Map(); + +/** + * Setup a named MessageRouter instance and register it for later retrieval. + * + * Use this function to configure a MessageRouter with specific options at application + * startup, then retrieve it later using {@link getRouter}. + * + * @param name - The unique name for this MessageRouter instance. + * @param options - Optional configuration options for the MessageRouter. + * + * @example + * ```ts + * import { setupRouter } from '@nimbus/core'; + * + * // At application startup + * setupRouter('default', { + * logInput: (input) => console.log('Input:', input), + * logOutput: (output) => console.log('Output:', output), + * }); + * ``` + */ +export const setupRouter = ( + name: string, + options?: Omit, +): void => { + routerRegistry.set(name, new MessageRouter({ ...options, name })); +}; + +/** + * Get a named MessageRouter instance. + * + * If a MessageRouter with the given name has been configured via {@link setupRouter}, + * that instance is returned. Otherwise, a new MessageRouter with default options is created + * and registered. 
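+ *
+ * Note: call {@link setupRouter} before the first {@link getRouter} call for a given name;
+ * a router created lazily here uses default options without logInput/logOutput callbacks.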
+ * + * @param name - The name of the MessageRouter instance to retrieve. Defaults to 'default'. + * @returns The MessageRouter instance. + * + * @example + * ```ts + * import { getRouter } from '@nimbus/core'; + * + * // Get the default router (configured earlier with setupRouter) + * const router = getRouter('default'); + * + * router.register( + * 'order.create', + * createOrderHandler, + * createOrderSchema, + * ); + * + * // Get the default router + * const defaultRouter = getRouter(); + * ``` + */ +export const getRouter = (name: string = 'default'): MessageRouter => { + if (!routerRegistry.has(name)) { + routerRegistry.set(name, new MessageRouter({ name })); + } + return routerRegistry.get(name)!; +}; From f4bfe04141e7d0f85b1a549c44a137e0738c2994 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Thu, 22 Jan 2026 17:12:27 +0100 Subject: [PATCH 22/37] chore(core): better docs and comments --- packages/core/src/lib/eventBus/eventBus.ts | 152 +++++++++++++----- packages/core/src/lib/log/logger.ts | 134 +++++++++++++++- packages/core/src/lib/log/options.ts | 21 ++- packages/core/src/lib/message/router.ts | 173 ++++++++++++++++----- 4 files changed, 396 insertions(+), 84 deletions(-) diff --git a/packages/core/src/lib/eventBus/eventBus.ts b/packages/core/src/lib/eventBus/eventBus.ts index d64e9a5..8ccbce0 100644 --- a/packages/core/src/lib/eventBus/eventBus.ts +++ b/packages/core/src/lib/eventBus/eventBus.ts @@ -114,12 +114,17 @@ export type SubscribeEventInput = { * * @example * ```ts - * import { NimbusEventBus } from '@nimbus/core'; + * import { createEvent, NimbusEventBus } from '@nimbus/core'; * * const eventBus = new NimbusEventBus({ * name: 'orders', * maxRetries: 3, * baseDelay: 1000, + * maxDelay: 30000, + * useJitter: true, + * logPublish: (event) => { + * console.log('Event published:', event.type, event.correlationid); + * }, * }); * * // Subscribe to events @@ -128,17 +133,28 @@ export type SubscribeEventInput = { * handler: async (event) => { * console.log('Order created:', event.data); * }, + * onError: (error, event) => { + * console.error('Failed to handle event:', event.id, error.message); + * }, + * options: { + * maxRetries: 5, + * baseDelay: 500, + * maxDelay: 15000, + * useJitter: true, + * }, * }); * * // Publish an event - * eventBus.putEvent({ - * specversion: '1.0', - * id: crypto.randomUUID(), + * const event = createEvent({ * type: 'at.overlap.nimbus.order-created', * source: 'https://api.example.com', - * time: new Date().toISOString(), - * data: { orderId: '12345' }, + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * subject: '/orders/12345', + * data: { orderId: '12345', customerId: '67890' }, + * datacontenttype: 'application/json', * }); + * + * eventBus.putEvent(event); * ``` */ export class NimbusEventBus { @@ -153,21 +169,32 @@ export class NimbusEventBus { /** * Create a new NimbusEventBus instance. * - * @param {NimbusEventBusOptions} [options] - The options for the event bus. - * @param {string} [options.name] - The name of the event bus instance for metrics and traces. - * @param {number} [options.maxRetries] - The maximum number of retries for handling the event in case of an error. - * @param {number} [options.baseDelay] - The base delay for exponential backoff in milliseconds. - * @param {number} [options.maxDelay] - The maximum delay cap for exponential backoff in milliseconds. - * @param {boolean} [options.useJitter] - Whether to add jitter to the retry delay. + * @param options - The options for the event bus. 
+ * @param options.name - The name of the event bus instance for metrics and traces. Defaults to 'default'. + * @param options.maxRetries - The maximum number of retries for handling the event in case of an error. Defaults to 2. + * @param options.baseDelay - The base delay for exponential backoff in milliseconds. Defaults to 1000ms. + * @param options.maxDelay - The maximum delay cap for exponential backoff in milliseconds. Defaults to 30000ms. + * @param options.useJitter - Whether to add jitter to the retry delay. Defaults to true. + * @param options.logPublish - Optional callback invoked when an event is published. * * @example * ```ts + * import { getLogger, NimbusEventBus } from '@nimbus/core'; + * * const eventBus = new NimbusEventBus({ * name: 'orders', * maxRetries: 3, * baseDelay: 1000, * maxDelay: 30000, * useJitter: true, + * logPublish: (event) => { + * getLogger().debug({ + * category: 'EventBus', + * message: 'Published event', + * data: { type: event.type, id: event.id }, + * correlationId: event.correlationid, + * }); + * }, * }); * ``` */ @@ -192,14 +219,27 @@ export class NimbusEventBus { * * @example * ```ts - * eventBus.putEvent({ - * specversion: '1.0', - * id: crypto.randomUUID(), + * import { createEvent, getEventBus } from '@nimbus/core'; + * + * const eventBus = getEventBus('default'); + * + * // Create and publish an event with all CloudEvents properties + * const event = createEvent({ * type: 'at.overlap.nimbus.order-created', * source: 'https://api.example.com', - * time: new Date().toISOString(), - * data: { orderId: '12345' }, + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * subject: '/orders/12345', + * data: { + * orderId: '12345', + * customerId: '67890', + * items: ['item-1', 'item-2'], + * total: 99.99, + * }, + * datacontenttype: 'application/json', + * dataschema: 'https://schemas.example.com/order-created.json', * }); + * + * eventBus.putEvent(event); * ``` */ public putEvent(event: TEvent): void { @@ -268,16 +308,39 @@ export class NimbusEventBus { * @param input.handler - The async handler function to process events. * @param input.onError - Optional callback invoked when all retries are exhausted. * @param input.options - Optional retry options to override EventBus defaults. + * @param input.options.maxRetries - Override maximum retry attempts for this subscription. + * @param input.options.baseDelay - Override base delay in milliseconds for this subscription. + * @param input.options.maxDelay - Override maximum delay cap in milliseconds for this subscription. + * @param input.options.useJitter - Override jitter setting for this subscription. 
* * @example * ```ts + * import { getEventBus, getLogger } from '@nimbus/core'; + * + * const eventBus = getEventBus('default'); + * + * // Subscribe with all available options * eventBus.subscribeEvent({ * type: 'at.overlap.nimbus.order-created', * handler: async (event) => { - * console.log('Order created:', event.data); + * // Process the event + * console.log('Order created:', event.data.orderId); + * console.log('Correlation ID:', event.correlationid); * }, * onError: (error, event) => { - * console.error('Failed to process order:', error); + * getLogger().error({ + * category: 'OrderHandler', + * message: 'Failed to process order after all retries', + * data: { eventId: event.id, orderId: event.data.orderId }, + * error, + * correlationId: event.correlationid, + * }); + * }, + * options: { + * maxRetries: 5, + * baseDelay: 500, + * maxDelay: 15000, + * useJitter: true, * }, * }); * ``` @@ -484,20 +547,30 @@ const eventBusRegistry = new Map(); * * @param name - The unique name for this EventBus instance. * @param options - Optional configuration options for the EventBus. + * @param options.maxRetries - The maximum number of retries for handling events. Defaults to 2. + * @param options.baseDelay - The base delay for exponential backoff in milliseconds. Defaults to 1000ms. + * @param options.maxDelay - The maximum delay cap for exponential backoff in milliseconds. Defaults to 30000ms. + * @param options.useJitter - Whether to add jitter to the retry delay. Defaults to true. + * @param options.logPublish - Optional callback invoked when an event is published. * * @example * ```ts - * import { setupEventBus } from '@nimbus/core'; + * import { getLogger, setupEventBus } from '@nimbus/core'; * - * // At application startup - * setupEventBus('orders', { - * maxRetries: 5, - * baseDelay: 500, - * }); - * - * setupEventBus('notifications', { + * // At application startup, configure the event bus with all options + * setupEventBus('default', { * maxRetries: 3, * baseDelay: 1000, + * maxDelay: 30000, + * useJitter: true, + * logPublish: (event) => { + * getLogger().debug({ + * category: 'EventBus', + * message: 'Published event', + * data: { type: event.type, id: event.id }, + * correlationId: event.correlationid, + * }); + * }, * }); * ``` */ @@ -520,20 +593,29 @@ export const setupEventBus = ( * * @example * ```ts - * import { getEventBus } from '@nimbus/core'; + * import { createEvent, getEventBus } from '@nimbus/core'; * - * // Get the orders EventBus (configured earlier with setupEventBus) - * const ordersEventBus = getEventBus('orders'); + * // Get the event bus configured earlier with setupEventBus + * const eventBus = getEventBus('default'); * - * ordersEventBus.subscribeEvent({ - * type: 'order.created', + * // Subscribe to events + * eventBus.subscribeEvent({ + * type: 'at.overlap.nimbus.order-created', * handler: async (event) => { - * console.log('Order created:', event.data); + * console.log('Order created:', event.data.orderId); * }, * }); * - * // Get the default EventBus - * const defaultEventBus = getEventBus(); + * // Publish an event + * const event = createEvent({ + * type: 'at.overlap.nimbus.order-created', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * data: { orderId: '12345', customerId: '67890' }, + * datacontenttype: 'application/json', + * }); + * + * eventBus.putEvent(event); * ``` */ export const getEventBus = (name: string = 'default'): NimbusEventBus => { diff --git a/packages/core/src/lib/log/logger.ts 
b/packages/core/src/lib/log/logger.ts index b2015c6..88db4fc 100644 --- a/packages/core/src/lib/log/logger.ts +++ b/packages/core/src/lib/log/logger.ts @@ -6,31 +6,117 @@ import { defaultLogOptions, type LogOptions } from './options.ts'; /** * The input for a log message. + * + * Use this type when calling any of the Logger's log methods (debug, info, warn, error, critical). */ export type LogInput = { + /** + * The log message to output. This should be a human-readable description of what occurred. + */ message: string; + /** + * An optional category to group related log messages. + * Useful for filtering logs by component or domain (e.g., 'Database', 'API', 'Auth'). + * Defaults to 'Default' if not provided. + */ category?: string; + /** + * Optional structured data to include with the log message. + * This data will be serialized according to the configured formatter. + */ data?: Record; - error?: Error | Exception; + /** + * Optional error or exception to include with the log message. + * The error's message and stack trace will be captured in the log output. + */ + error?: Error; + /** + * Optional correlation ID to trace related log messages across operations. + * Useful for tracking a request through multiple services or handlers. + */ correlationId?: string; }; /** * A full log record with the log input and additional metadata attached. + * + * This type is passed to the LogFormatter function to produce the final log output. */ export type LogRecord = { + /** + * The timestamp when the log message was created. + */ timestamp: Date; + /** + * The severity level of the log message (debug, info, warn, error, critical). + */ level: LogLevel; + /** + * The category for grouping related log messages. + */ category: string; + /** + * The log message describing what occurred. + */ message: string; + /** + * Optional structured data included with the log message. + */ data?: Record; + /** + * Optional error or exception included with the log message. + */ error?: Error | Exception; + /** + * Optional correlation ID for tracing related log messages. + */ correlationId?: string; }; /** - * The Logger provides different log methods to - * log messages at different levels. + * The Logger provides structured logging with configurable log levels, formatters, and console colors. + * + * The Logger is a singleton that should be configured once at application startup using + * {@link setupLogger}, then accessed throughout the application using {@link getLogger}. + * + * Log levels in order of severity: debug < info < warn < error < critical. + * Messages below the configured log level are silently ignored. + * + * @example + * ```ts + * import { + * getLogger, + * jsonLogFormatter, + * parseLogLevel, + * prettyLogFormatter, + * setupLogger, + * } from '@nimbus/core'; + * + * // Configure the logger at application startup + * setupLogger({ + * logLevel: parseLogLevel(process.env.LOG_LEVEL), + * formatter: process.env.NODE_ENV === 'production' + * ? 
jsonLogFormatter + * : prettyLogFormatter, + * useConsoleColors: process.env.NODE_ENV !== 'production', + * }); + * + * // Use the logger throughout your application + * const logger = getLogger(); + * + * logger.info({ + * message: 'Application started', + * category: 'App', + * data: { port: 3000, environment: 'production' }, + * }); + * + * logger.error({ + * message: 'Failed to connect to database', + * category: 'Database', + * error: new Error('Connection timeout'), + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * ``` */ export class Logger { private static _instance: Logger; @@ -358,13 +444,51 @@ export const setupLogger = (options: LogOptions): void => { /** * Get the Logger instance. * - * @returns {Logger} The Logger instance + * Returns the singleton Logger instance. If the logger has not been configured + * via {@link setupLogger}, a default logger with silent log level is returned. + * + * @returns The Logger instance. * * @example * ```ts - * import { getLogger } from "@nimbus/core"; + * import { getLogger } from '@nimbus/core'; * * const logger = getLogger(); + * + * // Log at different levels with all available options + * logger.debug({ + * message: 'Processing request', + * category: 'API', + * data: { method: 'POST', path: '/orders' }, + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * + * logger.info({ + * message: 'Order created successfully', + * category: 'Orders', + * data: { orderId: '12345', customerId: '67890' }, + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * + * logger.warn({ + * message: 'Rate limit approaching', + * category: 'API', + * data: { currentRate: 95, maxRate: 100 }, + * }); + * + * logger.error({ + * message: 'Failed to process payment', + * category: 'Payments', + * data: { orderId: '12345' }, + * error: new Error('Payment gateway timeout'), + * correlationId: '550e8400-e29b-41d4-a716-446655440000', + * }); + * + * logger.critical({ + * message: 'Database connection lost', + * category: 'Database', + * error: new Error('Connection refused'), + * }); * ``` */ export const getLogger = (): Logger => { diff --git a/packages/core/src/lib/log/options.ts b/packages/core/src/lib/log/options.ts index 7310025..54f293c 100644 --- a/packages/core/src/lib/log/options.ts +++ b/packages/core/src/lib/log/options.ts @@ -2,11 +2,30 @@ import { jsonLogFormatter, type LogFormatter } from './logFormatter.ts'; import type { LogLevel } from './logLevel.ts'; /** - * The options for the Log class. + * Configuration options for the Logger. + * + * Use these options with {@link setupLogger} to configure the logger at application startup. */ export type LogOptions = { + /** + * The minimum log level to output. Messages below this level are silently ignored. + * Levels in order of severity: debug < info < warn < error < critical. + * Defaults to 'silent' (no logs output). + */ logLevel?: LogLevel; + /** + * The formatter function used to convert LogRecord objects into output strings. + * Use `jsonLogFormatter` for structured JSON output (recommended for production), + * or `prettyLogFormatter` for human-readable output (recommended for development). + * Defaults to `jsonLogFormatter`. + */ formatter?: LogFormatter; + /** + * Whether to apply ANSI color codes to the console output based on log level. + * Set to true for colored output in development terminals. + * Set to false for production or when outputting to log aggregation systems. + * Defaults to false. 
+ */ useConsoleColors?: boolean; }; diff --git a/packages/core/src/lib/message/router.ts b/packages/core/src/lib/message/router.ts index 14090da..90423f6 100644 --- a/packages/core/src/lib/message/router.ts +++ b/packages/core/src/lib/message/router.ts @@ -45,7 +45,19 @@ export type MessageRouterOptions = { * Defaults to 'default'. */ name?: string; + /** + * Optional callback invoked when a message is received for routing. + * Useful for custom logging or debugging of incoming messages. + * + * @param input - The incoming message to be routed. + */ logInput?: (input: any) => void; + /** + * Optional callback invoked after a message has been successfully handled. + * Useful for custom logging or debugging of handler results. + * + * @param output - The result returned by the message handler. + */ logOutput?: (output: any) => void; }; @@ -60,34 +72,47 @@ type HandlerRegistration = { }; /** - * The MessageRouter routes messages to their handlers - * based on the type value of the message. + * The MessageRouter routes messages to their handlers based on the type value of the message. + * + * Messages are validated against their registered Zod schemas before being passed to handlers. + * All routing operations are instrumented with OpenTelemetry tracing and metrics for observability. * * @example * ```ts - * import { MessageRouter } from "@nimbus/core"; + * import { createCommand, MessageRouter } from '@nimbus/core'; * - * const messageRouter = new MessageRouter(); + * const messageRouter = new MessageRouter({ + * name: 'api', + * logInput: (input) => { + * console.log('Received message:', input.type); + * }, + * logOutput: (output) => { + * console.log('Handler result:', output); + * }, + * }); * + * // Register command handler * messageRouter.register( - * 'at.overlap.nimbus.add-recipe', - * addRecipeHandler, - * addRecipeSchema, + * 'at.overlap.nimbus.create-order', + * createOrderHandler, + * createOrderCommandSchema, * ); * + * // Register query handler * messageRouter.register( - * 'at.overlap.nimbus.recipe-added', - * recipeAddedHandler, - * recipeAddedSchema, + * 'at.overlap.nimbus.get-order', + * getOrderHandler, + * getOrderQuerySchema, * ); * - * messageRouter.register( - * 'at.overlap.nimbus.get-recipe', - * getRecipeHandler, - * getRecipeSchema, - * ); + * // Route a command + * const command = createCommand({ + * type: 'at.overlap.nimbus.create-order', + * source: 'https://api.example.com', + * data: { customerId: '123', items: ['item-1', 'item-2'] }, + * }); * - * const result = await messageRouter.route(someInput); + * const result = await messageRouter.route(command); * ``` */ export class MessageRouter { @@ -108,23 +133,37 @@ export class MessageRouter { /** * Register a handler for a specific message type. * - * @param {string} messageType - The messages type as defined in the CloudEvents specification (e.g., 'at.overlap.nimbus.add-recipe') - * @param {MessageHandler} handler - The handler function - * @param {ZodSchema} schema - The schema to validate the command + * @param messageType - The message type as defined in the CloudEvents specification + * (e.g., 'at.overlap.nimbus.create-order'). + * @param handler - The async handler function that processes the message and returns a result. + * @param schema - The Zod schema to validate the incoming message before passing to the handler. 
* * @example * ```ts - * router.register( - * 'at.overlap.nimbus.add-recipe', - * addRecipeHandler, - * addRecipeSchema, - * ); + * import { commandSchema, type Command, getRouter } from '@nimbus/core'; + * import { z } from 'zod'; * - * router.register( - * 'at.overlap.nimbus.get-recipe', - * getRecipeHandler, - * getRecipeSchema, - * ); + * // Define the command type and schema + * const CREATE_ORDER_TYPE = 'at.overlap.nimbus.create-order'; + * + * const createOrderSchema = commandSchema.extend({ + * type: z.literal(CREATE_ORDER_TYPE), + * data: z.object({ + * customerId: z.string(), + * items: z.array(z.string()), + * }), + * }); + * type CreateOrderCommand = z.infer; + * + * // Define the handler + * const createOrderHandler = async (command: CreateOrderCommand) => { + * // Process the command and return the result + * return { orderId: '12345', status: 'created' }; + * }; + * + * // Register the handler + * const router = getRouter('default'); + * router.register(CREATE_ORDER_TYPE, createOrderHandler, createOrderSchema); * ``` */ public register( @@ -146,13 +185,37 @@ export class MessageRouter { /** * Route a message to its handler. * - * @param {unknown} input - The raw input to route + * The message is validated against the registered schema before being passed to the handler. + * The routing operation is instrumented with OpenTelemetry tracing and metrics. + * + * @param input - The CloudEvents-compliant message to route (command, query, or event). + * @returns The result from the handler. + * + * @throws {NotFoundException} If no handler is registered for the message type. + * @throws {InvalidInputException} If the message has no type attribute or fails schema validation. + * + * @example + * ```ts + * import { createCommand, getRouter } from '@nimbus/core'; + * + * const router = getRouter('default'); * - * @returns {Promise} The result from the handler + * // Create a command with all CloudEvents properties + * const command = createCommand({ + * type: 'at.overlap.nimbus.create-order', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * data: { + * customerId: '123', + * items: ['item-1', 'item-2'], + * }, + * datacontenttype: 'application/json', + * }); * - * @throws {NotFoundException} - If no handler is registered for the message type - * @throws {InvalidInputException} - If the message is invalid - * @throws {GenericException} - If an error occurs during routing + * // Route the command to its registered handler + * const result = await router.route(command); + * console.log('Order created:', result); + * ``` */ public async route(input: any): Promise { const startTime = performance.now(); @@ -270,15 +333,30 @@ const routerRegistry = new Map(); * * @param name - The unique name for this MessageRouter instance. * @param options - Optional configuration options for the MessageRouter. + * @param options.logInput - Optional callback invoked when a message is received for routing. + * @param options.logOutput - Optional callback invoked after a message has been successfully handled. 
* * @example * ```ts - * import { setupRouter } from '@nimbus/core'; + * import { getLogger, setupRouter } from '@nimbus/core'; * - * // At application startup + * // At application startup, configure the router with all options * setupRouter('default', { - * logInput: (input) => console.log('Input:', input), - * logOutput: (output) => console.log('Output:', output), + * logInput: (input) => { + * getLogger().debug({ + * category: 'Router', + * message: 'Received message', + * data: { type: input.type, correlationId: input.correlationid }, + * correlationId: input.correlationid, + * }); + * }, + * logOutput: (output) => { + * getLogger().debug({ + * category: 'Router', + * message: 'Handler completed', + * data: { output }, + * }); + * }, * }); * ``` */ @@ -301,19 +379,28 @@ export const setupRouter = ( * * @example * ```ts - * import { getRouter } from '@nimbus/core'; + * import { createCommand, getRouter } from '@nimbus/core'; * - * // Get the default router (configured earlier with setupRouter) + * // Get the router configured earlier with setupRouter * const router = getRouter('default'); * + * // Register handlers * router.register( - * 'order.create', + * 'at.overlap.nimbus.create-order', * createOrderHandler, * createOrderSchema, * ); * - * // Get the default router - * const defaultRouter = getRouter(); + * // Route a message + * const command = createCommand({ + * type: 'at.overlap.nimbus.create-order', + * source: 'https://api.example.com', + * correlationid: '550e8400-e29b-41d4-a716-446655440000', + * data: { customerId: '123', items: ['item-1'] }, + * datacontenttype: 'application/json', + * }); + * + * const result = await router.route(command); * ``` */ export const getRouter = (name: string = 'default'): MessageRouter => { From d9e2c48f0b3692bbb88485c2627614b2bfefb09d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Fri, 23 Jan 2026 14:52:34 +0100 Subject: [PATCH 23/37] changing the docs --- docs/.vitepress/config.mjs | 43 +-- docs/guide/core/commands.md | 267 ++++--------- docs/guide/core/event-bus.md | 185 ++++++--- docs/guide/core/events.md | 365 +++++------------- docs/guide/core/exceptions.md | 168 ++++++-- docs/guide/core/index.md | 27 +- docs/guide/core/logging.md | 258 ++++++++++--- docs/guide/core/queries.md | 324 +++------------- docs/guide/core/router.md | 210 +++++++--- docs/guide/eventsourcing/aggregate.md | 1 - docs/guide/eventsourcing/event-store.md | 1 - docs/guide/eventsourcing/index.md | 24 -- .../guide/eventsourcing/why-event-sourcing.md | 7 - docs/guide/eventsourcingdb/event-store.md | 1 - docs/guide/eventsourcingdb/index.md | 23 -- docs/guide/observability.md | 234 +++++++++++ docs/guide/project-structure.md | 40 -- .../messages/commands/addUser.command.ts | 17 +- .../lib/exception/invalidInputException.ts | 2 - packages/core/src/lib/message/event.ts | 3 +- packages/core/src/lib/message/router.ts | 5 +- 21 files changed, 1108 insertions(+), 1097 deletions(-) delete mode 100644 docs/guide/eventsourcing/aggregate.md delete mode 100644 docs/guide/eventsourcing/event-store.md delete mode 100644 docs/guide/eventsourcing/index.md delete mode 100644 docs/guide/eventsourcing/why-event-sourcing.md delete mode 100644 docs/guide/eventsourcingdb/event-store.md delete mode 100644 docs/guide/eventsourcingdb/index.md create mode 100644 docs/guide/observability.md delete mode 100644 docs/guide/project-structure.md diff --git a/docs/.vitepress/config.mjs b/docs/.vitepress/config.mjs index d2d0639..a6fc2be 100644 --- a/docs/.vitepress/config.mjs +++ 
b/docs/.vitepress/config.mjs @@ -33,10 +33,9 @@ export default defineConfig({ link: "/guide/quickstart", }, { - text: "Project Structure", - link: "/guide/project-structure", + text: "Observability", + link: "/guide/observability", }, - { text: "Core", link: "/guide/core", @@ -61,14 +60,14 @@ export default defineConfig({ text: "Event Bus", link: "/guide/core/event-bus", }, - { - text: "Exceptions", - link: "/guide/core/exceptions", - }, { text: "Logging", link: "/guide/core/logging", }, + { + text: "Exceptions", + link: "/guide/core/exceptions", + }, ], }, @@ -83,36 +82,6 @@ export default defineConfig({ ], }, - { - text: "Event Sourcing", - link: "/guide/eventsourcing", - items: [ - { - text: "Why Event Sourcing?", - link: "/guide/eventsourcing/why-event-sourcing", - }, - { - text: "Event Store", - link: "/guide/eventsourcing/event-store", - }, - { - text: "Aggregate", - link: "/guide/eventsourcing/aggregate", - }, - ], - }, - - { - text: "EventSourcingDB", - link: "/guide/eventsourcingdb", - items: [ - { - text: "EventSourcingDB Event Store", - link: "/guide/eventsourcingdb/event-store", - }, - ], - }, - { text: "MongoDB", link: "/guide/mongodb", diff --git a/docs/guide/core/commands.md b/docs/guide/core/commands.md index 93b56b4..25a47d1 100644 --- a/docs/guide/core/commands.md +++ b/docs/guide/core/commands.md @@ -1,228 +1,121 @@ +--- +prev: + text: "Observability" + link: "/guide/core/observability" + +next: + text: "Queries" + link: "/guide/core/queries" +--- + # Commands Commands represent write operations - intentions to change system state in the application. -Commands follow the Command pattern from CQRS (Command Query Responsibility Segregation), where writes and reads are separated for better scalability and maintainability. +Commands also fit perfectly into the CQRS pattern (Command Query Responsibility Segregation), where writes and reads are separated for better scalability and maintainability. But keep it simple for your use case and needs. CQRS in an option, but not required. ::: info Example Application -The examples on this page reference the Gustav application. +The examples on this page reference the hono-demo application. 
-You can find the full example on GitHub: [Gustav Recipe App](https://github.com/overlap-dev/Nimbus/tree/main/examples/gustav) +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) ::: ## Key Characteristics -- **Write Operations**: Commands modify application state -- **Intent-Based**: Commands express what should happen (e.g., "AddRecipe", "DeleteRecipe") -- **Validated**: Command data is validated before execution -- **Type-Safe**: Full TypeScript type safety for command data and handlers +- **Write Operations**: Commands modify application state +- **Intent-Based**: Commands express what should happen (e.g., "AddUser", "DeleteUser") +- **Type-Safe**: Commands are fully typed and validated using Zod ## Command Structure A command in Nimbus follows the CloudEvents specification and consists of: ```typescript -export type Command = { - specversion: '1.0'; +type Command = { + specversion: "1.0"; id: string; correlationid: string; time: string; source: string; type: string; - data: T; - datacontenttype: string; -}; -``` - -## Example: Add Recipe Command - -### Define the Command Type - -Create a command type definition in the core layer: - -```typescript -// core/commands/addRecipe.ts -import { Command } from '@nimbus/core'; -import { Recipe } from '../domain/recipe.ts'; - -export const AddRecipeCommandType = 'at.overlap.nimbus.app-recipe' as const; - -export type AddRecipeCommand = Command & { - type: typeof AddRecipeCommandType; + subject?: string; + data: TData; + datacontenttype?: string; + dataschema?: string; }; ``` -### Implement Pure Core Logic - -The core function contains pure business logic with no I/O operations: +| Property | Description | +| ----------------- | ---------------------------------------------------------------------------------- | +| `specversion` | The CloudEvents specification version (always `'1.0'`) | +| `id` | A globally unique identifier for the command | +| `correlationid` | A unique identifier to correlate this command with related messages | +| `time` | ISO 8601 timestamp when the command was created | +| `source` | A URI reference identifying the system creating the command | +| `type` | The command type following CloudEvents naming (e.g., `at.overlap.nimbus.add-user`) | +| `subject` | Optional identifier for the entity the command targets | +| `data` | The command payload containing the business data | +| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | +| `dataschema` | Optional URL to the schema the data adheres to | -```typescript -// core/commands/addRecipe.ts -export const addRecipe = ( - command: AddRecipeCommand, - state: RecipeState, -): { - newState: Recipe; - events: RecipeAddedEvent[]; -} => { - // Business validation - if (state !== null) { - throw new InvalidInputException('Recipe already exists', { - errorCode: 'DUPLICATE_RECIPE', - }); - } - - const subject = recipeSubject(command.data.slug); - const event = createRecipeAddedEvent(command, subject); - - return { - newState: command.data, - events: [event], - }; -}; -``` +## Command Schema -### Implement Shell Handler - -The handler orchestrates I/O operations and calls the pure core logic: +Nimbus provides a base Zod schema for validating commands: ```typescript -// infrastructure/http/handler/addRecipe.handler.ts -import { MessageHandler } from '@nimbus/core'; -import { loadAggregate } from '@nimbus/eventsourcing'; -import { eventStore } from '../eventStore.ts'; -import { addRecipe, 
AddRecipeCommand } from '../../../core/commands/addRecipe.ts'; -import { recipeReducer, recipeSubject } from '../../../core/domain/recipeAggregate.ts'; - -export const addRecipeHandler: MessageHandler = - async (command) => { - const subject = recipeSubject(command.data.slug); - - // Load current state from event store - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - - // Call pure core logic - const { newState, events } = addRecipe(command, snapshot.state); - - // Persist events with optimistic concurrency control - await eventStore.writeEvents(events, { - preconditions: snapshot.lastEventId !== undefined - ? [{ type: 'isSubjectOnEventId', payload: { subject, eventId: snapshot.lastEventId } }] - : [{ type: 'isSubjectPristine', payload: { subject } }], - }); - - return newState; - }; +import { commandSchema } from "@nimbus/core"; +import { z } from "zod"; + +// Extend the base schema with your specific command type and data +const addUserCommandSchema = commandSchema.extend({ + type: z.literal("at.overlap.nimbus.add-user"), + data: z.object({ + email: z.email(), + firstName: z.string(), + lastName: z.string(), + }), +}); + +type AddUserCommand = z.infer; ``` -## Architecture Pattern - -Commands follow the Pure Core - Imperative Shell pattern: - -1. **Core Layer** (Pure): - - Define command types - - Implement business logic - - No I/O operations - - Returns new state and events to persist +## Create Commands -2. **Infrastructure Layer** (Shell): - - Define message handlers - - Load current state from data sources - - Call pure core functions - - Persist results - - Handle errors and responses - -## Best Practices - -### Keep Core Pure - -The core command logic should be completely free of side effects: +You can create commands using the `createCommand()` helper: ```typescript -// ✅ Good - Pure function -export const addRecipe = (command: AddRecipeCommand, state: RecipeState) => { - if (state !== null) throw new InvalidInputException('Recipe already exists'); - return { newState: command.data, events: [createEvent(command)] }; -}; - -// ❌ Bad - Has side effects -export const addRecipe = async (command: AddRecipeCommand) => { - const existing = await db.findRecipe(command.data.slug); // I/O in core! - if (existing) throw new InvalidInputException('Recipe already exists'); - await db.saveRecipe(command.data); // I/O in core! 
-}; -``` - -### Use Ports for Dependencies - -When the core needs external data, define ports (interfaces): - -```typescript -// core/ports/recipeRepository.ts -export interface RecipeRepository { - getBySlug(slug: string): Promise; - save(recipe: Recipe): Promise; -} -``` - -The shell provides the implementation (adapter): - -```typescript -// infrastructure/repository/recipeMemoryRepository.ts -export const recipeMemoryRepository: RecipeRepository = { - async getBySlug(slug: string) { /* implementation */ }, - async save(recipe: Recipe) { /* implementation */ }, -}; -``` - -### Validate Early - -Validate command data before reaching core logic using JSON schemas: - -```typescript -// infrastructure/http/schemas/addRecipeCommandSchema.ts -export const addRecipeCommandSchema = { - $id: 'https://nimbus.overlap.at/schemas/commands/add-recipe/v1', - type: 'object', - properties: { - slug: { type: 'string', minLength: 1, maxLength: 100 }, - title: { type: 'string', minLength: 1, maxLength: 200 }, - ingredients: { type: 'array', items: { $ref: '#/definitions/ingredient' } }, +import { createCommand } from "@nimbus/core"; +import { AddUserCommand } from "./addUser.command.ts"; + +const commandForJane = createCommand({ + type: "at.overlap.nimbus.add-user", + source: "nimbus.overlap.at", + data: { + email: "jane@example.com", + firstName: "Jane", + lastName: "Doe", }, - required: ['slug', 'title', 'ingredients'], -}; +}); + +const commandForJohn = createCommand({ + type: "at.overlap.nimbus.add-user", + source: "nimbus.overlap.at", + data: { + email: "john@example.com", + firstName: "John", + lastName: "Doe", + }, +}); ``` -### Emit Events +The `createCommand()` helper automatically generates default values for: -Commands should emit domain events for other parts of the system to react to: - -```typescript -const event: RecipeAddedEvent = { - specversion: '1.0', - id: ulid(), - correlationid: command.correlationid, - time: new Date().toISOString(), - source: EVENT_SOURCE, - type: 'at.overlap.nimbus.recipe-added', - subject: `/recipes/${command.data.slug}`, - data: command.data, - datacontenttype: 'application/json', -}; -``` +- `id` - A unique ULID +- `correlationid` - A unique ULID (if not provided) +- `time` - Current ISO timestamp +- `specversion` - Always `'1.0'` +- `datacontenttype` - Defaults to `'application/json'` ## Routing Commands -Commands are routed to handlers using the message router. See the [HTTP Guide](/guide/http/) for more details on routing commands through HTTP endpoints. - -## Related Patterns - -- [Queries](/guide/core/queries) - Read operations -- [Events](/guide/core/events) - Domain events -- [Event Sourcing](/guide/eventsourcing/) - Persisting state as events -- [CQRS](/guide/what-is-nimbus#cqrs-event-sourcing) - Separating reads and writes +Commands are routed to handlers using the [MessageRouter](/guide/core/router). See the Router documentation for details on registering handlers and routing messages. diff --git a/docs/guide/core/event-bus.md b/docs/guide/core/event-bus.md index 19d1207..330ffba 100644 --- a/docs/guide/core/event-bus.md +++ b/docs/guide/core/event-bus.md @@ -1,90 +1,153 @@ +--- +prev: + text: "Router" + link: "/guide/core/router" + +next: + text: "Exceptions" + link: "/guide/core/exceptions" +--- + # Event Bus -The Nimbus event bus allows to publish and subscribe to [events](/guide/core/events.md) within the application. +The NimbusEventBus enables publish/subscribe messaging for [events](/guide/core/events) within your application. 
Events are delivered asynchronously to all registered handlers with automatic retry on failure. ::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) +The examples on this page reference the hono-demo application. -Check it out and run it with `deno task dev` +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) ::: -## Event Subscriptions +## Setup and Configuration -To set up event subscriptions, we want to create a new instance of the `NimbusEventBus` first. Then we want to use the `subscribeEvent` method to subscribe to all the events the application needs to handle. +Configure the event bus at application startup using `setupEventBus()`, then retrieve it anywhere using `getEventBus()`. -In the `main.ts` file we call the `initEventBusSubscriptions` function to subscribe to all the events for the different domains when the application starts. +```typescript +import { getLogger, setupEventBus } from "@nimbus/core"; + +setupEventBus("MyEventBus", { + maxRetries: 2, + baseDelay: 1000, + maxDelay: 30000, + useJitter: true, + logPublish: (event) => { + getLogger().debug({ + category: "MyEventBus", + message: "Published event", + data: { event }, + ...(event?.correlationid + ? { correlationId: event.correlationid } + : {}), + }); + }, +}); +``` -::: code-group +### Configuration Options -```typescript [eventBus.ts] -import { NimbusEventBus, RouteHandlerMap } from "@nimbus/core"; -import { accountEventSubscriptions } from "./account/shell/account.eventBus.ts"; +| Option | Type | Default | Description | +| ------------ | ----------------- | ------- | -------------------------------------------------- | +| `maxRetries` | `number` | `2` | Maximum retry attempts for failed handlers | +| `baseDelay` | `number` | `1000` | Base delay in milliseconds for exponential backoff | +| `maxDelay` | `number` | `30000` | Maximum delay cap in milliseconds | +| `useJitter` | `boolean` | `true` | Add randomness to delay to prevent thundering herd | +| `logPublish` | `(event) => void` | - | Optional callback when an event is published | -// -// Create a new instance of the event bus -// -export const eventBus = new NimbusEventBus({ - maxRetries: 3, -}); +## Subscribing to Events -// -// Create a function that subscribes to all -// the events for the different domains -// -export const initEventBusSubscriptions = () => { - const subscriptions: Record = { - account: accountEventSubscriptions, - }; - - for (const [, handlerMap] of Object.entries(subscriptions)) { - for (const eventName of Object.keys(handlerMap)) { - eventBus.subscribeEvent( - eventName, - handlerMap[eventName].inputType, - handlerMap[eventName].handler - ); - } - } -}; -``` +Subscribe to event types using `subscribeEvent()`: -```typescript [account.eventBus.ts] -import { RouteHandlerMap } from "@nimbus/core"; -import { AccountAddedEvent } from "../core/events/accountAdded.ts"; -import { accountAddedHandler } from "./events/accountAdded.handler.ts"; +```typescript +import { getEventBus } from "@nimbus/core"; -export const accountEventSubscriptions: RouteHandlerMap = { - ACCOUNT_ADDED: { - handler: accountAddedHandler, - inputType: AccountAddedEvent, +const eventBus = getEventBus("MyEventBus"); + +eventBus.subscribeEvent({ + type: "at.overlap.nimbus.user-added", + handler: async (event: UserAddedEvent) => { + // Process event and return result + }, +}); + +eventBus.subscribeEvent({ 
+ type: "at.overlap.nimbus.onboarding-started", + handler: async (event: OnboardingStartedEvent) => { + // Process event and return result }, -}; + onError: (error, event) => { + // Handle the error + }, + options: { + maxRetries: 0, // Override the default of 2 retries for this subscription + }, +}); ``` -```typescript [main.ts] -import { initEventBusSubscriptions } from "./eventBus.ts"; +### Subscription Options -initEventBusSubscriptions(); -``` +The `subscribeEvent()` method accepts the following options: -::: +| Option | Type | Description | +| --------- | -------------------------- | ---------------------------------------------------- | +| `type` | `string` | The CloudEvents type to subscribe to | +| `handler` | `(event) => Promise` | Async handler function for the event | +| `onError` | `(error, event) => void` | Optional callback when all retries are exhausted | +| `options` | `object` | Optional retry options to override EventBus defaults | -## Publish Events +## Publishing Events -To publish an event, we can use the `putEvent` method of the `NimbusEventBus` class. +Publish events using `putEvent()`: ```typescript -import { eventBus } from "../../../eventBus.ts"; -import { AccountAddedEvent } from "../../core/events/accountAdded.ts"; +import { createEvent, getEventBus } from "@nimbus/core"; + +const eventBus = getEventBus("default"); -eventBus.putEvent({ - name: "ACCOUNT_ADDED", +const event = createEvent({ + type: "at.overlap.nimbus.user-added", + source: "nimbus.overlap.at", + correlationid: command.correlationid, + subject: `/users/${user.id}`, data: { - account: account, - }, - metadata: { - correlationId: command.metadata.correlationId, - authContext: command.metadata.authContext, + email: user.email, + firstName: user.firstName, + lastName: user.lastName, }, }); + +eventBus.putEvent(event); ``` + +## Retry Mechanism + +When a handler throws an error, the event bus automatically retries using exponential backoff: + +1. **First retry**: Waits `baseDelay` ms (default: 1000ms) +2. **Second retry**: Waits `baseDelay * 2` ms (2000ms) +3. **Third retry**: Waits `baseDelay * 4` ms (4000ms) +4. ... continues until `maxDelay` is reached + +With `useJitter: true`, a small random amount (up to 10% of the delay) is added to prevent multiple handlers from retrying simultaneously. + +After all retries are exhausted, the `onError` callback is invoked (if provided), or the error is logged. + +## Event Size Limit + +The event bus enforces the CloudEvents specification size limit of 64KB. If you attempt to publish an event larger than this, a `GenericException` is thrown. + +## Observability + +The event bus is fully instrumented with OpenTelemetry tracing and metrics. See the [Observability](/guide/core/observability) documentation for details. 
+ +**Tracing:** + +- `eventbus.publish` span for event publishing +- `eventbus.handle` span for event handling + +**Metrics:** + +- `eventbus_events_published_total` - Counter for published events +- `eventbus_events_delivered_total` - Counter for delivered events (with success/error status) +- `eventbus_event_handling_duration_seconds` - Histogram of handler execution time +- `eventbus_retry_attempts_total` - Counter for retry attempts +- `eventbus_event_size_bytes` - Histogram of event sizes diff --git a/docs/guide/core/events.md b/docs/guide/core/events.md index 1e359e3..c44b667 100644 --- a/docs/guide/core/events.md +++ b/docs/guide/core/events.md @@ -1,3 +1,13 @@ +--- +prev: + text: "Queries" + link: "/guide/core/queries" + +next: + text: "Router" + link: "/guide/core/router" +--- + # Events Events represent facts - things that have already happened in the system. @@ -5,229 +15,108 @@ Events represent facts - things that have already happened in the system. Events are immutable records of state changes that occurred in the application. They enable event-driven architectures, event sourcing, and asynchronous processing. ::: info Example Application -The examples on this page reference the Gustav application. +The examples on this page reference the hono-demo application. -You can find the full example on GitHub: [Gustav Recipe App](https://github.com/overlap-dev/Nimbus/tree/main/examples/gustav) +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) ::: ## Key Characteristics -- **Immutable Facts**: Events represent things that already happened and cannot be changed -- **Past Tense**: Event names use past tense (e.g., "RecipeAdded", not "AddRecipe") -- **Observable**: Other parts of the system can subscribe and react to events -- **Type-Safe**: Full TypeScript type safety for event data and handlers +- **Immutable Facts**: Events represent things that already happened and cannot be changed +- **Past Tense**: Event names use past tense (e.g., "UserAdded", not "AddUser") +- **Observable**: Other parts of the system can subscribe and react to events +- **Type-Safe**: Events are fully typed and validated using Zod ## Event Structure An event in Nimbus follows the CloudEvents specification and consists of: ```typescript -export type Event = { - specversion: '1.0'; +type Event = { + specversion: "1.0"; id: string; correlationid: string; time: string; source: string; type: string; subject: string; - data: T; - datacontenttype: string; + data: TData; + datacontenttype?: string; + dataschema?: string; }; ``` -## Example: Recipe Added Event - -### Define the Event Type +| Property | Description | +| ----------------- | ---------------------------------------------------------------------------------- | +| `specversion` | The CloudEvents specification version (always `'1.0'`) | +| `id` | A globally unique identifier for the event | +| `correlationid` | A unique identifier to correlate this event with related messages | +| `time` | ISO 8601 timestamp when the event was created | +| `source` | A URI reference identifying the system creating the event | +| `type` | The event type following CloudEvents naming (e.g., `at.overlap.nimbus.user-added`) | +| `subject` | An identifier for the entity the event is about (e.g., `/users/123`) | +| `data` | The event payload containing the business data | +| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | +| `dataschema` | Optional URL to the schema the data 
adheres to | -Create an event type definition in the core layer: - -```typescript -// core/events/recipeAdded.ts -import { Event } from '@nimbus/core'; -import { Recipe } from '../domain/recipe.ts'; - -export const RecipeAddedEventType = 'at.overlap.nimbus.recipe-added' as const; - -export type RecipeAddedEvent = Event & { - type: typeof RecipeAddedEventType; -}; -``` - -### Create Events in Command Handlers +## Event Subjects -Events are typically created and emitted by command handlers: +Unlike commands and queries, events **require** a `subject` field. +Events use subjects to organize and identify the entities they relate to: ```typescript -// core/commands/addRecipe.ts -import { ulid } from '@std/ulid'; -import { getEnv } from '@nimbus/utils'; - -export const addRecipe = ( - command: AddRecipeCommand, - state: RecipeState, -): { - newState: Recipe; - events: RecipeAddedEvent[]; -} => { - if (state !== null) { - throw new InvalidInputException('Recipe already exists'); - } - - const { EVENT_SOURCE } = getEnv({ variables: ['EVENT_SOURCE'] }); - const subject = recipeSubject(command.data.slug); - - // Create the event - const recipeAddedEvent: RecipeAddedEvent = { - specversion: '1.0', - id: ulid(), - correlationid: command.correlationid, - time: new Date().toISOString(), - source: EVENT_SOURCE, - type: RecipeAddedEventType, - subject, - data: command.data, - datacontenttype: 'application/json', - }; - - return { - newState: command.data, - events: [recipeAddedEvent], - }; -}; +// Subject examples +"/users/123"; // Specific user +"/orders/456"; // Specific order +"/users/123/orders/456"; // Order belonging to a user ``` -### Subscribe to Events +## Event Schema -Event handlers react to events by updating read models, sending notifications, or triggering other processes: +Nimbus provides a base Zod schema for validating events: ```typescript -// infrastructure/eventHandler/recipeAdded.handler.ts -import { getLogger } from '@nimbus/core'; -import { RecipeAddedEvent } from '../../core/events/recipeAdded.ts'; - -export const recipeAdded = (event: RecipeAddedEvent) => { - getLogger().info({ - message: 'Recipe added', - slug: event.data.slug, - title: event.data.title, - }); - - // Additional side effects: - // - Update read model - // - Send notification - // - Trigger related processes -}; -``` - -### Observe Events from Event Store +import { eventSchema } from "@nimbus/core"; +import { z } from "zod"; -Use the event store observer to subscribe to events: - -```typescript -// shared/infrastructure/eventStore.ts -import { EventStore } from '@nimbus/eventsourcingdb'; -import { recipeAdded } from './eventHandler/recipeAdded.handler.ts'; - -export const eventStore = new EventStore(/* config */); - -// Subscribe to recipe events -eventStore.observe({ - subjects: ['/recipes/*'], - handler: async (event) => { - switch (event.type) { - case 'at.overlap.nimbus.recipe-added': - recipeAdded(event as RecipeAddedEvent); - break; - case 'at.overlap.nimbus.recipe-updated': - recipeUpdated(event as RecipeUpdatedEvent); - break; - case 'at.overlap.nimbus.recipe-deleted': - recipeDeleted(event as RecipeDeletedEvent); - break; - } - }, +// Extend the base schema with your specific event type and data +const userAddedEventSchema = eventSchema.extend({ + type: z.literal("at.overlap.nimbus.user-added"), + data: z.object({ + _id: z.string(), + email: z.string(), + firstName: z.string(), + lastName: z.string(), + }), }); -``` - -## Event Sourcing - -Events can be used as the source of truth for application state 
through event sourcing: - -### Event Reducer -An event reducer reconstructs aggregate state by replaying events: - -```typescript -// core/domain/recipeAggregate.ts -import { Event } from '@nimbus/core'; -import { EventReducer } from '@nimbus/eventsourcing'; -import { Recipe } from './recipe.ts'; - -export type RecipeState = Recipe | null; - -export const recipeReducer: EventReducer = ( - state: RecipeState, - event: Event, -): RecipeState => { - switch (event.type) { - case 'at.overlap.nimbus.recipe-added': - return event.data as Recipe; - case 'at.overlap.nimbus.recipe-updated': - return { ...state, ...(event.data as Partial) }; - case 'at.overlap.nimbus.recipe-deleted': - return null; - default: - return state; - } -}; +type UserAddedEvent = z.infer; ``` -### Load Aggregate from Events +## Create Events -Load current state by replaying all events for a subject: +You can create events using the `createEvent()` helper: ```typescript -import { loadAggregate } from '@nimbus/eventsourcing'; - -const subject = recipeSubject(slug); -const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, -); - -// snapshot.state contains the current state -// snapshot.lastEventId can be used for optimistic concurrency -``` +import { createEvent } from "@nimbus/core"; +import { UserAddedEvent } from "./userAdded.event.ts"; -## Event Subjects - -Events use subjects to organize and filter events hierarchically: - -```typescript -// Subject patterns -'/recipes/carbonara' // Specific recipe -'/recipes/*' // All recipes -'/users/123/preferences' // User preferences -'/orders/456/items/*' // All items in an order +const event = createEvent({ + type: "at.overlap.nimbus.user-added", + source: "nimbus.overlap.at", + correlationid: command.correlationid, + subject: `/users/${userState._id}`, + data: userState, +}); ``` -Observers can subscribe to subject patterns: +The `createEvent()` helper automatically generates default values for: -```typescript -// Subscribe to all recipes -eventStore.observe({ - subjects: ['/recipes/*'], - handler: async (event) => { /* handle event */ }, -}); - -// Subscribe to multiple patterns -eventStore.observe({ - subjects: ['/recipes/*', '/ingredients/*'], - handler: async (event) => { /* handle event */ }, -}); -``` +- `id` - A unique ULID +- `correlationid` - A unique ULID (if not provided) +- `time` - Current ISO timestamp +- `specversion` - Always `'1.0'` +- `datacontenttype` - Defaults to `'application/json'` ## Best Practices @@ -237,38 +126,27 @@ Event names should describe what happened, not what should happen: ```typescript // ✅ Good - Past tense -RecipeAddedEvent -UserRegisteredEvent -OrderShippedEvent +UserAddedEvent; +OrderShippedEvent; +PaymentProcessedEvent; // ❌ Bad - Imperative -AddRecipeEvent -RegisterUserEvent -ShipOrderEvent -``` - -### Keep Events Immutable - -Events should never be modified after creation: - -```typescript -// ✅ Good - Create new event -const updatedEvent = { ...originalEvent, data: newData }; - -// ❌ Bad - Mutate existing event -originalEvent.data = newData; +AddUserEvent; +ShipOrderEvent; +ProcessPaymentEvent; ``` -### Include Correlation IDs +### Propagate Correlation IDs -Maintain correlation IDs across commands and events for tracing: +Always pass correlation IDs from commands to events for tracing: ```typescript -const event: RecipeAddedEvent = { - // ... - correlationid: command.correlationid, // Inherit from command - // ... 
-};
+const event = createEvent({
+    type: USER_ADDED_EVENT_TYPE,
+    source: "nimbus.overlap.at",
+    correlationid: command.correlationid, // Always propagate
+    data: state,
+});
```

### Use Meaningful Subjects
@@ -277,75 +155,10 @@ Subjects should be hierarchical and meaningful:
```typescript
// ✅ Good - Hierarchical and clear
-`/recipes/${slug}`
-`/users/${userId}/orders/${orderId}`
-
-// ❌ Bad - Flat and unclear
-`recipe-${slug}`
-`order_${orderId}`
-```
-
-### Version Event Schemas
-
-Include version information in event types for schema evolution:
-
-```typescript
-export const RecipeAddedEventType = 'at.overlap.nimbus.recipe-added.v1' as const;
-
-// Later, when schema changes
-export const RecipeAddedEventTypeV2 = 'at.overlap.nimbus.recipe-added.v2' as const;
-```
-
-### Handle Event Ordering
-
-Be aware that events may arrive out of order in distributed systems. Use event IDs and timestamps when ordering matters.
-
-## Read Model Updates
-
-Events are commonly used to update read models in CQRS systems:
-
-```typescript
-// Update read model based on events
-eventStore.observe({
-    subjects: ['/recipes/*'],
-    handler: async (event) => {
-        switch (event.type) {
-            case 'at.overlap.nimbus.recipe-added':
-                await recipeReadModel.insertOne(event.data);
-                break;
-            case 'at.overlap.nimbus.recipe-updated':
-                await recipeReadModel.updateOne(
-                    { slug: event.data.slug },
-                    { $set: event.data },
-                );
-                break;
-            case 'at.overlap.nimbus.recipe-deleted':
-                await recipeReadModel.deleteOne({ slug: event.data.slug });
-                break;
-        }
-    },
-});
-```
-
-## Event Replay
-
-Event sourcing enables replaying events to rebuild state:
-
-```typescript
-// Replay all events for a subject
-const events = await eventStore.readEvents(subject);
-const currentState = events.reduce(recipeReducer, null);
-
-// Replay events up to a specific point in time
-const events = await eventStore.readEvents(subject, {
-    untilTime: '2024-01-01T00:00:00Z',
-});
-const pastState = events.reduce(recipeReducer, null);
+`/users/${userId}`
+`/users/${userId}/orders/${orderId}`
+`/organizations/${orgId}/members/${memberId}`
+
+// ❌ Bad - Flat and unclear
+`user-${userId}`
+`order_${orderId}`
```
-## Related Patterns
+## Publish & Subscribe Events
-- [Commands](/guide/core/commands) - Write operations that emit events
-- [Queries](/guide/core/queries) - Read operations
-- [Event Sourcing](/guide/eventsourcing/) - Using events as source of truth
-- [CQRS](/guide/what-is-nimbus#cqrs-event-sourcing) - Separating reads and writes
+Events are published and subscribed to using the [EventBus](/guide/core/event-bus). See the EventBus documentation for details on publishing and subscribing to events.
diff --git a/docs/guide/core/exceptions.md b/docs/guide/core/exceptions.md
index beae563..6b62cc4 100644
--- a/docs/guide/core/exceptions.md
+++ b/docs/guide/core/exceptions.md
@@ -1,18 +1,37 @@
+---
+prev:
+    text: "Logging"
+    link: "/guide/core/logging"
+
+next: false
+---
+
# Exceptions
-Nimbus defines a set of exceptions that you can use to handle errors in your application. These exceptions are used to communicate errors of a certain type.
+Nimbus provides a set of structured exceptions for handling errors in your application. These exceptions
+have an optional status code and can include additional details for debugging.
+
+## Status Codes
-## Examples
+The base `Exception` class has an optional status code that can be set when creating an exception, and you can assign any number as the value.
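+
+For illustration, a minimal sketch that assumes the base `Exception` class can also be instantiated directly, using the same `(name, message, details, statusCode)` constructor that the custom exception example further down passes to `super()`:
+
+```typescript
+import { Exception } from "@nimbus/core";
+
+// Hypothetical example: any numeric status code can be assigned
+throw new Exception(
+    "PAYMENT_REQUIRED",
+    "A paid plan is required for this feature",
+    { plan: "free" },
+    402,
+);
+```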
-You can optionally pass a message and a details object to provide further information. +However, Nimbus comes with some built-in exceptions that use the related HTTP status codes. As HTTP status codes are standardized and well-known we thought it would be a good idea to use them even though the Exceptions itself are transport agnostic. -All Exceptions have a `fromError()` method to convert a standard JavaScript error into a Nimbus exception. This takes care to keep the original error message and stack trace. +## Built-in Exception Types -For the `InvalidInputException` you can use the `fromZodError()` method to convert a Zod error into a Nimbus exception. This will keep the original error message and stack trace and also keeps the validation details. +| Exception | Status Code | Use Case | +| ----------------------- | ----------- | ------------------------------------------- | +| `GenericException` | 500 | Internal server errors, unexpected failures | +| `InvalidInputException` | 400 | Validation errors, malformed requests | +| `NotFoundException` | 404 | Resource not found | +| `UnauthorizedException` | 401 | Authentication required or failed | +| `ForbiddenException` | 403 | Authorization failed, access denied | -::: code-group +## Basic Usage -```typescript [Basics] +All exceptions accept an optional message and details object: + +```typescript import { ForbiddenException, GenericException, @@ -21,75 +40,148 @@ import { UnauthorizedException, } from "@nimbus/core"; -// Status code 500 +// Generic server error (500) throw new GenericException("Something went wrong"); -// Status code 400 -throw new InvalidInputException("The input is invalid", { foo: "bar" }); +// Invalid input with details (400) +throw new InvalidInputException("The input is invalid", { + field: "email", + reason: "Invalid email format", +}); -// Status code 401 +// Unauthorized (401) throw new UnauthorizedException(); -// Status code 403 +// Forbidden (403) throw new ForbiddenException(); -// Status code 404 -throw new NotFoundException("Account not found", { - errorCode: "ACCOUNT_NOT_FOUND", - reason: "The account with the provided id was not found", +// Not found with details (404) +throw new NotFoundException("User not found", { + errorCode: "USER_NOT_FOUND", + userId: "12345", }); ``` -```typescript [.fromError] -import { GenericException } from "@nimbus/core"; +## Converting from Standard Errors -const someError = new Error("Something went wrong"); +Use `fromError()` to convert a standard JavaScript error while preserving the stack trace: -const exception = new GenericException(); -exception.fromError(someError); +```typescript +import { GenericException } from "@nimbus/core"; -throw exception; +try { + await someExternalService.call(); +} catch (error) { + const exception = new GenericException(); + exception.fromError(error); + throw exception; +} ``` -```typescript [.fromZodError] +## Converting from Zod Errors + +If you need to manually handle Zod validation: + +```typescript import { InvalidInputException } from "@nimbus/core"; import { z } from "zod"; -const MyZodType = z.object({ - sub: z.string(), - groups: z.array(z.string()), +const UserSchema = z.object({ + email: z.email(), + name: z.string().min(1), }); try { - MyZodType.parse({ sub: 123, groups: ["bar"] }); + UserSchema.parse({ email: "invalid", name: "" }); } catch (error) { const exception = new InvalidInputException(); exception.fromZodError(error); - throw exception; } ``` -::: - -## Create a new exception +## Creating Custom Exceptions -In case 
you need to add other types of exceptions you can simply create a new exception by extending the `BaseException` class. +Create custom exceptions by extending the base `Exception` class: ```typescript import { Exception } from "@nimbus/core"; -export class MySpecialException extends Exception { +export class RateLimitException extends Exception { constructor(message?: string, details?: Record) { super( - "MY_SPECIAL_EXCEPTION", // The exception name - message ?? "Something Special", // provided message or fallback - details, // pass the provided details - 500 // the status code + "RATE_LIMIT_EXCEEDED", + message ?? "Rate limit exceeded", + details, + 429 // Too Many Requests ); } } // Usage -throw new MySpecialException("Something went wrong", { foo: "bar" }); +throw new RateLimitException("Too many requests", { + retryAfter: 60, + limit: 100, +}); +``` + +## HTTP Integration + +When using the `@nimbus/hono` package, exceptions are automatically converted to HTTP responses: + +```typescript +import { onError } from "@nimbus/hono"; +import { Hono } from "hono"; + +const app = new Hono(); + +// Configure error handler +app.onError(onError); + +// Exceptions thrown in routes are converted to JSON responses +app.get("/users/:id", async (c) => { + throw new NotFoundException("User not found", { + userId: c.req.param("id"), + }); + // Returns: { "error": "NOT_FOUND", "message": "User not found", "details": { "userId": "123" } } + // Status: 404 +}); +``` + +## Best Practices + +### Use Specific Exceptions + +Choose the most specific exception type for the situation: + +```typescript +// ✅ Good - Specific exception +throw new NotFoundException("Order not found"); + +// ❌ Bad - Generic exception for known error +throw new GenericException("Order not found"); +``` + +### Include Helpful Details + +Add details that help with debugging: + +```typescript +throw new InvalidInputException("Invalid order data", { + errorCode: "INVALID_ORDER", + field: "quantity", + value: -5, + constraint: "must be positive", +}); +``` + +### Use Error Codes + +Include machine- and human-readable error codes for client handling: + +```typescript +throw new NotFoundException("User not found", { + errorCode: "USER_NOT_FOUND", // Clients can check this AND translate it to a human-readable error message in multiple languages + userId: id, +}); ``` diff --git a/docs/guide/core/index.md b/docs/guide/core/index.md index 9eaa40e..9ec8846 100644 --- a/docs/guide/core/index.md +++ b/docs/guide/core/index.md @@ -4,16 +4,37 @@ prev: link: "/guide/project-structure" next: - text: "Commands" - link: "/guide/core/commands" + text: "Observability" + link: "/guide/core/observability" --- # Nimbus Core Package -The core package is the foundation of the entire framework. It provides the basic building blocks for all other packages to build upon. +The core package is the foundation of the entire framework. It provides the essential building blocks for building event-driven applications following the CloudEvents specification. [https://jsr.io/@nimbus/core](https://jsr.io/@nimbus/core) +::: info Example Application +The examples throughout the core documentation reference the hono-demo application. 
+ +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) +::: + +## What's Included + +The core package provides: + +- **[Observability](/guide/core/observability)** - Built-in logging, tracing, and metrics using OpenTelemetry +- **[Commands](/guide/core/commands)** - Write operations following the CloudEvents specification +- **[Queries](/guide/core/queries)** - Read operations for fetching data +- **[Events](/guide/core/events)** - Domain events for reactive architectures +- **[Router](/guide/core/router)** - Message routing with validation and tracing +- **[Event Bus](/guide/core/event-bus)** - Publish/subscribe for in-process events +- **[Exceptions](/guide/core/exceptions)** - Structured error handling with HTTP status codes +- **[Logging](/guide/core/logging)** - Structured logging with configurable formatters + +## Installation + ### Deno ```bash diff --git a/docs/guide/core/logging.md b/docs/guide/core/logging.md index a0bdbd3..525d95c 100644 --- a/docs/guide/core/logging.md +++ b/docs/guide/core/logging.md @@ -1,122 +1,196 @@ -# Logging - -Nimbus provides a very simple logger that enables you to log messages for different severity levels to the console. - -It is basically a wrapper around the `console` object and the `console.debug()`, `console.info()`, `console.warn()`, `console.error()` and `console.critical()` methods. - -It helps to have consistent logs with important meta information (timestamp, log level,category, error stack traces, etc) across your application. - -No other transports or sinks are supported. As we want to keep the core as lightweight as possible and encourage the use of tools like [OpenTelemetry](https://opentelemetry.io/) to transport logs for monitoring and tracing. +--- +prev: + text: "Event Bus" + link: "/guide/core/event-bus" -As [Deno supports OpenTelemetry](https://docs.deno.com/runtime/fundamentals/open_telemetry/) out of the box, you can easily transport logs to any other monitoring system without the need to change the code of the application. +next: + text: "Exceptions" + link: "/guide/core/exceptions" +--- -## Log Levels - -Nimbus supports the following log levels for logging messages. - -- `debug` - Outputs a `console.debug()` -- `info` - Outputs a `console.info()` -- `warn` - Outputs a `console.warn()` -- `error` - Outputs a `console.error()` -- `critical` - Outputs a `console.error()` - -Also `silent` can be used in the setup to completely disable log output. +# Logging -## Setup +Nimbus provides a structured logger that outputs consistent, formatted log messages to the console. The logger integrates with Deno's native OpenTelemetry support for automatic log export to observability backends. -Nimbus provides a simple function to setup the logger. You can pass in the log level and the formatter you want to use. +::: info Example Application +The examples on this page reference the hono-demo application. -The `prettyLogFormatter` is recommended for development environments only. In production you should use the `jsonLogFormatter`. +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) +::: -For the pretty formatter the `useConsoleColors` option can be used to enable colors in the console output. 
+## Setup and Configuration -::: code-group +Configure the logger at application startup using `setupLogger()`: -```typescript [main.ts] +```typescript import { jsonLogFormatter, parseLogLevel, prettyLogFormatter, setupLogger, } from "@nimbus/core"; +import process from "node:process"; setupLogger({ logLevel: parseLogLevel(process.env.LOG_LEVEL), formatter: - process.env.NODE_ENV === "development" + process.env.LOG_FORMAT === "pretty" ? prettyLogFormatter : jsonLogFormatter, - useConsoleColors: process.env.NODE_ENV === "development", + useConsoleColors: process.env.LOG_FORMAT === "pretty", }); ``` -::: +### Configuration Options + +| Option | Type | Default | Description | +| ------------------ | -------------- | ------------------ | -------------------------------------------- | +| `logLevel` | `LogLevel` | `'silent'` | Minimum level to output | +| `formatter` | `LogFormatter` | `jsonLogFormatter` | Function to format log records | +| `useConsoleColors` | `boolean` | `false` | Enable colored output (for pretty formatter) | + +## Log Levels + +Nimbus supports the following log levels in order of severity: -## Usage +| Level | Method | Description | +| ---------- | ----------------- | -------------------------------------------- | +| `debug` | `console.debug()` | Detailed debugging information | +| `info` | `console.info()` | General information about application flow | +| `warn` | `console.warn()` | Warning conditions that should be reviewed | +| `error` | `console.error()` | Error conditions that need attention | +| `critical` | `console.error()` | Critical failures requiring immediate action | +| `silent` | _(none)_ | Disables all log output | -The logger can be accessed via the `getLogger` function. -The logger is a singleton and will return the same instance every time it is called. +Messages below the configured log level are silently ignored. -To create a new log you can use the `info`, `warn`, `error` or `critical` methods depending on the severity of the message. +### Parsing Log Levels -The log input is an object that can contain the following properties: +Use `parseLogLevel()` to safely parse environment variables: -- `message` - The message to log. -- `correlationId` - An optional correlation ID to keep track of commands, queries, and events that are related to each other. -- `category` - An optional category of the log, useful for grouping logs together. -- `data` - Optional additional data to log, can be an object with any properties. -- `error` - Optional error object to log. +```typescript +import { parseLogLevel } from "@nimbus/core"; + +// Returns 'info' if LOG_LEVEL is 'info', otherwise returns default 'silent' +const level = parseLogLevel(process.env.LOG_LEVEL); +``` -The error object is specified as a dedicated property and not as part of the `data` object to make sure all error properties and the stack trace are preserved and logged correctly. +## Basic Usage -::: code-group +Access the logger using `getLogger()`: -```typescript [logExample.ts] +```typescript import { getLogger } from "@nimbus/core"; const logger = getLogger(); logger.debug({ - message: "Hello World!", - correlationId: "1234567890", - data: { foo: "bar" }, + message: "Processing request", + category: "API", + data: { method: "POST", path: "/users" }, + correlationId: "550e8400-e29b-41d4-a716-446655440000", }); -logger.info({ message: "Hello World!" 
}); +logger.info({ + message: "User created successfully", + category: "Users", + data: { userId: "12345" }, +}); logger.warn({ - category: "MyCategory", - message: "Ding Dong!", + message: "Rate limit approaching", + category: "API", + data: { currentRate: 95, maxRate: 100 }, }); logger.error({ - message: "Ohh no!", - error: new Error("Something went wrong!"), + message: "Failed to process payment", + category: "Payments", + error: new Error("Payment gateway timeout"), + correlationId: "550e8400-e29b-41d4-a716-446655440000", }); logger.critical({ - category: "MyCategory", - message: "It is over, run!", - error: new Error("Something is burning!"), - data: { - accountId: "1234567890", - foo: "bar", - }, + message: "Database connection lost", + category: "Database", + error: new Error("Connection refused"), }); ``` -::: +## Log Input + +The log input object can contain the following properties: + +| Property | Type | Description | +| --------------- | ------------------------- | ------------------------------------------------------------- | +| `message` | `string` | **Required.** The log message | +| `category` | `string` | Optional category for grouping logs (defaults to `'Default'`) | +| `data` | `Record` | Optional structured data to include | +| `error` | `Error` | Optional error with stack trace | +| `correlationId` | `string` | Optional ID for tracing related operations | -## Nimbus Logs +## Formatters -As the various Nimbus features have implemented log statements as well it uses the same logger provided by the `getLogger()` function. +Nimbus provides two built-in formatters: -Therefore all log statements from Nimbus will respect the log level and formatter you have configured for the application. +### JSON Formatter (Production) -In case you do not configure the logger in your application the Nimbus logs will use the default settings. +Outputs structured JSON for easy parsing by log aggregation tools: + +```typescript +import { jsonLogFormatter, setupLogger } from "@nimbus/core"; + +setupLogger({ + logLevel: "info", + formatter: jsonLogFormatter, +}); + +// Output: +// {"timestamp":"2025-01-22T10:00:00.000Z","level":"info","category":"Users","message":"User created","data":{"userId":"123"}} +``` + +### Pretty Formatter (Development) + +Outputs human-readable colored logs for development: + +```typescript +import { prettyLogFormatter, setupLogger, getLogger } from "@nimbus/core"; + +setupLogger({ + logLevel: "debug", + formatter: prettyLogFormatter, + useConsoleColors: true, +}); + +getLogger().debug({ + message: "My message", + category: "Category", + data: { userId: "12345" }, +}); + +// Outputs: +// [Category] DEBUG :: My message +// { +// userId: '12345' +// } +``` + +## OpenTelemetry Integration + +When combined with Deno's native OpenTelemetry support, logs are automatically exported alongside traces and metrics. See the [Observability](/guide/core/observability) documentation for details on enabling OTEL export. + +```bash +export OTEL_DENO=true +export OTEL_EXPORTER_OTLP_ENDPOINT="https://your-otlp-endpoint.com/otlp" +export OTEL_SERVICE_NAME=your-service-name + +deno run src/main.ts +``` ## Default Settings +If `setupLogger()` is not called, the logger uses these defaults: + ```typescript const defaultSettings = { logLevel: "silent", @@ -124,3 +198,61 @@ const defaultSettings = { useConsoleColors: false, }; ``` + +This means logs are silent by default - you must explicitly configure the logger to see output. 
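+
+As a minimal sketch (assuming `setupLogger()` accepts a partial configuration and falls back to the defaults above for anything omitted), a single call is enough to turn logging on:
+
+```typescript
+import { getLogger, setupLogger } from "@nimbus/core";
+
+// Raise the level from the default 'silent'; the formatter stays jsonLogFormatter
+setupLogger({ logLevel: "info" });
+
+getLogger().info({ message: "Logger is now active", category: "Startup" });
+```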
+
+## Nimbus Internal Logs
+
+All Nimbus components (Router, EventBus, etc.) use the same logger configured via `setupLogger()`. This ensures consistent log formatting and level filtering across your application.
+
+## Best Practices
+
+### Use Categories
+
+Group related logs with consistent category names:
+
+```typescript
+logger.info({ message: "Query executed", category: "Database" });
+logger.info({ message: "Request received", category: "API" });
+logger.info({ message: "Email sent", category: "Notifications" });
+```
+
+### Include Correlation IDs
+
+Always include correlation IDs when available for distributed tracing:
+
+```typescript
+logger.info({
+    message: "Processing order",
+    category: "Orders",
+    data: { orderId: order.id },
+    correlationId: command.correlationid,
+});
+```
+
+### Log Errors Properly
+
+Use the dedicated `error` property for errors to preserve stack traces:
+
+```typescript
+// ✅ Good - Error is properly captured
+logger.error({
+    message: "Failed to save user",
+    error: error,
+    correlationId: command.correlationid,
+});
+
+// ❌ Bad - Stack trace is lost
+logger.error({
+    message: "Failed to save user",
+    data: { error: error.message },
+});
+```
+
+### Use Appropriate Log Levels
+
+- `debug`: Detailed info for debugging (disabled in production)
+- `info`: Normal application flow
+- `warn`: Unexpected but recoverable situations
+- `error`: Errors that need investigation
+- `critical`: Failures requiring immediate action
diff --git a/docs/guide/core/queries.md b/docs/guide/core/queries.md
index 57f3d67..ba0318e 100644
--- a/docs/guide/core/queries.md
+++ b/docs/guide/core/queries.md
@@ -1,304 +1,106 @@
+---
+prev:
+    text: "Commands"
+    link: "/guide/core/commands"
+
+next:
+    text: "Events"
+    link: "/guide/core/events"
+---
+
# Queries

Queries represent read operations - requests for information without changing application state.

-Queries follow the Query pattern from CQRS (Command Query Responsibility Segregation), where reads are separated from writes to allow independent optimization and scaling.
+Queries also fit perfectly into the CQRS pattern (Command Query Responsibility Segregation), where reads and writes are separated for better scalability and maintainability. But keep it simple for your use case and needs. CQRS is an option, but not required.

::: info Example Application
-The examples on this page reference the Gustav application.
+The examples on this page reference the hono-demo application.
-You can find the full example on GitHub: [Gustav Recipe App](https://github.com/overlap-dev/Nimbus/tree/main/examples/gustav) +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) ::: ## Key Characteristics -- **Read Operations**: Queries fetch data without modifying state -- **Idempotent**: Multiple executions return the same result (if data hasn't changed) -- **Optimized for Reading**: Can use specialized read models or databases -- **Type-Safe**: Full TypeScript type safety for query parameters and results +- **Read Operations**: Queries fetch data without modifying state +- **Idempotent**: Multiple executions return the same result (if data hasn't changed) +- **Type-Safe**: Queries are fully typed and validated using Zod +- **Optimized for Reading**: Can use specialized read models or databases ## Query Structure A query in Nimbus follows the CloudEvents specification and consists of: ```typescript -export type Query = { - specversion: '1.0'; +type Query = { + specversion: "1.0"; id: string; correlationid: string; time: string; source: string; type: string; - data: T; - datacontenttype: string; -}; -``` - -## Example: Get Recipe Query - -### Define the Query Type - -Create a query type definition in the core layer: - -```typescript -// core/queries/getRecipe.ts -import { Query } from '@nimbus/core'; -import { Recipe } from '../domain/recipe.ts'; - -export const GetRecipeQueryType = 'at.overlap.nimbus.get-recipe' as const; - -export type GetRecipeParams = { - slug: string; -}; - -export type GetRecipeQuery = Query & { - type: typeof GetRecipeQueryType; + data: TData; + datacontenttype?: string; + dataschema?: string; }; ``` -### Implement Core Logic with Port - -The core defines the query logic and uses a port (interface) for data access: +| Property | Description | +| ----------------- | -------------------------------------------------------------------------------- | +| `specversion` | The CloudEvents specification version (always `'1.0'`) | +| `id` | A globally unique identifier for the query | +| `correlationid` | A unique identifier to correlate this query with related messages | +| `time` | ISO 8601 timestamp when the query was created | +| `source` | A URI reference identifying the system creating the query | +| `type` | The query type following CloudEvents naming (e.g., `at.overlap.nimbus.get-user`) | +| `data` | The query parameters (e.g., filters, pagination) | +| `datacontenttype` | Optional MIME type of the data (defaults to `application/json`) | +| `dataschema` | Optional URL to the schema the data adheres to | -```typescript -// core/queries/getRecipe.ts -import { RecipeRepository } from '../ports/recipeRepository.ts'; +## Query Schema -export const getRecipe = async ( - query: GetRecipeQuery, - repository: RecipeRepository, -): Promise => { - return await repository.getBySlug(query.data.slug); -}; -``` - -### Define the Port - -The port is an interface that defines the contract for data access: +Nimbus provides a base Zod schema for validating queries: ```typescript -// core/ports/recipeRepository.ts -export interface RecipeRepository { - getBySlug(slug: string): Promise; - list(): Promise; -} -``` - -### Implement Shell Handler - -The handler provides the repository implementation and calls the core: - -```typescript -// infrastructure/http/handler/getRecipe.handler.ts -import { MessageHandler } from '@nimbus/core'; -import { Recipe } from '../../../core/domain/recipe.ts'; -import { getRecipe, 
GetRecipeQuery } from '../../../core/queries/getRecipe.ts'; -import { recipeMemoryRepository } from '../../repository/recipeMemoryRepository.ts'; - -export const getRecipeHandler: MessageHandler = - async (query) => { - const recipe = await getRecipe(query, recipeMemoryRepository); - return recipe; - }; -``` - -### Implement the Adapter - -The adapter provides the actual implementation of the repository port: - -```typescript -// infrastructure/repository/recipeMemoryRepository.ts -import { NotFoundException } from '@nimbus/core'; -import { Recipe } from '../../core/domain/recipe.ts'; -import { RecipeRepository } from '../../core/ports/recipeRepository.ts'; - -const recipes = new Map(); - -export const recipeMemoryRepository: RecipeRepository = { - async getBySlug(slug: string): Promise { - const recipe = recipes.get(slug); - if (!recipe) { - throw new NotFoundException('Recipe not found', { - errorCode: 'RECIPE_NOT_FOUND', - }); - } - return recipe; - }, +import { querySchema } from "@nimbus/core"; +import { z } from "zod"; + +// Extend the base schema with your specific query type and data +const getUserQuerySchema = querySchema.extend({ + type: z.literal("at.overlap.nimbus.get-user"), + data: z.object({ + id: z.string().length(24), + }), +}); - async list(): Promise { - return Array.from(recipes.values()); - }, -}; +type GetUserQuery = z.infer; ``` -## Architecture Pattern - -Queries follow the Pure Core - Imperative Shell pattern with ports and adapters: - -1. **Core Layer**: - - Define query types - - Define ports (interfaces) for data access - - Implement query logic that uses ports - - Apply business rules for data filtering/transformation - -2. **Infrastructure Layer** (Shell): - - Implement adapters that fulfill port contracts - - Define message handlers - - Connect handlers to adapters - - Handle errors and responses - -## Read Models - -In CQRS systems, queries often read from optimized read models rather than the event store: - -```typescript -// infrastructure/readModel/recipeReadModel.ts -import { MongoCollection } from '@nimbus/mongodb'; -import { Recipe } from '../../core/domain/recipe.ts'; - -export const recipeReadModel = new MongoCollection('recipes'); - -// Update read model when events occur -export const updateRecipeReadModel = async (event: RecipeAddedEvent) => { - await recipeReadModel.insertOne(event.data); -}; -``` +## Create Queries -The read model is kept in sync by subscribing to domain events: +You can create queries using the `createQuery()` helper: ```typescript -// Event handler updates the read model -eventStore.observe({ - subjects: ['/recipes/*'], - handler: async (event) => { - switch (event.type) { - case 'at.overlap.nimbus.recipe-added': - await recipeReadModel.insertOne(event.data); - break; - case 'at.overlap.nimbus.recipe-updated': - await recipeReadModel.updateOne( - { slug: event.data.slug }, - event.data, - ); - break; - case 'at.overlap.nimbus.recipe-deleted': - await recipeReadModel.deleteOne({ slug: event.data.slug }); - break; - } +import { createQuery } from "@nimbus/core"; +import { GetUserQuery } from "./getUser.query.ts"; + +const query = createQuery({ + type: "at.overlap.nimbus.get-user", + source: "nimbus.overlap.at", + data: { + id: "123", }, }); ``` -## Best Practices - -### Keep Queries Simple - -Queries should focus on data retrieval with minimal business logic: - -```typescript -// ✅ Good - Simple data retrieval -export const getRecipe = async ( - query: GetRecipeQuery, - repository: RecipeRepository, -): Promise => { - return 
await repository.getBySlug(query.data.slug); -}; - -// ⚠️ Consider - Business logic might belong in query -export const getRecipe = async ( - query: GetRecipeQuery, - repository: RecipeRepository, - authContext: AuthContext, -): Promise => { - const recipe = await repository.getBySlug(query.data.slug); - - // Filter sensitive data based on permissions - if (!authContext.hasRole('admin')) { - delete recipe.internalNotes; - } - - return recipe; -}; -``` - -### Use Pagination for Lists +The `createQuery()` helper automatically generates default values for: -Always paginate list queries to prevent performance issues: - -```typescript -export type ListRecipesParams = { - page?: number; - pageSize?: number; - category?: string; -}; - -export type ListRecipesQuery = Query & { - type: typeof ListRecipesQueryType; -}; - -export const listRecipes = async ( - query: ListRecipesQuery, - repository: RecipeRepository, -): Promise<{ recipes: Recipe[]; total: number }> => { - const { page = 1, pageSize = 20, category } = query.data; - return await repository.list({ page, pageSize, category }); -}; -``` - -### Optimize Read Models - -Read models should be denormalized and optimized for specific query patterns: - -```typescript -// Denormalized read model for recipe list view -export type RecipeListItem = { - slug: string; - title: string; - category: string; - cookingTime: number; - difficulty: string; - thumbnailUrl: string; - // No full ingredient list or instructions -}; - -// Separate detailed read model for single recipe view -export type RecipeDetail = Recipe & { - relatedRecipes: string[]; - authorInfo: AuthorInfo; -}; -``` - -### Cache Frequently Accessed Data - -Consider caching for queries that are called frequently: - -```typescript -const recipeCache = new Map(); -const CACHE_TTL = 5 * 60 * 1000; // 5 minutes - -export const getRecipe = async ( - query: GetRecipeQuery, - repository: RecipeRepository, -): Promise => { - const cached = recipeCache.get(query.data.slug); - if (cached) return cached; - - const recipe = await repository.getBySlug(query.data.slug); - recipeCache.set(query.data.slug, recipe); - - setTimeout(() => recipeCache.delete(query.data.slug), CACHE_TTL); - - return recipe; -}; -``` +- `id` - A unique ULID +- `correlationid` - A unique ULID (if not provided) +- `time` - Current ISO timestamp +- `specversion` - Always `'1.0'` +- `datacontenttype` - Defaults to `'application/json'` ## Routing Queries -Queries are routed to handlers using the message router. See the [HTTP Guide](/guide/http/) for more details on routing queries through HTTP endpoints. - -## Related Patterns - -- [Commands](/guide/core/commands) - Write operations -- [Events](/guide/core/events) - Domain events -- [Event Sourcing](/guide/eventsourcing/) - Event-based state management -- [CQRS](/guide/what-is-nimbus#cqrs-event-sourcing) - Separating reads and writes +Queries are routed to handlers using the [MessageRouter](/guide/core/router). See the Router documentation for details on registering handlers and routing messages. diff --git a/docs/guide/core/router.md b/docs/guide/core/router.md index 18e1a93..de37650 100644 --- a/docs/guide/core/router.md +++ b/docs/guide/core/router.md @@ -1,79 +1,171 @@ -# Router +--- +prev: + text: "Events" + link: "/guide/core/events" -The Nimbus router is responsible to take any input and route it to the correct handler. It is the entry point for all incoming messages. 
+next: + text: "Event Bus" + link: "/guide/core/event-bus" +--- -## Example +# Router -In this example we create a router for the `addAccount` Command, the `getAccount` Query and the `accountAdded` Event from the previous examples. +The MessageRouter is responsible for routing incoming messages (commands, queries, and events) to their registered handlers. It provides automatic validation, type safety, and observability for all routed messages. -```typescript -import { createRouter } from "@nimbus/core"; +::: info Example Application +The examples on this page reference the hono-demo application. -import { getAccountHandler } from "./queries/getAccount.handler.ts"; -import { GetAccountQuery } from "../core/queries/getAccount.ts"; +You can find the full example on GitHub: [hono-demo](https://github.com/overlap-dev/Nimbus/tree/main/examples/hono-demo) +::: -import { addAccountHandler } from "./commands/addAccount.handler.ts"; -import { AddAccountCommand } from "../core/command/addAccount.ts"; +## Setup and Configuration -import { accountAddedHandler } from "./events/accountAdded.handler.ts"; -import { AccountAddedEvent } from "../core/events/accountAdded.ts"; +Configure the router at application startup using `setupRouter()`, then retrieve it anywhere using `getRouter()`. -const accountRouter = createRouter({ - handlerMap: { - GET_ACCOUNT: { - handler: getAccountHandler, - inputType: GetAccountQuery, - }, - ADD_ACCOUNT: { - handler: addAccountHandler, - inputType: AddAccountCommand, - }, - ACCOUNT_ADDED: { - handler: accountAddedHandler, - inputType: AccountAddedEvent, - }, - }, -}); +```typescript +import { getLogger, setupRouter } from "@nimbus/core"; -// Will result in a successful response -const result = await accountRouter({ - name: "GET_ACCOUNT", - params: { - id: "67580951d5260d05eaa7f913", +setupRouter("MyRouter", { + logInput: (input) => { + getLogger().debug({ + category: "MyRouter", + message: "Received input", + data: { input }, + ...(input?.correlationid + ? { correlationId: input.correlationid } + : {}), + }); }, - metadata: { - correlationId: "123", - authContext: { - sub: "admin@host.tld", - }, + logOutput: (output) => { + getLogger().debug({ + category: "MyRouter", + message: "Output", + data: { output }, + ...(output?.correlationid + ? { correlationId: output.correlationid } + : {}), + }); }, }); +``` + +### Configuration Options -// Will throw an InvalidInputException as the id parameter is missing -const result = await accountRouter({ - name: "UNKNOWN_QUERY", - params: {}, - metadata: { - correlationId: "123", - authContext: { - sub: "admin@host.tld", +| Option | Type | Description | +| ----------- | ----------------------- | ---------------------------------------------------- | +| `logInput` | `(input: any) => void` | Optional callback invoked when a message is received | +| `logOutput` | `(output: any) => void` | Optional callback invoked after successful handling | + +## Registering Handlers + +Register handlers for message types using the `register()` method: + +```typescript +import { getRouter } from "@nimbus/core"; + +export const registerUserMessages = () => { + const router = getRouter("MyRouter"); + + // Register a command + router.register( + "at.overlap.nimbus.add-user", + async (command: AddUserCommand) => { + // Process command and return result }, - }, -}); + addUserCommandSchema + ); -// Will throw an NotFoundException as no route for UNKNOWN_QUERY is defined. 
-const result = await accountRouter({ - name: "UNKNOWN_QUERY", - params: {}, - metadata: { - correlationId: "123", - authContext: { - sub: "admin@host.tld", + // Register an event + router.register( + "at.overlap.nimbus.user-added", + async (event: UserAddedEvent) => { + // Process event and return result }, - }, + addUserCommandSchema + ); + + // Register a query + router.register( + "at.overlap.nimbus.get-user", + async (query: GetUserQuery) => { + // Process query and return result + }, + getUserQuerySchema + ); +}; +``` + +The `register()` method takes three arguments: + +| Argument | Description | +| ------------- | ------------------------------------------------------------------ | +| `messageType` | The CloudEvents type string (e.g., `'at.overlap.nimbus.add-user'`) | +| `handler` | An async function that processes the message and returns a result | +| `schema` | A Zod schema used to validate the incoming message | + +## Routing Messages + +Route messages to their handlers using the `route()` method: + +```typescript +import { createCommand, getRouter } from "@nimbus/core"; + +const command = createCommand({ + type: "at.overlap.nimbus.add-user", + source: "nimbus.overlap.at", + correlationid: httpRequestCorrelationId, + data: httpRequestBody, }); + +const router = getRouter("MyRouter"); + +const result = await router.route(command); ``` -## Type Safety +## Validation + +The router automatically validates incoming messages against their registered schemas: + +1. **Message Type Check**: Verifies the message has a `type` attribute +2. **Handler Lookup**: Finds the registered handler for the message type +3. **Schema Validation**: Validates the message against the Zod schema +4. **Handler Execution**: Passes the validated message to the handler + +If validation fails, an `InvalidInputException` is thrown with details about the validation errors: + +```typescript +{ + name: 'INVALID_INPUT', + message: 'The provided input is invalid', + statusCode: 400, + details: { + issues: [ + { path: ['data', 'email'], message: 'Invalid email' } + ] + } +} +``` + +## Error Handling + +The router throws appropriate exceptions for different error conditions: + +| Error | Exception | Description | +| ------------------ | ----------------------- | --------------------------------------------- | +| Missing type | `InvalidInputException` | The message has no `type` attribute | +| Unknown type | `NotFoundException` | No handler registered for the message type | +| Validation failure | `InvalidInputException` | The message failed schema validation | +| Handler error | _(propagated)_ | Errors from handlers are propagated unchanged | + +## Observability + +The router is fully instrumented with OpenTelemetry tracing and metrics. See the [Observability](/guide/core/observability) documentation for details. + +**Tracing**: + +- Automatic spans for every routed message + +**Metrics**: -The router will validate the input against the input type defined in the handler map and will throw an `InvalidInputException` if the input is invalid. This ensures that the handler function will always receive the correct type checked input. 
+- `router_messages_routed_total` counter +- `router_routing_duration_seconds` histogram diff --git a/docs/guide/eventsourcing/aggregate.md b/docs/guide/eventsourcing/aggregate.md deleted file mode 100644 index 80890aa..0000000 --- a/docs/guide/eventsourcing/aggregate.md +++ /dev/null @@ -1 +0,0 @@ -# Aggregate diff --git a/docs/guide/eventsourcing/event-store.md b/docs/guide/eventsourcing/event-store.md deleted file mode 100644 index 528759a..0000000 --- a/docs/guide/eventsourcing/event-store.md +++ /dev/null @@ -1 +0,0 @@ -# Event Store diff --git a/docs/guide/eventsourcing/index.md b/docs/guide/eventsourcing/index.md deleted file mode 100644 index 5e7658e..0000000 --- a/docs/guide/eventsourcing/index.md +++ /dev/null @@ -1,24 +0,0 @@ -# Event Sourcing - -The eventsourcing package provides a simple set of abstractions for working with event sourcing. -Like event store and aggregate utilities. - -[https://jsr.io/@nimbus/eventsourcing](https://jsr.io/@nimbus/eventsourcing) - -### Deno - -```bash -deno add jsr:@nimbus/eventsourcing -``` - -### NPM - -```bash -npx jsr add @nimbus/eventsourcing -``` - -### Bun - -```bash -bunx jsr add @nimbus/eventsourcing -``` diff --git a/docs/guide/eventsourcing/why-event-sourcing.md b/docs/guide/eventsourcing/why-event-sourcing.md deleted file mode 100644 index 760e091..0000000 --- a/docs/guide/eventsourcing/why-event-sourcing.md +++ /dev/null @@ -1,7 +0,0 @@ -# Why Event Sourcing? - -Data quality and completeness is the backbone of all data analytics and AI applications. - -All the hype about AI is cool but in reality it is not the latest hyped AI Model that is the most important. It is the data that a model can use to learn, predict, enhance, or recommend something. - -Event Sourcing is the most powerful pattern to achieve this. diff --git a/docs/guide/eventsourcingdb/event-store.md b/docs/guide/eventsourcingdb/event-store.md deleted file mode 100644 index 1c230b3..0000000 --- a/docs/guide/eventsourcingdb/event-store.md +++ /dev/null @@ -1 +0,0 @@ -# EventSourcingDB Event Store diff --git a/docs/guide/eventsourcingdb/index.md b/docs/guide/eventsourcingdb/index.md deleted file mode 100644 index 1b615e2..0000000 --- a/docs/guide/eventsourcingdb/index.md +++ /dev/null @@ -1,23 +0,0 @@ -# EventSourcingDB - -The eventsourcingdb package provides an adapter for the EventStore interface to work with [EventSourcingDB](https://eventsourcingdb.io/). - -[https://jsr.io/@nimbus/eventsourcingdb](https://jsr.io/@nimbus/eventsourcingdb) - -### Deno - -```bash -deno add jsr:@nimbus/eventsourcingdb -``` - -### NPM - -```bash -npx jsr add @nimbus/eventsourcingdb -``` - -### Bun - -```bash -bunx jsr add @nimbus/eventsourcingdb -``` diff --git a/docs/guide/observability.md b/docs/guide/observability.md new file mode 100644 index 0000000..066b854 --- /dev/null +++ b/docs/guide/observability.md @@ -0,0 +1,234 @@ +--- +prev: + text: "Core" + link: "/guide/core" + +next: + text: "Commands" + link: "/guide/core/commands" +--- + +# TODOS: + +- Get over documentation and simplify code examples. +- fromZodError deprecated? +- Overthink structure + - Observability in main section not under core +- work on documentation for other packages + +# Observability + +Observability is a first-class citizen in Nimbus. The framework is designed so that developers can focus on business logic without implementing logging, tracing, and metrics from the ground up. + +## Philosophy + +Nimbus follows the principle that observability should be built-in, not bolted-on. 
Every core component - from message routing to event handling - comes with automatic instrumentation. This means: + +- **Zero boilerplate** - Tracing spans and metrics are created automatically +- **Consistent structure** - All logs follow the same format across your application +- **Correlation built-in** - Every message carries a correlation ID for distributed tracing + +The three pillars of observability in Nimbus: + +1. **Logging** - Structured console output with configurable formatters +2. **Tracing** - Distributed traces via OpenTelemetry spans +3. **Metrics** - Counters and histograms for monitoring + +## OpenTelemetry Standards + +Nimbus uses the [OpenTelemetry API](https://opentelemetry.io/) (`@opentelemetry/api`) for all observability instrumentation. This provides: + +- **Vendor-agnostic** - Export to any OTLP-compatible backend (Jaeger, Zipkin, Grafana, Honeycomb, Datadog, etc.) +- **Industry standard** - Wide ecosystem support and community adoption +- **Future-proof** - Backed by CNCF with active development + +## Deno Native Observability + +Nimbus builds upon [Deno's native OpenTelemetry support](https://docs.deno.com/runtime/fundamentals/open_telemetry/). With Deno 2.x, you can enable OTEL export with zero additional dependencies. + +### Enabling OpenTelemetry + +Set environment variables to enable OTEL export: + +```bash +export OTEL_DENO=true +export OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" +export OTEL_EXPORTER_OTLP_ENDPOINT="https://your-otlp-endpoint.com/otlp" +export OTEL_SERVICE_NAME=your-service-name +export OTEL_RESOURCE_ATTRIBUTES=deployment.environment=production + +deno run -A src/main.ts +``` + +All traces, metrics, and logs from Nimbus will automatically be exported to your configured backend. + +## Built-in Instrumentation + +### MessageRouter + +The [MessageRouter](/guide/core/router) automatically creates spans for every routed message: + +**Tracing:** + +- Span name: `router.route` +- Attributes: `messaging.system`, `messaging.router_name`, `messaging.destination`, `correlation_id` + +**Metrics:** + +- `router_messages_routed_total` - Counter for total messages routed (with `status: success|error`) +- `router_routing_duration_seconds` - Histogram of routing duration + +### EventBus + +The [NimbusEventBus](/guide/core/event-bus) instruments both publishing and handling: + +**Tracing:** + +- `eventbus.publish` span for event publishing +- `eventbus.handle` span for event handling with retry tracking + +**Metrics:** + +- `eventbus_events_published_total` - Counter for published events +- `eventbus_events_delivered_total` - Counter for delivered events (with `status: success|error`) +- `eventbus_event_handling_duration_seconds` - Histogram of handler execution time +- `eventbus_retry_attempts_total` - Counter for retry attempts +- `eventbus_event_size_bytes` - Histogram of event sizes + +### Logger + +The [Logger](/guide/core/logging) outputs structured logs to the console. When combined with Deno's OTEL support, logs are automatically exported alongside traces and metrics. 
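A minimal sketch of wiring the structured logger up at startup, using the `setupLogger` and `parseLogLevel` helpers that also appear in the Hono setup example. Once Deno's OTEL export is enabled, logs emitted this way are shipped alongside traces and metrics; the `Startup` category and the log payload are placeholders for this sketch.

```typescript
import { getLogger, parseLogLevel, setupLogger } from "@nimbus/core";

// Configure the logger once at application startup.
setupLogger({
    logLevel: parseLogLevel(process.env.LOG_LEVEL),
});

// Retrieve the configured logger anywhere in the application.
getLogger().info({
    category: "Startup",
    message: "Service started",
    data: { otelExportEnabled: process.env.OTEL_DENO === "true" },
});
```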
+ +## Custom Tracing with withSpan() + +For business logic that needs custom tracing, use the `withSpan()` higher-order function: + +```typescript +import { withSpan } from "@nimbus/core"; + +const fetchUser = withSpan( + { + name: "fetchUser", + attributes: { + "user.source": "database", + }, + }, + async (userId: string) => { + return await db.users.findById(userId); + } +); + +// Usage - automatically traced +const user = await fetchUser("123"); +``` + +### Adding Dynamic Attributes + +Access the span within your function to add attributes based on runtime data: + +```typescript +import { withSpan } from "@nimbus/core"; +import { Span } from "@opentelemetry/api"; + +const processOrder = withSpan( + { name: "processOrder" }, + async (orderId: string, span: Span) => { + const order = await db.orders.findById(orderId); + + // Add attributes based on the order + span.setAttribute("order.total", order.total); + span.setAttribute("order.items", order.items.length); + + return await processPayment(order); + } +); +``` + +### Options + +| Option | Type | Description | +| ------------ | ------------ | ------------------------------------------ | +| `name` | `string` | The span name displayed in your tracing UI | +| `tracerName` | `string` | Tracer name (defaults to `"nimbus"`) | +| `kind` | `SpanKind` | Span kind (defaults to `INTERNAL`) | +| `attributes` | `Attributes` | Initial attributes to set on the span | + +## Correlation IDs + +All messages in Nimbus (Commands, Queries, Events) carry a `correlationid` field. This enables: + +- **Request tracing** - Follow a request through commands, events, and queries +- **Log correlation** - Group related logs together +- **Distributed tracing** - Track requests across services + +The correlation ID is automatically: + +- Generated when creating messages with `createCommand()`, `createQuery()`, or `createEvent()` +- Propagated from commands to events they produce +- Included in log output when provided +- Added as a span attribute for tracing + +```typescript +// Correlation ID is passed from command to event +const command = createCommand({ + type: ADD_USER_COMMAND_TYPE, + source: "nimbus.overlap.at", + correlationid: getCorrelationId(c), // From HTTP request + data: body, +}); + +// In the handler, create event with same correlation ID +const event = createEvent({ + type: USER_ADDED_EVENT_TYPE, + source: "nimbus.overlap.at", + correlationid: command.correlationid, // Propagate + data: state, +}); +``` + +## Best Practices + +### Use Structured Logging + +Always use the structured logger instead of `console.log`: + +```typescript +import { getLogger } from "@nimbus/core"; + +// Good - structured and traceable +getLogger().info({ + message: "User created", + category: "Users", + data: { userId: user.id }, + correlationId: command.correlationid, +}); + +// Avoid - unstructured +console.log("User created:", user.id); +``` + +### Propagate Correlation IDs + +Always pass correlation IDs when creating events from commands: + +```typescript +const event = createEvent({ + type: USER_ADDED_EVENT_TYPE, + source: "nimbus.overlap.at", + correlationid: command.correlationid, // Always propagate + data: state, +}); +``` + +### Use withSpan for Important Operations + +Wrap critical business logic with `withSpan()` for visibility: + +```typescript +const validatePayment = withSpan( + { name: "validatePayment" }, + async (paymentDetails: PaymentDetails) => { + // Critical logic is now traced + } +); +``` diff --git a/docs/guide/project-structure.md 
b/docs/guide/project-structure.md deleted file mode 100644 index b02abe3..0000000 --- a/docs/guide/project-structure.md +++ /dev/null @@ -1,40 +0,0 @@ -# Project Structure - -Nimbus is not opinionated about the project structure and you can adjust it to your needs. But here is a suggestion on how to structure your project. - -Let's say we are building an application to track expenses so we can come up with the following structure based on a Domain-Driven-Design (DDD) approach. - -::: info Example Application -You can find the full example on GitHub [The Expense Repo](https://github.com/overlap-dev/Nimbus/tree/main/examples/the-expense) - -Check it out and run it with `deno task dev` -::: - -``` -/- - |- src - |- account - |- core - |- shell - |- auth - |- core - |- shell - |- another-domain - |- context-one - |- core - |- shell - |- context-two - |- core - |- shell - |- shared - |- ... - |- main.ts - |- .gitignore - |- deno.json - |- deno.lock - |- README.md -``` - -At first we want to separate the different domains and contexts of our problem. So we create a directory for each domain like `account` and `auth`. We also have a `shared` directory for things that are used across multiple domains. As seen in the example above, we can separate multiple contexts within a domain into their own directories like `context-one` and `context-two` under `another-domain`. - -And secondly we want to separate the core logic from the shell implementation. So we create a `core` and a `shell` directory in each context. diff --git a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts index 6f544d4..7bb4abb 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts @@ -28,16 +28,17 @@ export const addUserCommandHandler = async (command: AddUserCommand) => { state = await userRepository.insertOne({ item: state, }); - } - const event = createEvent({ - type: USER_ADDED_EVENT_TYPE, - source: 'nimbus.overlap.at', - correlationid: command.correlationid, - data: state, - }); + const event = createEvent({ + type: USER_ADDED_EVENT_TYPE, + source: 'nimbus.overlap.at', + correlationid: command.correlationid, + subject: `/users/${state._id}`, + data: state, + }); - eventBus.putEvent(event); + eventBus.putEvent(event); + } return state; }; diff --git a/packages/core/src/lib/exception/invalidInputException.ts b/packages/core/src/lib/exception/invalidInputException.ts index cffc29c..fe25bcc 100644 --- a/packages/core/src/lib/exception/invalidInputException.ts +++ b/packages/core/src/lib/exception/invalidInputException.ts @@ -21,8 +21,6 @@ export class InvalidInputException extends Exception { * @param {ZodError} error - The Zod error. * * @returns {InvalidInputException} The InvalidInputException. - * - * @deprecated We do not want to depend on Zod anymore. */ public fromZodError(error: ZodError): InvalidInputException { if (error.stack) { diff --git a/packages/core/src/lib/message/event.ts b/packages/core/src/lib/message/event.ts index d91feb3..57728b4 100644 --- a/packages/core/src/lib/message/event.ts +++ b/packages/core/src/lib/message/event.ts @@ -88,6 +88,7 @@ export const eventSchema = z.object({ export type CreateEventInput = Partial> & { type: string; source: string; + subject: string; data: unknown; }; @@ -115,7 +116,7 @@ export const createEvent = ( time: time ?? 
new Date().toISOString(), source, type, - ...(subject && { subject }), + subject, data, datacontenttype: datacontenttype ?? 'application/json', ...(dataschema && { dataschema }), diff --git a/packages/core/src/lib/message/router.ts b/packages/core/src/lib/message/router.ts index 90423f6..39f19e5 100644 --- a/packages/core/src/lib/message/router.ts +++ b/packages/core/src/lib/message/router.ts @@ -265,10 +265,7 @@ export class MessageRouter { if (!validationResult.success) { throw new InvalidInputException( 'The provided input is invalid', - { - issues: validationResult.error.issues, - }, - ); + ).fromZodError(validationResult.error); } const result = await handler(validationResult.data); From 6047936cb720c2342f2788dde78639aea1edde51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Sat, 24 Jan 2026 17:33:43 +0100 Subject: [PATCH 24/37] more docs --- docs/.vitepress/config.mjs | 18 ++- docs/guide/core/exceptions.md | 4 +- docs/guide/hono/correlationid.md | 11 ++ docs/guide/hono/index.md | 33 +++++ docs/guide/hono/logger.md | 11 ++ docs/guide/hono/on-error.md | 11 ++ docs/guide/http/index.md | 33 ----- docs/guide/http/router.md | 1 - docs/guide/mongodb/index.md | 4 +- docs/guide/observability.md | 12 +- docs/guide/quickstart.md | 3 +- docs/guide/what-is-nimbus.md | 216 ++++--------------------------- docs/index.md | 10 +- 13 files changed, 114 insertions(+), 253 deletions(-) create mode 100644 docs/guide/hono/correlationid.md create mode 100644 docs/guide/hono/index.md create mode 100644 docs/guide/hono/logger.md create mode 100644 docs/guide/hono/on-error.md delete mode 100644 docs/guide/http/index.md delete mode 100644 docs/guide/http/router.md diff --git a/docs/.vitepress/config.mjs b/docs/.vitepress/config.mjs index a6fc2be..6033af7 100644 --- a/docs/.vitepress/config.mjs +++ b/docs/.vitepress/config.mjs @@ -3,7 +3,7 @@ import { defineConfig } from "vitepress"; // https://vitepress.dev/reference/site-config export default defineConfig({ title: "Nimbus", - description: "A Framework to build event-driven applications in the cloud.", + description: "Build event-driven applications with typescript.", themeConfig: { // https://vitepress.dev/reference/default-theme-config logo: @@ -72,12 +72,20 @@ export default defineConfig({ }, { - text: "HTTP (Oak)", - link: "/guide/http", + text: "Hono", + link: "/guide/hono", items: [ { - text: "Router", - link: "/guide/http/router", + text: "CorrelationID Middleware", + link: "/guide/hono/correlationid", + }, + { + text: "Logger Middleware", + link: "/guide/hono/logger", + }, + { + text: "onError Handler", + link: "/guide/hono/on-error", }, ], }, diff --git a/docs/guide/core/exceptions.md b/docs/guide/core/exceptions.md index 6b62cc4..66a1d6b 100644 --- a/docs/guide/core/exceptions.md +++ b/docs/guide/core/exceptions.md @@ -3,7 +3,9 @@ prev: text: "Logging" link: "/guide/core/logging" -next: false +next: + text: "Nimbus Hono" + link: "/guide/hono" --- # Exceptions diff --git a/docs/guide/hono/correlationid.md b/docs/guide/hono/correlationid.md new file mode 100644 index 0000000..f930e6c --- /dev/null +++ b/docs/guide/hono/correlationid.md @@ -0,0 +1,11 @@ +--- +prev: + text: "Nimbus Hono" + link: "/guide/hono" + +next: + text: "Logger Middleware" + link: "/guide/hono/logger" +--- + +# Nimbus Hono - CorrelationID Middleware diff --git a/docs/guide/hono/index.md b/docs/guide/hono/index.md new file mode 100644 index 0000000..286b42b --- /dev/null +++ b/docs/guide/hono/index.md @@ -0,0 +1,33 @@ +--- +prev: + text: "Exceptions" + link: 
"/guide/core/exceptions" + +next: + text: "CorrelationID Middleware" + link: "/guide/hono/correlationid" +--- + +# Nimbus Hono + +Adapters and useful functionality to bridge Nimbus and [Hono](https://hono.dev/). + +[https://jsr.io/@nimbus/oak](https://jsr.io/@nimbus/hono) + +### Deno + +```bash +deno add jsr:@nimbus/hono +``` + +### NPM + +```bash +npx jsr add @nimbus/hono +``` + +### Bun + +```bash +bunx jsr add @nimbus/hono +``` diff --git a/docs/guide/hono/logger.md b/docs/guide/hono/logger.md new file mode 100644 index 0000000..63f156c --- /dev/null +++ b/docs/guide/hono/logger.md @@ -0,0 +1,11 @@ +--- +prev: + text: "CorrelationID Middleware" + link: "/guide/hono/correlationid" + +next: + text: "onError Handler" + link: "/guide/hono/on-error" +--- + +# Nimbus Hono - Logger Middleware diff --git a/docs/guide/hono/on-error.md b/docs/guide/hono/on-error.md new file mode 100644 index 0000000..bf3f82f --- /dev/null +++ b/docs/guide/hono/on-error.md @@ -0,0 +1,11 @@ +--- +prev: + text: "Logger Middleware" + link: "/guide/hono/logger" + +next: + text: "Nimbus MongoDB" + link: "/guide/mongodb" +--- + +# Nimbus Hono - onError Handler diff --git a/docs/guide/http/index.md b/docs/guide/http/index.md deleted file mode 100644 index 513682f..0000000 --- a/docs/guide/http/index.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -prev: - text: "Logging" - link: "/guide/core/logging" - -next: - text: "Router" - link: "/guide/oak/router" ---- - -# Nimbus Oak Package - -The Oak package provides a simple and easy-to-use interface for working with the Oak web framework. - -[https://jsr.io/@nimbus/oak](https://jsr.io/@nimbus/oak) - -### Deno - -```bash -deno add jsr:@nimbus/oak -``` - -### NPM - -```bash -npx jsr add @nimbus/oak -``` - -### Bun - -```bash -bunx jsr add @nimbus/oak -``` diff --git a/docs/guide/http/router.md b/docs/guide/http/router.md deleted file mode 100644 index ccd32ef..0000000 --- a/docs/guide/http/router.md +++ /dev/null @@ -1 +0,0 @@ -# Nimbus Oak - Router diff --git a/docs/guide/mongodb/index.md b/docs/guide/mongodb/index.md index 9bb7446..2971a18 100644 --- a/docs/guide/mongodb/index.md +++ b/docs/guide/mongodb/index.md @@ -1,7 +1,7 @@ --- prev: - text: "Middleware" - link: "/guide/oak/middleware" + text: "onError Handler" + link: "/guide/hono/on-error" next: text: "Repository" diff --git a/docs/guide/observability.md b/docs/guide/observability.md index 066b854..3c2db89 100644 --- a/docs/guide/observability.md +++ b/docs/guide/observability.md @@ -1,21 +1,13 @@ --- prev: - text: "Core" - link: "/guide/core" + text: "Quickstart" + link: "/guide/quickstart" next: text: "Commands" link: "/guide/core/commands" --- -# TODOS: - -- Get over documentation and simplify code examples. -- fromZodError deprecated? -- Overthink structure - - Observability in main section not under core -- work on documentation for other packages - # Observability Observability is a first-class citizen in Nimbus. The framework is designed so that developers can focus on business logic without implementing logging, tracing, and metrics from the ground up. diff --git a/docs/guide/quickstart.md b/docs/guide/quickstart.md index 612f1b1..22f1e7c 100644 --- a/docs/guide/quickstart.md +++ b/docs/guide/quickstart.md @@ -8,8 +8,7 @@ Nimbus tries to keep dependencies as low as possible. Theses are the dependencies Nimbus relies on. 
- [Deno Standard Library](https://docs.deno.com/runtime/fundamentals/standard_library/) ([@std](https://jsr.io/@std)) -- [ajv](https://ajv.js.org/) -- [ajv-formats](https://ajv.js.org/packages/ajv-formats.html) +- [Zod](https://zod.dev/) ## Installation diff --git a/docs/guide/what-is-nimbus.md b/docs/guide/what-is-nimbus.md index 8368674..51e020c 100644 --- a/docs/guide/what-is-nimbus.md +++ b/docs/guide/what-is-nimbus.md @@ -1,29 +1,30 @@ # What is Nimbus? -Nimbus is a TypeScript framework for building message-driven applications with a focus on Domain-Driven Design (DDD) and clean architecture principles. It provides the building blocks for implementing Commands, Queries, and Events while keeping business logic pure and testable. +Yet another framework to build software ... ## Philosophy -Nimbus aims to keep things simple and avoid complex OOP or FP principles. -No complex inheritance hierarchies, no dependency injection, no decorators, no magic. -Just code that is easy to understand. +... with some concepts to think things differently. -:::tip Simplicity first! -Keep it as simple as possible with the least amount of external dependencies. -::: +### Simplicity -There are already great Frameworks like [NestJS](https://nestjs.com/) and [Effect](https://effect.website/) out there for building TypeScript applications. So the question is why build another one? +Nimbus aims to keep things simple and to avoid overly complex OOP or FP principles. No complex inheritance hierarchies, no dependency injection, no decorators. Just explicit code that is easy to understand and reason about. -While those frameworks heavily emphasize either object-oriented or functional programming patterns this comes with the cost of a steep learning curve. Nimbus aim is to have a learning curve that is as flat as possible. +### No Framework Magic -:::tip Start with Nimbus -Our recommendation is to start with Nimbus and see where it takes you. -In case you really need specific features or want to add more complexity you can always add it later. -::: +Three lines of code to build a whole API is great, until something goes wrong and you have no clue why the magic stopped working. -## Architecture Philosophy +### Flat and easy learning curve -Nimbus is built around the idea of a **Pure Core** and an **Imperative Shell**, aligning well with Hexagonal Architecture (Ports & Adapters) and supporting modern patterns like CQRS and Event Sourcing. +There are already great Frameworks like [NestJS](https://nestjs.com/) and [Effect](https://effect.website/) out there for building TypeScript applications. + +While those frameworks heavily emphasize either object-oriented or functional programming patterns this comes with the cost of a steep learning curve. Nimbus aims to have a learning curve that is as flat as possible. + +Be productive right from the start. + +## Architecture Recommendation + +It would be valuable to build your application around the idea of a **Pure Core** and an **Imperative Shell**. It aligns well with Hexagonal Architecture (Ports & Adapters) and is a good foundation for patterns like CQRS and Event Sourcing. ![Illustration of the pure core imperative shell architecture](/nimbus-pure-core-imperative-shell.svg) @@ -38,33 +39,6 @@ The pure core contains domain logic that: - Can be tested by running functions with different inputs and comparing outputs - no mocking needed! 
- Represents the unique value proposition of the application -Example from a recipe management system: - -```typescript -// Pure domain logic - no I/O, completely side-effect free -export const addRecipe = ( - command: AddRecipeCommand, - state: RecipeState, -): { - newState: Recipe; - events: RecipeAddedEvent[]; -} => { - if (state !== null) { - throw new InvalidInputException('Recipe already exists', { - errorCode: 'DUPLICATE_RECIPE', - }); - } - - const subject = recipeSubject(command.data.slug); - const event = createRecipeAddedEvent(command, subject); - - return { - newState: command.data, - events: [event], - }; -}; -``` - ### The Imperative Shell The shell handles all interactions with the outside world - HTTP requests, database operations, file system access, and other I/O operations. It orchestrates the pure core by providing it with data and persisting the results. @@ -77,36 +51,6 @@ The shell's responsibilities include: - Persisting results - Sending responses -Example handler in the shell: - -```typescript -// Shell layer - handles all I/O and orchestrates core logic -export const addRecipeHandler: MessageHandler = - async (command) => { - const subject = recipeSubject(command.data.slug); - - // Shell loads current state from event store - const snapshot = await loadAggregate( - eventStore, - subject, - null, - recipeReducer, - ); - - // Shell calls pure core logic (no I/O happens here) - const { newState, events } = addRecipe(command, snapshot.state); - - // Shell persists events with optimistic concurrency control - await eventStore.writeEvents(events, { - preconditions: snapshot.lastEventId !== undefined - ? [{ type: 'isSubjectOnEventId', payload: { subject, eventId: snapshot.lastEventId } }] - : [{ type: 'isSubjectPristine', payload: { subject } }], - }); - - return newState; - }; -``` - ### Flow of Information Information flows in one direction: **Shell → Core → Shell** @@ -125,129 +69,13 @@ In an HTTP API scenario: For complex scenarios requiring multiple database queries with business logic in between, core functions can be composed and called sequentially by the shell. -## Message-Driven Architecture - -Nimbus uses a message-driven approach with three core message types that follow the Command Query Responsibility Segregation (CQRS) pattern. - -### Commands - -Commands represent write operations - intentions to change system state. They are processed by command handlers that execute business logic and persist changes. - -Learn more in the [Commands Guide](/guide/core/commands). - -### Queries - -Queries represent read operations - requests for information without changing state. Query handlers fetch and return data. - -Learn more in the [Queries Guide](/guide/core/queries). - -### Events - -Events represent facts - things that have already happened in the system. Event handlers react to these facts to update read models, trigger notifications, or coordinate between different parts of the system. - -Learn more in the [Events Guide](/guide/core/events). 
- -## Domain-Driven Design & Hexagonal Architecture - -Nimbus encourages organizing code around business domains using DDD principles and hexagonal architecture: - -``` -src/ -├── contexts/ # Bounded contexts -│ └── recipe/ -│ ├── core/ # Pure domain logic -│ │ ├── domain/ # Entities, value objects, aggregates -│ │ ├── commands/ # Command logic -│ │ ├── queries/ # Query logic -│ │ ├── events/ # Event definitions -│ │ └── ports/ # Interfaces for external dependencies -│ └── infrastructure/ # Adapters & implementation details -│ ├── http/ # HTTP handlers -│ └── repository/ # Data access -└── shared/ # Shared kernel -``` - -While Nimbus works well with this structure, it remains agnostic and does not enforce any specific organizational pattern. Applications can be structured as needed. - -## CQRS & Event Sourcing - -Command Query Responsibility Segregation (CQRS) and Event Sourcing are highly recommended patterns when using Nimbus, especially as historical data becomes increasingly valuable for AI and data analytics. - -### Why CQRS? - -CQRS separates write operations (commands) from read operations (queries), allowing each to be optimized independently: - -- **Commands** change state and emit events -- **Queries** read from optimized read models -- **Events** synchronize write and read models - -### Why Event Sourcing? - -Event sourcing stores every state change as an immutable event, providing: - -- Complete audit trail of all changes -- Ability to reconstruct state at any point in time -- Historical data for analytics and AI training -- Natural fit with event-driven architectures - -Example of an event reducer reconstructing aggregate state: - -```typescript -export const recipeReducer: EventReducer = ( - state: RecipeState, - event: Event -): RecipeState => { - switch (event.type) { - case "at.overlap.nimbus.recipe-added": - return event.data as Recipe; - case "at.overlap.nimbus.recipe-updated": - return { ...state, ...(event.data as Partial) }; - case "at.overlap.nimbus.recipe-deleted": - return null; - default: - return state; - } -}; -``` - -Loading current state by replaying events: - -```typescript -const snapshot = await loadAggregate(eventStore, subject, null, recipeReducer); -``` - -While these patterns are recommended, Nimbus does not force their use. Applications can start simple and adopt these patterns as requirements evolve. - -## Package Ecosystem - -Nimbus provides a modular package ecosystem: - -- **[@nimbus/core](https://jsr.io/@nimbus/core)** - Core message types, routing, validation, and logging -- **[@nimbus/eventsourcing](https://jsr.io/@nimbus/eventsourcing)** - Event sourcing abstractions and aggregate utilities -- **[@nimbus/eventsourcingdb](https://jsr.io/@nimbus/eventsourcingdb)** - EventSourcingDB integration for event storage -- **[@nimbus/oak](https://jsr.io/@nimbus/oak)** - Oak HTTP framework integration -- **[@nimbus/mongodb](https://jsr.io/@nimbus/mongodb)** - MongoDB integration for read models - -Each package can be used independently or combined as needed. - -## Deno & JSR - -Following the principle of "keep it simple all the way," Nimbus is built with [Deno](https://deno.com) and published on [jsr.io/@nimbus](https://jsr.io/packages?search=@nimbus). - -Nimbus is a TypeScript framework compatible with any Node.js runtime, though Deno is recommended for the best development experience. 
- -## Roadmap - -Future development focuses on: +## Testing Recommendation -- AsyncAPI and OpenAPI specification support for schema-driven development -- CLI tool for project initialization and code generation -- Schema registry for contract management +- Unit tests for the pure core. +- E2E tests to ensure the whole system works. -## FAQ +As the name "pure" core already implies, no side effects are allowed. This makes it easy to test the core by running functions with different inputs and comparing outputs - no mocking needed! -:::info Isn't it called Functional Core, Imperative Shell? -Nimbus aims to keep things simple and avoid overly complex Object-Oriented Programming (OOP) principles. The same applies to overly complex Functional Programming (FP) principles. +Also the core is the most important part of your application as it hold your whole business logic. So fast and easy to write unit tests give you the most bang for your buck. -The term **Pure Core** is preferred as it can follow FP patterns but does not require them. -::: +End to end tests will ensure all parts of your application work together as expected. diff --git a/docs/index.md b/docs/index.md index 83a2443..ae728d1 100644 --- a/docs/index.md +++ b/docs/index.md @@ -25,9 +25,9 @@ features: - title: Core Concepts details: Compose an application with commands, events, and queries. - title: Simple Design - details: No need for clunky and complex object-oriented design principles. - - title: Pure Core - details: The only rule to follow is to keep your core logic pure and side-effect free. - - title: Type Safety - details: Ensure type safety for all I/O sources. + details: No clunky and complex OOP or FP design principles. + - title: No Framework Magic + details: Just explicit code without any magic. + - title: Observability Built-In + details: Logging, tracing, and metrics. Batteries included. 
--- From 5515163478d79151624f82bcda5db66e9f1d20d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 11:59:43 +0100 Subject: [PATCH 25/37] more docs --- docs/.vitepress/config.mjs | 16 + docs/guide/hono/correlationid.md | 79 ++++- docs/guide/hono/logger.md | 108 ++++++- docs/guide/hono/on-error.md | 150 ++++++++- docs/guide/mongodb/connection-manager.md | 208 ++++++++++++ docs/guide/mongodb/crud.md | 295 +++++++++++++++++- docs/guide/mongodb/deploy-collection.md | 149 +++++++++ docs/guide/mongodb/handle-mongo-error.md | 155 +++++++++ docs/guide/mongodb/index.md | 4 +- docs/guide/mongodb/mongo-json.md | 127 ++++++++ docs/guide/mongodb/repository.md | 261 +++++++++++++++- docs/guide/utils/get-env.md | 134 +++++++- docs/guide/utils/index.md | 4 +- .../users/core/commands/addUser.command.ts | 2 + .../src/iam/users/core/domain/user.ts | 1 + .../src/iam/users/core/domain/userGroup.ts | 9 + .../iam/users/core/queries/getUserGroups.ts | 10 + .../src/iam/users/shell/http/router.ts | 22 ++ .../messages/queries/getUserGroups.query.ts | 8 + .../shell/messages/registerUserMessages.ts | 10 + .../users/shell/mongodb/user.collection.ts | 5 + .../users/shell/mongodb/user.repository.ts | 32 +- packages/hono/src/lib/middleware/logger.ts | 3 +- packages/mongodb/src/index.ts | 1 + packages/mongodb/src/lib/crud/aggregate.ts | 8 +- 25 files changed, 1785 insertions(+), 16 deletions(-) create mode 100644 docs/guide/mongodb/connection-manager.md create mode 100644 docs/guide/mongodb/deploy-collection.md create mode 100644 docs/guide/mongodb/handle-mongo-error.md create mode 100644 docs/guide/mongodb/mongo-json.md create mode 100644 examples/hono-demo/src/iam/users/core/domain/userGroup.ts create mode 100644 examples/hono-demo/src/iam/users/core/queries/getUserGroups.ts create mode 100644 examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts diff --git a/docs/.vitepress/config.mjs b/docs/.vitepress/config.mjs index 6033af7..0a6fac2 100644 --- a/docs/.vitepress/config.mjs +++ b/docs/.vitepress/config.mjs @@ -94,6 +94,10 @@ export default defineConfig({ text: "MongoDB", link: "/guide/mongodb", items: [ + { + text: "Connection Manager", + link: "/guide/mongodb/connection-manager", + }, { text: "Repository", link: "/guide/mongodb/repository", @@ -102,6 +106,18 @@ export default defineConfig({ text: "CRUD+", link: "/guide/mongodb/crud", }, + { + text: "MongoJSON", + link: "/guide/mongodb/mongo-json", + }, + { + text: "handleMongoError", + link: "/guide/mongodb/handle-mongo-error", + }, + { + text: "Deploy Collection", + link: "/guide/mongodb/deploy-collection", + }, ], }, diff --git a/docs/guide/hono/correlationid.md b/docs/guide/hono/correlationid.md index f930e6c..2678f4b 100644 --- a/docs/guide/hono/correlationid.md +++ b/docs/guide/hono/correlationid.md @@ -8,4 +8,81 @@ next: link: "/guide/hono/logger" --- -# Nimbus Hono - CorrelationID Middleware +# CorrelationID Middleware + +The CorrelationID middleware extracts a correlation ID from incoming request headers or generates a new one using ULID. This ID is stored in the Hono context and optionally added to response headers, enabling request tracing across your application. 
+ +## Basic Usage + +```typescript +import { Hono } from "hono"; +import { correlationId, getCorrelationId } from "@nimbus/hono"; + +const app = new Hono(); + +// Add the middleware +app.use(correlationId()); + +app.get("/", (c) => { + const id = getCorrelationId(c); + return c.json({ correlationId: id }); +}); +``` + +## Header Detection + +The middleware checks the following headers in order of priority: + +| Priority | Header Name | +| -------- | ------------------ | +| 1 | `x-correlation-id` | +| 2 | `x-request-id` | +| 3 | `request-id` | + +If none of these headers are present, a new ULID is generated. + +## Configuration Options + +| Option | Type | Default | Description | +| ---------------------- | --------- | -------------------- | ------------------------------------------ | +| `addToResponseHeaders` | `boolean` | `true` | Add the correlation ID to response headers | +| `responseHeaderName` | `string` | `"x-correlation-id"` | The header name to use in the response | + +```typescript +import { correlationId } from "@nimbus/hono"; + +// Custom configuration +app.use( + correlationId({ + addToResponseHeaders: true, + responseHeaderName: "x-request-id", + }) +); +``` + +## Retrieving the Correlation ID + +Use the `getCorrelationId()` helper function to retrieve the correlation ID from the Hono context: + +```typescript +import { getCorrelationId } from "@nimbus/hono"; + +app.get("/users/:id", async (c) => { + const correlationId = getCorrelationId(c); + + // Use in logging + logger.info({ + message: "Fetching user", + correlationId, + }); + + // Pass to commands/queries + const command = createCommand({ + type: "get-user", + correlationid: correlationId, + data: { id: c.req.param("id") }, + }); + + return c.json(await router.route(command)); +}); +``` diff --git a/docs/guide/hono/logger.md b/docs/guide/hono/logger.md index 63f156c..9053730 100644 --- a/docs/guide/hono/logger.md +++ b/docs/guide/hono/logger.md @@ -8,4 +8,110 @@ next: link: "/guide/hono/on-error" --- -# Nimbus Hono - Logger Middleware +# Logger Middleware + +The Logger middleware logs HTTP requests and responses with timing information using the Nimbus logger. It optionally integrates with OpenTelemetry for distributed tracing. + +## Basic Usage + +```typescript +import { Hono } from "hono"; +import { correlationId, logger } from "@nimbus/hono"; + +const app = new Hono(); + +// Use correlationId middleware first to enable correlation ID in logs +app.use(correlationId()); +app.use(logger()); +``` + +## Configuration Options + +| Option | Type | Default | Description | +| --------------- | --------- | ---------- | ----------------------------------------- | +| `enableTracing` | `boolean` | `true` | Enable OpenTelemetry tracing for requests | +| `tracerName` | `string` | `"nimbus"` | The name of the tracer for OpenTelemetry | + +```typescript +import { logger } from "@nimbus/hono"; + +app.use( + logger({ + enableTracing: true, + tracerName: "api", + }) +); +``` + +## Log Output + +The middleware logs each request and response using the Nimbus logger: + +**Request log:** + +``` +[API] INFO :: REQ: [GET] /users/123 +``` + +**Response log (with timing):** + +``` +[API] INFO :: RES: [GET] /users/123 - 45ms +``` + +Both logs include the correlation ID when the `correlationId` middleware is used. + +## OpenTelemetry Tracing + +When `enableTracing` is set to `true`, the middleware: + +1. **Extracts trace context** from incoming `traceparent` and `tracestate` headers +2. **Creates a server span** for the HTTP request +3. 
**Records span attributes** for observability +4. **Propagates context** so child spans can be created in handlers + +### Span Attributes + +| Attribute | Description | +| ------------------ | --------------------------------- | +| `http.method` | The HTTP method (GET, POST, etc.) | +| `url.path` | The request path | +| `http.target` | The full request URL | +| `correlation_id` | The correlation ID (if available) | +| `http.status_code` | The response status code | + +### Example with Tracing + +```typescript +import { Hono } from "hono"; +import { correlationId, logger } from "@nimbus/hono"; + +const app = new Hono(); + +app.use(correlationId()); +app.use( + logger({ + enableTracing: true, + tracerName: "api", + }) +); + +app.get("/users/:id", async (c) => { + // This handler runs within the HTTP span context + // Any spans created here will be children of the HTTP span + const user = await userRepository.findOne({ + filter: { _id: c.req.param("id") }, + }); + + return c.json(user); +}); +``` + +## Error Handling + +When an error occurs during request handling: + +- The span status is set to `ERROR` +- The error message is recorded in the span +- The exception is recorded for debugging +- The error is re-thrown for the error handler to process diff --git a/docs/guide/hono/on-error.md b/docs/guide/hono/on-error.md index bf3f82f..36fdc64 100644 --- a/docs/guide/hono/on-error.md +++ b/docs/guide/hono/on-error.md @@ -8,4 +8,152 @@ next: link: "/guide/mongodb" --- -# Nimbus Hono - onError Handler +# onError Handler + +The `handleError` function is an error handler for Hono applications that converts Nimbus exceptions to structured HTTP JSON responses. + +## Basic Usage + +```typescript +import { Hono } from "hono"; +import { handleError } from "@nimbus/hono"; + +const app = new Hono(); + +// Register the error handler +app.onError(handleError); +``` + +## Response Format + +When a Nimbus exception is thrown, the handler returns a JSON response with the following structure: + +```json +{ + "error": "EXCEPTION_NAME", + "message": "Human-readable error message", + "details": { ... 
} +} +``` + +| Field | Description | +| --------- | -------------------------------------------------------- | +| `error` | The exception name (e.g., `NOT_FOUND`, `INVALID_INPUT`) | +| `message` | The error message provided when throwing the exception | +| `details` | Optional additional details (only included if provided) | + +## Status Code Mapping + +The HTTP status code is taken directly from the exception's `statusCode` property: + +| Exception | Status Code | Response `error` | +| ----------------------- | ----------- | ----------------------- | +| `GenericException` | 500 | `GENERIC_EXCEPTION` | +| `InvalidInputException` | 400 | `INVALID_INPUT` | +| `NotFoundException` | 404 | `NOT_FOUND` | +| `UnauthorizedException` | 401 | `UNAUTHORIZED` | +| `ForbiddenException` | 403 | `FORBIDDEN` | +| Custom exceptions | (custom) | (custom name) | + +## Logging Behavior + +The handler logs errors differently based on the status code: + +| Status Code | Log Level | Description | +| ----------- | ---------- | ---------------------------------------- | +| 5xx | `error` | Server errors that need investigation | +| 4xx | `debug` | Client errors, typically expected | +| Unhandled | `critical` | Non-Nimbus errors, unexpected failures | + +## Example: Exception Handling + +```typescript +import { Hono } from "hono"; +import { handleError } from "@nimbus/hono"; +import { NotFoundException, InvalidInputException } from "@nimbus/core"; + +const app = new Hono(); + +app.get("/users/:id", async (c) => { + const user = await findUser(c.req.param("id")); + + if (!user) { + throw new NotFoundException("User not found", { + userId: c.req.param("id"), + }); + } + + return c.json(user); +}); + +app.post("/users", async (c) => { + const body = await c.req.json(); + + if (!body.email) { + throw new InvalidInputException("Email is required", { + field: "email", + }); + } + + const user = await createUser(body); + return c.json(user, 201); +}); + +app.onError(handleError); +``` + +### Response Examples + +**NotFoundException (404):** +```json +{ + "error": "NOT_FOUND", + "message": "User not found", + "details": { + "userId": "123" + } +} +``` + +**InvalidInputException (400):** +```json +{ + "error": "INVALID_INPUT", + "message": "Email is required", + "details": { + "field": "email" + } +} +``` + +**Unhandled Error (500):** +```json +{ + "error": "INTERNAL_SERVER_ERROR" +} +``` + +## Complete Application Setup + +```typescript +import { Hono } from "hono"; +import { correlationId, handleError, logger } from "@nimbus/hono"; +import { setupLogger, parseLogLevel } from "@nimbus/core"; + +setupLogger({ + logLevel: parseLogLevel(process.env.LOG_LEVEL), +}); + +const app = new Hono(); + +app.use(correlationId()); +app.use(logger({ enableTracing: true })); + +// Your routes here +app.get("/health", (c) => c.json({ status: "ok" })); + +// Error handler must be registered last +app.onError(handleError); + +export default app; +``` diff --git a/docs/guide/mongodb/connection-manager.md b/docs/guide/mongodb/connection-manager.md new file mode 100644 index 0000000..c1c69b6 --- /dev/null +++ b/docs/guide/mongodb/connection-manager.md @@ -0,0 +1,208 @@ +--- +prev: + text: "Nimbus MongoDB" + link: "/guide/mongodb" + +next: + text: "Repository" + link: "/guide/mongodb/repository" +--- + +# Connection Manager + +The `MongoConnectionManager` is a singleton class that manages MongoDB connections with automatic reconnection, health checks, and cleanup of inactive connections. 
+ +## Basic Usage + +```typescript +import { MongoConnectionManager } from "@nimbus/mongodb"; +import { ServerApiVersion } from "mongodb"; + +const mongoManager = MongoConnectionManager.getInstance( + process.env.MONGO_URL ?? "", + { + mongoClientOptions: { + appName: "my-app", + serverApi: { + version: ServerApiVersion.v1, + strict: false, + deprecationErrors: true, + }, + }, + } +); + +// Get a collection +const collection = await mongoManager.getCollection("myDatabase", "users"); +``` + +## Configuration Options + +| Option | Type | Default | Description | +| -------------------- | -------------------- | ------------ | -------------------------------------------------- | +| `connectionTimeout` | `number` | `1800000` | Inactivity timeout in ms before cleanup (30 min) | +| `mongoClientOptions` | `MongoClientOptions` | _(required)_ | MongoDB driver client options | + +### Recommended Configuration + +```typescript +import { MongoConnectionManager } from "@nimbus/mongodb"; +import { ServerApiVersion } from "mongodb"; + +const mongoManager = MongoConnectionManager.getInstance( + process.env.MONGO_URL ?? "", + { + connectionTimeout: 1000 * 60 * 5, // 5 minutes + mongoClientOptions: { + appName: "my-app", + serverApi: { + version: ServerApiVersion.v1, + strict: false, + deprecationErrors: true, + }, + maxPoolSize: 10, + minPoolSize: 0, + maxIdleTimeMS: 1000 * 60 * 1, // 1 minute idle timeout + connectTimeoutMS: 1000 * 15, // 15 seconds connection timeout + socketTimeoutMS: 1000 * 30, // 30 seconds socket timeout + }, + } +); +``` + +## Available Methods + +| Method | Return Type | Description | +| ------------------------------------- | --------------------- | -------------------------------------------- | +| `getInstance(uri, options)` | `MongoConnectionManager` | Get the singleton instance | +| `getClient()` | `Promise` | Get a connected MongoDB client | +| `getDatabase(dbName)` | `Promise` | Get a database instance | +| `getCollection(dbName, collection)` | `Promise` | Get a collection instance | +| `healthCheck()` | `Promise<{ status, details? }>` | Check connection health | +| `cleanup()` | `Promise` | Close inactive connections | + +## Connection Management + +The manager automatically handles: + +- **Connection pooling**: Reuses existing connections when available +- **Reconnection**: Automatically reconnects when the connection is lost +- **Connection testing**: Verifies connections with a ping before returning + +### Getting Resources + +```typescript +// Get a connected client +const client = await mongoManager.getClient(); + +// Get a database +const db = await mongoManager.getDatabase("myDatabase"); + +// Get a collection (most common) +const usersCollection = await mongoManager.getCollection("myDatabase", "users"); +``` + +## Health Checks + +Use `healthCheck()` to verify the database connection: + +```typescript +app.get("/health", async (c) => { + const dbHealth = await mongoManager.healthCheck(); + + return c.json({ + status: dbHealth.status === "healthy" ? "ok" : "error", + database: dbHealth, + }); +}); +``` + +**Response format:** + +```typescript +// Healthy +{ status: "healthy" } + +// Error +{ status: "error", details: "Failed to ping MongoDB server" } +``` + +## Cleanup + +The `cleanup()` method closes connections that have been inactive longer than the configured `connectionTimeout`. 
Set up an interval to call this periodically: + +```typescript +import { getLogger } from "@nimbus/core"; + +// Check every minute for inactive connections +setInterval(() => { + mongoManager.cleanup().catch((error) => { + getLogger().error({ + message: "Failed to cleanup MongoDB connections", + error, + }); + }); +}, 1000 * 60); +``` + +## Complete Setup Example + +```typescript +import { getLogger } from "@nimbus/core"; +import { MongoConnectionManager } from "@nimbus/mongodb"; +import { ServerApiVersion } from "mongodb"; + +export const mongoManager = MongoConnectionManager.getInstance( + process.env.MONGO_URL ?? "", + { + connectionTimeout: 1000 * 60 * 5, + mongoClientOptions: { + appName: "my-app", + serverApi: { + version: ServerApiVersion.v1, + strict: false, + deprecationErrors: true, + }, + maxPoolSize: 10, + minPoolSize: 0, + maxIdleTimeMS: 1000 * 60 * 1, + connectTimeoutMS: 1000 * 15, + socketTimeoutMS: 1000 * 30, + }, + } +); + +export const initMongoConnectionManager = () => { + // Periodic cleanup of inactive connections + setInterval(() => { + mongoManager.cleanup().catch((error) => { + getLogger().error({ + message: error.message, + error, + }); + }); + }, 1000 * 60); +}; +``` + +## Using with Repository + +The connection manager integrates seamlessly with the `MongoDBRepository` class: + +```typescript +import { MongoDBRepository } from "@nimbus/mongodb"; +import { mongoManager } from "./mongodb.ts"; +import { User, UserSchema } from "./user.ts"; + +class UserRepository extends MongoDBRepository { + constructor() { + super( + () => mongoManager.getCollection("myDatabase", "users"), + UserSchema, + "User" + ); + } +} + +export const userRepository = new UserRepository(); +``` diff --git a/docs/guide/mongodb/crud.md b/docs/guide/mongodb/crud.md index 653422f..63d6b08 100644 --- a/docs/guide/mongodb/crud.md +++ b/docs/guide/mongodb/crud.md @@ -1 +1,294 @@ -# Nimbus MongoDB - CRUD +--- +prev: + text: "Repository" + link: "/guide/mongodb/repository" + +next: + text: "MongoJSON" + link: "/guide/mongodb/mongo-json" +--- + +# CRUD+ + +The MongoDB package provides low-level CRUD functions for direct database operations. These functions are fully instrumented with OpenTelemetry tracing and metrics, and handle errors using Nimbus exceptions. + +## When to Use + +Use these low-level functions when: + +- You need operations not provided by `MongoDBRepository` +- You want direct control over MongoDB operations +- You're building custom repository methods + +For standard CRUD operations, prefer using the [Repository](/guide/mongodb/repository) class. 
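As an illustration of the last point, a custom repository method can drop down to a low-level helper while the standard operations stay on the repository base class. This is only a sketch: the `UserRepository` wiring is copied from the Connection Manager page, while `userStatsSchema` and the `status` field are hypothetical additions for this example.

```typescript
import { aggregate, MongoDBRepository } from "@nimbus/mongodb";
import { z } from "zod";
import { mongoManager } from "./mongodb.ts";
import { UserSchema } from "./user.ts";

// Hypothetical read model returned by the custom query.
const userStatsSchema = z.object({
    status: z.string(),
    count: z.number(),
});

class UserRepository extends MongoDBRepository {
    constructor() {
        super(
            () => mongoManager.getCollection("myDatabase", "users"),
            UserSchema,
            "User",
        );
    }

    // Custom method built on the low-level aggregate() helper.
    async countByStatus() {
        const collection = await mongoManager.getCollection(
            "myDatabase",
            "users",
        );

        return await aggregate({
            collection,
            aggregation: [
                { $group: { _id: "$status", count: { $sum: 1 } } },
            ],
            mapDocument: (doc) => ({ status: doc._id, count: doc.count }),
            outputType: userStatsSchema,
        });
    }
}

export const userRepository = new UserRepository();
```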
+ +## Available Functions + +| Function | Description | +| ------------------- | ----------------------------------------- | +| `find` | Find multiple documents matching a filter | +| `findOne` | Find a single document matching a filter | +| `insertOne` | Insert a single document | +| `insertMany` | Insert multiple documents | +| `replaceOne` | Replace a single document | +| `updateOne` | Update a single document | +| `updateMany` | Update multiple documents | +| `deleteOne` | Delete a single document | +| `deleteMany` | Delete multiple documents | +| `countDocuments` | Count documents matching a filter | +| `bulkWrite` | Execute multiple write operations | +| `aggregate` | Execute an aggregation pipeline | +| `findOneAndUpdate` | Find and update a document atomically | +| `findOneAndReplace` | Find and replace a document atomically | +| `findOneAndDelete` | Find and delete a document atomically | + +## Usage Examples + +### Find Operations + +Functions that return typed data require `mapDocument` and `outputType` parameters for type-safe results. + +```typescript +import { find, findOne } from "@nimbus/mongodb"; + +type User = { _id: string; email: string; name: string }; + +// Find multiple documents +const users = await find({ + collection, + filter: { status: "active" }, + limit: 10, + skip: 0, + sort: { createdAt: -1 }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + name: doc.name, + }), + outputType: UserSchema, +}); + +// Find a single document +const user = await findOne({ + collection, + filter: { email: "john@example.com" }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + name: doc.name, + }), + outputType: UserSchema, +}); +``` + +### Insert Operations + +```typescript +import { insertOne, insertMany } from "@nimbus/mongodb"; + +// Insert a single document +const result = await insertOne({ + collection, + document: { + email: "john@example.com", + name: "John Doe", + createdAt: new Date(), + }, +}); + +// Insert multiple documents +const results = await insertMany({ + collection, + documents: [ + { email: "john@example.com", name: "John" }, + { email: "jane@example.com", name: "Jane" }, + ], +}); +``` + +### Update Operations + +```typescript +import { updateOne, updateMany, replaceOne } from "@nimbus/mongodb"; + +// Update a single document +const result = await updateOne({ + collection, + filter: { _id: userId }, + update: { $set: { name: "New Name", updatedAt: new Date() } }, +}); + +// Update multiple documents +const results = await updateMany({ + collection, + filter: { status: "pending" }, + update: { $set: { status: "processed" } }, +}); + +// Replace a document entirely +const replaced = await replaceOne({ + collection, + filter: { _id: userId }, + replacement: { + email: "new@example.com", + name: "New Name", + updatedAt: new Date(), + }, +}); +``` + +### Delete Operations + +```typescript +import { deleteOne, deleteMany } from "@nimbus/mongodb"; + +// Delete a single document +const result = await deleteOne({ + collection, + filter: { _id: userId }, +}); + +// Delete multiple documents +const results = await deleteMany({ + collection, + filter: { status: "deleted" }, +}); +``` + +### Atomic Find-and-Modify Operations + +These functions return the document before or after modification, requiring `mapDocument` and `outputType` for type safety. 
+ +```typescript +import { + findOneAndUpdate, + findOneAndReplace, + findOneAndDelete, +} from "@nimbus/mongodb"; + +type User = { _id: string; email: string; loginCount: number }; + +// Find and update atomically +const updated = await findOneAndUpdate({ + collection, + filter: { _id: userId }, + update: { $inc: { loginCount: 1 } }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + loginCount: doc.loginCount, + }), + outputType: UserSchema, + options: { returnDocument: "after" }, +}); + +// Find and replace atomically +const replaced = await findOneAndReplace({ + collection, + filter: { _id: userId }, + replacement: newDocument, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + loginCount: doc.loginCount, + }), + outputType: UserSchema, + options: { returnDocument: "after" }, +}); + +// Find and delete atomically +const deleted = await findOneAndDelete({ + collection, + filter: { _id: userId }, + mapDocument: (doc) => ({ + _id: doc._id.toString(), + email: doc.email, + loginCount: doc.loginCount, + }), + outputType: UserSchema, +}); +``` + +### Aggregation + +The `aggregate` function executes a pipeline and maps results to typed output. + +```typescript +import { aggregate } from "@nimbus/mongodb"; + +type CategoryCount = { category: string; count: number }; + +const results = await aggregate({ + collection, + aggregation: [ + { $match: { status: "active" } }, + { $group: { _id: "$category", count: { $sum: 1 } } }, + { $sort: { count: -1 } }, + ], + mapDocument: (doc) => ({ + category: doc._id, + count: doc.count, + }), + outputType: CategoryCountSchema, +}); +``` + +### Bulk Write + +```typescript +import { bulkWrite } from "@nimbus/mongodb"; + +const result = await bulkWrite({ + collection, + operations: [ + { insertOne: { document: { name: "New Item" } } }, + { + updateOne: { + filter: { _id: id1 }, + update: { $set: { status: "updated" } }, + }, + }, + { deleteOne: { filter: { _id: id2 } } }, + ], +}); +``` + +### Count Documents + +```typescript +import { countDocuments } from "@nimbus/mongodb"; + +const count = await countDocuments({ + collection, + filter: { status: "active" }, +}); +``` + +## Observability + +All CRUD functions are automatically instrumented with OpenTelemetry tracing and metrics. + +### Tracing + +Each operation creates a span with the following attributes: + +| Attribute | Description | +| ----------------------- | ---------------------------------------- | +| `db.system` | Always `mongodb` | +| `db.operation` | The operation name (e.g., `find`) | +| `db.mongodb.collection` | The collection name | + +### Metrics + +Two metrics are recorded for every operation: + +| Metric | Type | Labels | Description | +| ----------------------------------- | --------- | -------------------------------- | ---------------------------------- | +| `mongodb_operation_total` | Counter | `operation`, `collection`, `status` | Total number of operations | +| `mongodb_operation_duration_seconds`| Histogram | `operation`, `collection` | Duration of operations in seconds | + +The `status` label is either `success` or `error`. + +## Error Handling + +All functions use `handleMongoError` internally to convert MongoDB errors to Nimbus exceptions. See [handleMongoError](/guide/mongodb/handle-mongo-error) for details on error mapping. 
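+
+As a brief sketch of what this means in practice, errors thrown by these functions can be caught directly as Nimbus exceptions; the document shown here is illustrative:
+
+```typescript
+import { getLogger } from "@nimbus/core";
+import { insertOne } from "@nimbus/mongodb";
+
+try {
+    await insertOne({
+        collection,
+        document: { email: "duplicate@example.com" },
+    });
+} catch (error) {
+    // `error` is already a Nimbus exception, e.g. an InvalidInputException
+    // for a duplicate key or schema validation failure
+    getLogger().error({ message: "Failed to insert document", error });
+}
+```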
diff --git a/docs/guide/mongodb/deploy-collection.md b/docs/guide/mongodb/deploy-collection.md new file mode 100644 index 0000000..d565b9c --- /dev/null +++ b/docs/guide/mongodb/deploy-collection.md @@ -0,0 +1,149 @@ +--- +prev: + text: "handleMongoError" + link: "/guide/mongodb/handle-mongo-error" + +next: + text: "Nimbus Utils" + link: "/guide/utils" +--- + +# Deploy Collection + +The `deployMongoCollection` function creates or updates MongoDB collections with schema validation and indexes. It provides a declarative way to manage your database schema. + +## Basic Usage + +```typescript +import { deployMongoCollection } from "@nimbus/mongodb"; +import { mongoManager } from "./mongodb.ts"; + +const USERS_COLLECTION = { + name: "users", + options: { + validator: { + $jsonSchema: { + bsonType: "object", + required: ["email", "firstName", "lastName"], + properties: { + email: { bsonType: "string" }, + firstName: { bsonType: "string" }, + lastName: { bsonType: "string" }, + }, + }, + }, + }, + indexes: [ + { key: { email: 1 }, unique: true }, + { key: { lastName: 1, firstName: 1 } }, + ], +}; + +const client = await mongoManager.getClient(); + +await deployMongoCollection({ + mongoClient: client, + dbName: "myDatabase", + collectionDefinition: USERS_COLLECTION, + allowUpdateIndexes: true, +}); +``` + +## Function Parameters + +| Parameter | Type | Description | +| ---------------------- | --------------------------- | ------------------------------------------------- | +| `mongoClient` | `MongoClient` | A connected MongoDB client instance | +| `dbName` | `string` | The name of the database | +| `collectionDefinition` | `MongoCollectionDefinition` | The collection definition object | +| `allowUpdateIndexes` | `boolean` | Whether to update indexes on existing collections | + +## Collection Definition + +The `MongoCollectionDefinition` type defines the structure of a collection: + +```typescript +type MongoCollectionDefinition = { + name: string; + options?: CreateCollectionOptions; + indexes?: IndexDescription[]; +}; +``` + +| Property | Type | Description | +| --------- | ------------------------- | --------------------------------------------- | +| `name` | `string` | The name of the collection | +| `options` | `CreateCollectionOptions` | MongoDB collection options (validation, etc.) | +| `indexes` | `IndexDescription[]` | Array of index definitions | + +## Behavior + +The function handles two scenarios: + +### New Collection + +When the collection does not exist: + +1. Creates the collection with the specified options +2. Creates all defined indexes + +### Existing Collection + +When the collection already exists: + +1. Updates collection options using `collMod` +2. 
If `allowUpdateIndexes` is `true`: + - Creates any new indexes not present in the database + - Drops any indexes not defined in the collection definition (except `_id_`) + +## Index Management + +Indexes are automatically named based on their key fields if no name is provided: + +```typescript +// This index will be named "email_1" +{ key: { email: 1 } } + +// This index will be named "lastName_1_firstName_1" +{ key: { lastName: 1, firstName: 1 } } + +// Explicit name +{ key: { email: 1 }, name: "email_unique_idx", unique: true } +``` + +## Deployment Script + +Create a script to deploy all collections: + +```typescript +import { deployMongoCollection } from "@nimbus/mongodb"; +import { setupLogger, parseLogLevel } from "@nimbus/core"; +import { mongoManager } from "./mongodb.ts"; +import { USERS_COLLECTION } from "./collections/users.ts"; +import { ORDERS_COLLECTION } from "./collections/orders.ts"; + +// Configure logging to see deployment progress +setupLogger({ + logLevel: parseLogLevel("info"), +}); + +const collections = [USERS_COLLECTION, ORDERS_COLLECTION]; + +const deployCollections = async () => { + const client = await mongoManager.getClient(); + const dbName = process.env.MONGO_DB ?? "myDatabase"; + + for (const collection of collections) { + await deployMongoCollection({ + mongoClient: client, + dbName, + collectionDefinition: collection, + allowUpdateIndexes: true, + }); + } + + console.log("All collections deployed successfully"); +}; + +deployCollections().catch(console.error); +``` diff --git a/docs/guide/mongodb/handle-mongo-error.md b/docs/guide/mongodb/handle-mongo-error.md new file mode 100644 index 0000000..ac45176 --- /dev/null +++ b/docs/guide/mongodb/handle-mongo-error.md @@ -0,0 +1,155 @@ +--- +prev: + text: "MongoJSON" + link: "/guide/mongodb/mongo-json" + +next: + text: "Deploy Collection" + link: "/guide/mongodb/deploy-collection" +--- + +# handleMongoError + +The `handleMongoError` function converts MongoDB errors to Nimbus exceptions based on the error code. This provides consistent error handling across your application. + +## Basic Usage + +```typescript +import { handleMongoError } from "@nimbus/mongodb"; + +try { + await collection.insertOne(document); +} catch (error) { + throw handleMongoError(error); +} +``` + +## Error Code Mappings + +| MongoDB Code | Error Type | Nimbus Exception | Details Included | +| ------------ | ------------------- | ----------------------- | ----------------- | +| 121 | Document validation | `InvalidInputException` | `code`, `details` | +| 2 | Bad value | `InvalidInputException` | Error message | +| 11000 | Duplicate key | `InvalidInputException` | `keyValue` | +| Other | Various | `GenericException` | Original error | + +## Error Examples + +### Duplicate Key Error (Code 11000) + +When inserting a document that violates a unique index: + +```typescript +try { + await collection.insertOne({ email: "existing@example.com" }); +} catch (error) { + const exception = handleMongoError(error); + // InvalidInputException with: + // - message: "E11000 duplicate key error..." + // - details: { keyValue: { email: "existing@example.com" } } +} +``` + +### Document Validation Error (Code 121) + +When a document fails schema validation: + +```typescript +try { + await collection.insertOne({ name: 123 }); // name should be string +} catch (error) { + const exception = handleMongoError(error); + // InvalidInputException with: + // - message: "Document failed validation" + // - details: { code: 121, details: { ... validation errors ... 
} } +} +``` + +### Bad Value Error (Code 2) + +When a query contains invalid values: + +```typescript +try { + await collection.find({ $invalid: true }).toArray(); +} catch (error) { + const exception = handleMongoError(error); + // InvalidInputException with original error message +} +``` + +### Other Errors + +All other MongoDB errors are wrapped in a `GenericException`: + +```typescript +try { + await collection.find({}).toArray(); +} catch (error) { + const exception = handleMongoError(error); + // GenericException with original error stack trace +} +``` + +## Integration with CRUD Functions + +All [CRUD functions](/guide/mongodb/crud) use `handleMongoError` internally: + +```typescript +import { insertOne } from "@nimbus/mongodb"; + +try { + await insertOne({ + collection, + document: { email: "duplicate@example.com" }, + }); +} catch (error) { + // Error is already a Nimbus exception + if (error.name === "INVALID_INPUT") { + // Handle duplicate key or validation error + } +} +``` + +## Custom Error Handling + +You can use `handleMongoError` in your own database operations: + +```typescript +import { handleMongoError } from "@nimbus/mongodb"; +import { GenericException } from "@nimbus/core"; + +const customDatabaseOperation = async (collection: Collection) => { + try { + // Custom MongoDB operation + const result = await collection + .aggregate([ + { $match: { status: "active" } }, + { $group: { _id: "$category", total: { $sum: "$amount" } } }, + ]) + .toArray(); + + return result; + } catch (error) { + throw handleMongoError(error); + } +}; +``` + +## Error Response in API + +When combined with the Hono [error handler](/guide/hono/on-error), MongoDB errors are automatically converted to HTTP responses: + +```typescript +// Duplicate key error becomes: +// HTTP 400 +{ + "error": "INVALID_INPUT", + "message": "E11000 duplicate key error collection: db.users index: email_1 dup key: { email: \"existing@example.com\" }", + "details": { + "keyValue": { + "email": "existing@example.com" + } + } +} +``` diff --git a/docs/guide/mongodb/index.md b/docs/guide/mongodb/index.md index 2971a18..7173de4 100644 --- a/docs/guide/mongodb/index.md +++ b/docs/guide/mongodb/index.md @@ -4,8 +4,8 @@ prev: link: "/guide/hono/on-error" next: - text: "Repository" - link: "/guide/mongodb/repository" + text: "Connection Manager" + link: "/guide/mongodb/connection-manager" --- # Nimbus MongoDB Package diff --git a/docs/guide/mongodb/mongo-json.md b/docs/guide/mongodb/mongo-json.md new file mode 100644 index 0000000..3b0f0a8 --- /dev/null +++ b/docs/guide/mongodb/mongo-json.md @@ -0,0 +1,127 @@ +--- +prev: + text: "CRUD+" + link: "/guide/mongodb/crud" + +next: + text: "handleMongoError" + link: "/guide/mongodb/handle-mongo-error" +--- + +# MongoJSON + +`MongoJSON` provides parse and stringify functions with support for MongoDB data types. It allows you to serialize MongoDB filters and documents as JSON strings while preserving type information. 
+ +## Basic Usage + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +// Parse a JSON string with MongoDB type prefixes +const filter = MongoJSON.parse('{"_id": "objectId::507f1f77bcf86cd799439011"}'); +// Result: { _id: ObjectId("507f1f77bcf86cd799439011") } + +// Stringify an object to JSON +const json = MongoJSON.stringify({ name: "John", age: 30 }); +// Result: '{"name":"John","age":30}' +``` + +## Type Prefixes + +The `parse` function recognizes special prefixes to convert strings to MongoDB types: + +| Prefix | Converts To | Example | +| ------------ | ----------- | -------------------------------------- | +| `objectId::` | `ObjectId` | `"objectId::507f1f77bcf86cd799439011"` | +| `date::` | `Date` | `"date::2024-01-15T10:30:00Z"` | +| `int::` | `number` | `"int::42"` | +| `double::` | `number` | `"double::19.99"` | + +## Parse Examples + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +// ObjectId conversion +const idFilter = MongoJSON.parse( + '{"_id": "objectId::507f1f77bcf86cd799439011"}' +); +// { _id: ObjectId("507f1f77bcf86cd799439011") } + +// Date conversion +const dateFilter = MongoJSON.parse( + '{"createdAt": {"$gte": "date::2024-01-01T00:00:00Z"}}' +); +// { createdAt: { $gte: Date("2024-01-01T00:00:00Z") } } + +// Integer conversion +const countFilter = MongoJSON.parse('{"count": {"$gt": "int::100"}}'); +// { count: { $gt: 100 } } + +// Multiple types +const complexFilter = MongoJSON.parse(`{ + "_id": "objectId::507f1f77bcf86cd799439011", + "price": {"$lte": "double::49.99"}, + "createdAt": {"$gte": "date::2024-01-01T00:00:00Z"}, + "quantity": {"$gt": "int::0"} +}`); +``` + +## Operator Blacklist + +For security, `MongoJSON.parse` blocks certain MongoDB operators by default: + +```typescript +// This will throw an error +MongoJSON.parse('{"$where": "this.name === \\"admin\\""}'); +// Error: Operator '$where' is not allowed + +// Custom blacklist +MongoJSON.parse(jsonString, ["$where", "$expr"]); +``` + +The default blacklist includes `$where` to prevent code injection attacks. + +## Use Case: API Filters + +`MongoJSON` is useful when accepting MongoDB filters from API requests: + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +app.get("/users", async (c) => { + const filterParam = c.req.query("filter"); + + // Parse the filter from query string + const filter = filterParam ? 
MongoJSON.parse(filterParam) : {}; + + const users = await userRepository.find({ filter }); + return c.json(users); +}); + +// Example request: +// GET /users?filter={"status":"active","createdAt":{"$gte":"date::2024-01-01T00:00:00Z"}} +``` + +## Error Handling + +`MongoJSON.parse` throws an `InvalidInputException` for: + +- Invalid JSON syntax +- Blacklisted operators + +```typescript +import { MongoJSON } from "@nimbus/mongodb"; + +try { + const filter = MongoJSON.parse('{"invalid json}'); +} catch (error) { + // InvalidInputException with JSON parse error details +} + +try { + const filter = MongoJSON.parse('{"$where": "1===1"}'); +} catch (error) { + // Error: Operator '$where' is not allowed +} +``` diff --git a/docs/guide/mongodb/repository.md b/docs/guide/mongodb/repository.md index c669169..30fc0be 100644 --- a/docs/guide/mongodb/repository.md +++ b/docs/guide/mongodb/repository.md @@ -1 +1,260 @@ -# Nimbus MongoDB - Repository +--- +prev: + text: "Connection Manager" + link: "/guide/mongodb/connection-manager" + +next: + text: "CRUD+" + link: "/guide/mongodb/crud" +--- + +# Repository + +The `MongoDBRepository` is a type-safe base class for MongoDB CRUD operations. It provides a consistent interface for interacting with MongoDB collections while handling validation, error conversion, and document mapping. + +## Basic Usage + +Create a repository by extending `MongoDBRepository`: + +```typescript +import { MongoDBRepository } from "@nimbus/mongodb"; +import { z } from "zod"; +import { mongoManager } from "./mongodb.ts"; + +// Define your entity schema +const User = z.object({ + _id: z.string(), + email: z.string().email(), + firstName: z.string(), + lastName: z.string(), + createdAt: z.string(), + updatedAt: z.string(), +}); + +type User = z.infer; + +// Create the repository +class UserRepository extends MongoDBRepository { + constructor() { + super( + () => mongoManager.getCollection("myDatabase", "users"), + User, + "User" + ); + } +} + +export const userRepository = new UserRepository(); +``` + +## Constructor Parameters + +| Parameter | Type | Description | +| --------------- | --------------------------- | ------------------------------------------------- | +| `getCollection` | `() => Promise` | Function that returns a MongoDB collection | +| `entityType` | `ZodType` | Zod schema for validating and typing entities | +| `entityName` | `string` (optional) | Name used in error messages (default: "Document") | + +## Available Methods + +| Method | Parameters | Return Type | Description | +| ---------------- | ------------------------------------------------------ | ----------------- | --------------------------- | +| `findOne` | `{ filter }` | `Promise` | Find a single document | +| `find` | `{ filter, limit?, skip?, sort?, project?, options? }` | `Promise` | Find multiple documents | +| `countDocuments` | `{ filter, options? }` | `Promise` | Count matching documents | +| `insertOne` | `{ item }` | `Promise` | Insert a single document | +| `insertMany` | `{ items, options? }` | `Promise` | Insert multiple documents | +| `replaceOne` | `{ item, options? }` | `Promise` | Replace a document by `_id` | +| `replaceMany` | `{ items, options? }` | `Promise` | Replace multiple documents | +| `deleteOne` | `{ item, options? }` | `Promise` | Delete a document by `_id` | +| `deleteMany` | `{ items, options? 
}` | `Promise` | Delete multiple documents |
+ +## Document Mapping + +Override the mapping methods to control how documents are converted between MongoDB format and your entity format: + +```typescript +import { Document, ObjectId } from "mongodb"; + +class UserRepository extends MongoDBRepository { + constructor() { + super( + () => mongoManager.getCollection("myDatabase", "users"), + User, + "User" + ); + } + + // Convert MongoDB document to entity + override _mapDocumentToEntity(doc: Document): User { + return User.parse({ + _id: doc._id.toString(), + email: doc.email, + firstName: doc.firstName, + lastName: doc.lastName, + createdAt: doc.createdAt.toISOString(), + updatedAt: doc.updatedAt.toISOString(), + }); + } + + // Convert entity to MongoDB document + override _mapEntityToDocument(user: User): Document { + return { + _id: new ObjectId(user._id), + email: user.email, + firstName: user.firstName, + lastName: user.lastName, + createdAt: new Date(user.createdAt), + updatedAt: new Date(user.updatedAt), + }; + } +} +```
+ +## Query Examples + +### Finding Documents + +```typescript +// Find one by filter +const user = await userRepository.findOne({ + filter: { email: "john@example.com" }, +}); + +// Find multiple with options +const users = await userRepository.find({ + filter: { lastName: "Doe" }, + limit: 10, + skip: 0, + sort: { createdAt: -1 }, +}); + +// Count documents +const count = await userRepository.countDocuments({ + filter: { lastName: "Doe" }, +}); +```
+ +### Creating Documents + +```typescript +// Insert one +const newUser = await userRepository.insertOne({ + item: { + _id: new ObjectId().toString(), + email: "jane@example.com", + firstName: "Jane", + lastName: "Doe", + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + }, +}); + +// Insert many +const users = await userRepository.insertMany({ + items: [user1, user2, user3], +}); +```
+ +### Updating Documents + +```typescript +// Replace a document (must include _id) +const updatedUser = await userRepository.replaceOne({ + item: { + ...existingUser, + firstName: "Jonathan", + updatedAt: new Date().toISOString(), + }, +}); +```
+ +### Deleting Documents + +```typescript +// Delete one +const deletedUser = await userRepository.deleteOne({ + item: user, +}); + +// Delete many +const deletedUsers = await userRepository.deleteMany({ + items: [user1, user2], +}); +```
+ +## Error Handling + +The repository automatically throws `NotFoundException` when: + +- `findOne` returns no results +- `replaceOne` matches no documents +- `deleteOne` deletes no documents + +The exception includes entity-specific error codes: + +```typescript +try { + const user = await userRepository.findOne({ + filter: { _id: "nonexistent" }, + }); +} catch (error) { + // NotFoundException with: + // - message: "User not found" + // - details.errorCode: "USER_NOT_FOUND" +} +```
+ +## Add Custom Methods + +Add new methods to the repository class as needed for your use cases, for example when you want to expose specific access patterns explicitly so that consumers do not have to provide the filter logic themselves. + +Aggregation pipelines can also be added to the repository as custom methods. + +::: tip +Use the [CRUD+](/guide/mongodb/crud) functions provided by Nimbus so that observability and error handling stay baked in. +::: + +```typescript +import { aggregate, MongoDBRepository } from "@nimbus/mongodb"; + +class UserRepository extends MongoDBRepository { + // ... existing code ...
+ + // Add custom methods to find a user by email + public async findByEmail(email: string): Promise { + return this.findOne({ filter: { email } }); + } + + // Add custom methods which uses an aggregation pipeline + public async getUserGroups(): Promise { + const collection = await this._getCollection(); + + const result = await aggregate({ + collection, + aggregation: [ + { + $group: { + _id: "$group", + users: { $push: "$$ROOT" }, + }, + }, + ], + mapDocument: (doc: Document) => { + return { + name: doc._id, + users: doc.users.map((user: Document) => + this._mapDocumentToEntity(user) + ), + }; + }, + outputType: UserGroup, + }); + + return result; + } +} + +export const userRepository = new UserRepository(); +``` diff --git a/docs/guide/utils/get-env.md b/docs/guide/utils/get-env.md index dcdfe7e..830ad2f 100644 --- a/docs/guide/utils/get-env.md +++ b/docs/guide/utils/get-env.md @@ -1 +1,133 @@ -# getEnv() +--- +prev: + text: "Nimbus Utils" + link: "/guide/utils" + +next: false +--- + +# getEnv + +The `getEnv` function retrieves environment variables with validation. It throws an exception if any requested variables are missing, ensuring your application fails fast with clear error messages. + +## Basic Usage + +```typescript +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["DATABASE_URL", "API_KEY", "PORT"], +}); + +console.log(env.PORT); +``` + +## Function Signature + +```typescript +getEnv({ variables: string[] }): Record +``` + +| Parameter | Type | Description | +| ----------- | ---------- | ----------------------------------- | +| `variables` | `string[]` | Array of environment variable names | + +| Returns | Description | +| ------------------------ | --------------------------------------------- | +| `Record` | Object with variable names as keys and values | + +## Error Handling + +If any requested variables are undefined, `getEnv` throws a `GenericException` with details about all missing variables: + +```typescript +import { getEnv } from "@nimbus/utils"; + +try { + const env = getEnv({ + variables: ["MISSING_VAR_1", "MISSING_VAR_2"], + }); +} catch (error) { + // GenericException with: + // - message: "Undefined environment variables" + // - details: { undefinedVariables: ["MISSING_VAR_1", "MISSING_VAR_2"] } +} +``` + +The error is also logged before throwing: + +``` +[Nimbus] ERROR :: Undefined environment variables +{ undefinedVariables: ["MISSING_VAR_1", "MISSING_VAR_2"] } +``` + +## Use Cases + +### Application Configuration + +```typescript +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["NODE_ENV", "PORT", "DATABASE_URL", "REDIS_URL", "JWT_SECRET"], +}); + +export const config = { + nodeEnv: env.NODE_ENV, + port: parseInt(env.PORT, 10), + databaseUrl: env.DATABASE_URL, + redisUrl: env.REDIS_URL, + jwtSecret: env.JWT_SECRET, +}; +``` + +### MongoDB Connection + +```typescript +import { MongoConnectionManager } from "@nimbus/mongodb"; +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["MONGO_URL", "MONGO_DB"], +}); + +const mongoManager = MongoConnectionManager.getInstance(env.MONGO_URL, { + mongoClientOptions: { appName: "my-app" }, +}); + +export const getCollection = (name: string) => + mongoManager.getCollection(env.MONGO_DB, name); +``` + +### Repository Configuration + +```typescript +import { MongoDBRepository } from "@nimbus/mongodb"; +import { getEnv } from "@nimbus/utils"; +import { mongoManager } from "./mongodb.ts"; + +class UserRepository extends MongoDBRepository { + 
constructor() { + const env = getEnv({ variables: ["MONGO_DB"] }); + + super( + () => mongoManager.getCollection(env.MONGO_DB, "users"), + UserSchema, + "User" + ); + } +} +``` + +### External Service Configuration + +```typescript +import { getEnv } from "@nimbus/utils"; + +const env = getEnv({ + variables: ["STRIPE_API_KEY", "STRIPE_WEBHOOK_SECRET"], +}); + +export const stripeClient = new Stripe(env.STRIPE_API_KEY); +export const webhookSecret = env.STRIPE_WEBHOOK_SECRET; +``` diff --git a/docs/guide/utils/index.md b/docs/guide/utils/index.md index 6ebe4cb..eae9871 100644 --- a/docs/guide/utils/index.md +++ b/docs/guide/utils/index.md @@ -1,7 +1,7 @@ --- prev: - text: "CRUD+" - link: "/guide/mongodb/crud" + text: "handleMongoError" + link: "/guide/mongodb/handle-mongo-error" next: text: "getEnv" diff --git a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts index 0219a23..4c01f24 100644 --- a/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts +++ b/examples/hono-demo/src/iam/users/core/commands/addUser.command.ts @@ -9,6 +9,7 @@ export const addUserInputSchema = z.object({ email: z.email(), firstName: z.string(), lastName: z.string(), + group: z.string(), }); export const addUserCommandSchema = commandSchema.extend({ @@ -33,6 +34,7 @@ export const addUser = ( email: email, firstName: command.data.firstName, lastName: command.data.lastName, + group: command.data.group, createdAt: new Date().toISOString(), updatedAt: new Date().toISOString(), }; diff --git a/examples/hono-demo/src/iam/users/core/domain/user.ts b/examples/hono-demo/src/iam/users/core/domain/user.ts index f064353..bfb8f2c 100644 --- a/examples/hono-demo/src/iam/users/core/domain/user.ts +++ b/examples/hono-demo/src/iam/users/core/domain/user.ts @@ -3,6 +3,7 @@ import { z } from 'zod'; export const User = z.object({ _id: z.string().length(24), email: z.email(), + group: z.string(), firstName: z.string(), lastName: z.string(), createdAt: z.iso.datetime(), diff --git a/examples/hono-demo/src/iam/users/core/domain/userGroup.ts b/examples/hono-demo/src/iam/users/core/domain/userGroup.ts new file mode 100644 index 0000000..5160298 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/domain/userGroup.ts @@ -0,0 +1,9 @@ +import { z } from 'zod'; +import { User } from './user.ts'; + +export const UserGroup = z.object({ + name: z.string(), + users: z.array(User), +}); + +export type UserGroup = z.infer; diff --git a/examples/hono-demo/src/iam/users/core/queries/getUserGroups.ts b/examples/hono-demo/src/iam/users/core/queries/getUserGroups.ts new file mode 100644 index 0000000..31bddb4 --- /dev/null +++ b/examples/hono-demo/src/iam/users/core/queries/getUserGroups.ts @@ -0,0 +1,10 @@ +import { querySchema } from '@nimbus/core'; +import { z } from 'zod'; + +export const GET_USER_GROUPS_QUERY_TYPE = 'at.overlap.nimbus.get-user-groups'; + +export const getUserGroupsQuerySchema = querySchema.extend({ + type: z.literal(GET_USER_GROUPS_QUERY_TYPE), + data: z.object({}), +}); +export type GetUserGroupsQuery = z.infer; diff --git a/examples/hono-demo/src/iam/users/shell/http/router.ts b/examples/hono-demo/src/iam/users/shell/http/router.ts index 8e50c72..874561c 100644 --- a/examples/hono-demo/src/iam/users/shell/http/router.ts +++ b/examples/hono-demo/src/iam/users/shell/http/router.ts @@ -9,6 +9,10 @@ import { GET_USER_QUERY_TYPE, GetUserQuery, } from '../../core/queries/getUser.query.ts'; +import { + GET_USER_GROUPS_QUERY_TYPE, + 
GetUserGroupsQuery, +} from '../../core/queries/getUserGroups.ts'; const usersRouter = new Hono(); @@ -31,6 +35,24 @@ usersRouter.post( }, ); +usersRouter.get( + '/groups', + async (c) => { + const correlationId = getCorrelationId(c); + + const query = createQuery({ + type: GET_USER_GROUPS_QUERY_TYPE, + source: 'nimbus.overlap.at', + correlationid: correlationId, + data: {}, + }); + + const result = await getRouter('default').route(query); + + return c.json(result); + }, +); + usersRouter.get( '/:id', async (c) => { diff --git a/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts b/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts new file mode 100644 index 0000000..34e819a --- /dev/null +++ b/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts @@ -0,0 +1,8 @@ +import { GetUserGroupsQuery } from '../../../core/queries/getUserGroups.ts'; +import { userRepository } from '../../mongodb/user.repository.ts'; + +export const getUserGroupsQueryHandler = async (query: GetUserGroupsQuery) => { + const result = await userRepository.getUserGroups(); + + return result; +}; diff --git a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts index 5367481..a5fdeea 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/registerUserMessages.ts @@ -8,9 +8,14 @@ import { GET_USER_QUERY_TYPE, getUserQuerySchema, } from '../../core/queries/getUser.query.ts'; +import { + GET_USER_GROUPS_QUERY_TYPE, + getUserGroupsQuerySchema, +} from '../../core/queries/getUserGroups.ts'; import { addUserCommandHandler } from './commands/addUser.command.ts'; import { userAddedEventHandler } from './events/userAdded.event.ts'; import { getUserQueryHandler } from './queries/getUser.query.ts'; +import { getUserGroupsQueryHandler } from './queries/getUserGroups.query.ts'; export const registerUserMessages = () => { const eventBus = getEventBus('default'); @@ -32,4 +37,9 @@ export const registerUserMessages = () => { getUserQueryHandler, getUserQuerySchema, ); + router.register( + GET_USER_GROUPS_QUERY_TYPE, + getUserGroupsQueryHandler, + getUserGroupsQuerySchema, + ); }; diff --git a/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts b/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts index 7206616..516c396 100644 --- a/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts +++ b/examples/hono-demo/src/iam/users/shell/mongodb/user.collection.ts @@ -10,6 +10,7 @@ export const USERS_COLLECTION: MongoCollectionDefinition = { 'email', 'firstName', 'lastName', + 'group', 'createdAt', 'updatedAt', ], @@ -23,6 +24,9 @@ export const USERS_COLLECTION: MongoCollectionDefinition = { lastName: { bsonType: 'string', }, + group: { + bsonType: 'string', + }, createdAt: { bsonType: 'date', }, @@ -37,6 +41,7 @@ export const USERS_COLLECTION: MongoCollectionDefinition = { { key: { email: 1 }, unique: true }, { key: { firstName: 1 } }, { key: { lastName: 1 } }, + { key: { group: 1 } }, { key: { createdAt: 1 } }, { key: { updatedAt: 1 } }, ], diff --git a/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts b/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts index e972e3f..fc1c3bd 100644 --- a/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts +++ 
b/examples/hono-demo/src/iam/users/shell/mongodb/user.repository.ts @@ -1,8 +1,9 @@ -import { MongoDBRepository } from '@nimbus/mongodb'; +import { aggregate, MongoDBRepository } from '@nimbus/mongodb'; import { getEnv } from '@nimbus/utils'; import { Document, ObjectId } from 'mongodb'; import { mongoManager } from '../../../../shared/shell/mongodb.ts'; import { User } from '../../core/domain/user.ts'; +import { UserGroup } from '../../core/domain/userGroup.ts'; import { USERS_COLLECTION } from './user.collection.ts'; class UserRepository extends MongoDBRepository { @@ -27,6 +28,7 @@ class UserRepository extends MongoDBRepository { email: doc.email, firstName: doc.firstName, lastName: doc.lastName, + group: doc.group, createdAt: doc.createdAt.toISOString(), updatedAt: doc.updatedAt.toISOString(), }); @@ -38,10 +40,38 @@ class UserRepository extends MongoDBRepository { email: user.email, firstName: user.firstName, lastName: user.lastName, + group: user.group, createdAt: new Date(user.createdAt), updatedAt: new Date(user.updatedAt), }; } + + public async getUserGroups(): Promise { + const collection = await this._getCollection(); + + const result = await aggregate({ + collection, + aggregation: [ + { + $group: { + _id: '$group', + users: { $push: '$$ROOT' }, + }, + }, + ], + mapDocument: (doc: Document) => { + return { + name: doc._id, + users: doc.users.map((user: Document) => + this._mapDocumentToEntity(user) + ), + }; + }, + outputType: UserGroup, + }); + + return result; + } } export const userRepository = new UserRepository(); diff --git a/packages/hono/src/lib/middleware/logger.ts b/packages/hono/src/lib/middleware/logger.ts index 182ae86..2581119 100644 --- a/packages/hono/src/lib/middleware/logger.ts +++ b/packages/hono/src/lib/middleware/logger.ts @@ -62,6 +62,7 @@ const time = (start: number) => { * ``` */ export const logger = (options?: LoggerOptions): MiddlewareHandler => { + const enableTracing = options?.enableTracing ?? true; const tracerName = options?.tracerName ?? 'nimbus'; const tracer = trace.getTracer(tracerName); @@ -75,7 +76,7 @@ export const logger = (options?: LoggerOptions): MiddlewareHandler => { correlationId, }); - if (options?.enableTracing) { + if (enableTracing) { // Extract trace context from incoming headers (traceparent, tracestate) const parentContext = propagation.extract( context.active(), diff --git a/packages/mongodb/src/index.ts b/packages/mongodb/src/index.ts index 47a50b5..de221a9 100644 --- a/packages/mongodb/src/index.ts +++ b/packages/mongodb/src/index.ts @@ -1,3 +1,4 @@ +export * from './lib/crud/aggregate.ts'; export * from './lib/crud/bulkWrite.ts'; export * from './lib/crud/countDocuments.ts'; export * from './lib/crud/deleteMany.ts'; diff --git a/packages/mongodb/src/lib/crud/aggregate.ts b/packages/mongodb/src/lib/crud/aggregate.ts index c6d502c..60b4ace 100644 --- a/packages/mongodb/src/lib/crud/aggregate.ts +++ b/packages/mongodb/src/lib/crud/aggregate.ts @@ -35,14 +35,14 @@ export type Aggregate = ( * * @returns {Promise} The aggregated documents. 
*/ -export const aggregate: Aggregate = ({ +export const aggregate: Aggregate = ({ collection, aggregation, mapDocument, outputType, options, -}) => { - return withSpan('aggregate', collection, async () => { +}: AggregateInput) => { + return withSpan('aggregate', collection, async () => { let res: Document[] = []; try { @@ -53,7 +53,7 @@ export const aggregate: Aggregate = ({ } try { - return res.map((item) => outputType.parse(mapDocument(item))); + return res.map((item) => outputType.parse(mapDocument(item)) as TData); } catch (error) { const exception = error instanceof Error ? new GenericException().fromError(error) From 96c2898ac67943541e8d9ea5c55a715c4673172f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 13:15:47 +0100 Subject: [PATCH 26/37] complete docs --- docs/guide/core/index.md | 8 ++--- docs/guide/what-is-nimbus.md | 60 +++++++++++++++++++++++++++++++++--- 2 files changed, 59 insertions(+), 9 deletions(-) diff --git a/docs/guide/core/index.md b/docs/guide/core/index.md index 9ec8846..c1efa02 100644 --- a/docs/guide/core/index.md +++ b/docs/guide/core/index.md @@ -1,11 +1,11 @@ --- prev: - text: "Project Structure" - link: "/guide/project-structure" + text: "Quickstart" + link: "/guide/quickstart" next: text: "Observability" - link: "/guide/core/observability" + link: "/guide/observability" --- # Nimbus Core Package @@ -24,7 +24,7 @@ You can find the full example on GitHub: [hono-demo](https://github.com/overlap- The core package provides: -- **[Observability](/guide/core/observability)** - Built-in logging, tracing, and metrics using OpenTelemetry +- **[Observability](/guide/observability)** - Built-in logging, tracing, and metrics using OpenTelemetry - **[Commands](/guide/core/commands)** - Write operations following the CloudEvents specification - **[Queries](/guide/core/queries)** - Read operations for fetching data - **[Events](/guide/core/events)** - Domain events for reactive architectures diff --git a/docs/guide/what-is-nimbus.md b/docs/guide/what-is-nimbus.md index 51e020c..26080d9 100644 --- a/docs/guide/what-is-nimbus.md +++ b/docs/guide/what-is-nimbus.md @@ -1,20 +1,20 @@ # What is Nimbus? -Yet another framework to build software ... +Nimbus is a lightweight TypeScript framework for building event-driven applications. It provides type-safe messaging patterns (Commands, Queries, Events) following the [CloudEvents](https://cloudevents.io/) specification, with built-in observability powered by [OpenTelemetry](https://opentelemetry.io/). ## Philosophy -... with some concepts to think things differently. +Nimbus is built on a few core principles that set it apart from other TypeScript frameworks. -### Simplicity +**Simplicity** Nimbus aims to keep things simple and to avoid overly complex OOP or FP principles. No complex inheritance hierarchies, no dependency injection, no decorators. Just explicit code that is easy to understand and reason about. -### No Framework Magic +**No Framework Magic** Three lines of code to build a whole API is great, until something goes wrong and you have no clue why the magic stopped working. -### Flat and easy learning curve +**Flat and easy learning curve** There are already great Frameworks like [NestJS](https://nestjs.com/) and [Effect](https://effect.website/) out there for building TypeScript applications. @@ -22,6 +22,56 @@ While those frameworks heavily emphasize either object-oriented or functional pr Be productive right from the start. +## Who Is This For? 
+ +Nimbus is a good fit if you are: + +- Building event-driven applications +- Looking for explicit, traceable code without hidden magic +- Wanting built-in observability without complex setup +- Preferring a lightweight framework over heavyweight solutions + +## Key Features + +- **CloudEvents-based messaging** - Commands, Queries, and Events following the industry-standard [CloudEvents](https://cloudevents.io/) specification +- **Built-in observability** - Logging, tracing, and metrics via [OpenTelemetry](https://opentelemetry.io/) with zero boilerplate +- **Type-safe validation** - Message validation with [Zod](https://zod.dev/) schemas +- **MongoDB integration** - Repository pattern and CRUD operations with automatic tracing +- **Hono middleware** - Ready-to-use middleware for HTTP APIs +- **Runtime flexibility** - Deno-first with NPM and Bun support + +## A Taste of Nimbus + +Here's a quick look at how you define and handle a command in Nimbus: + +```typescript +import { commandSchema, createCommand, getRouter } from "@nimbus/core"; +import { z } from "zod"; + +// Define a type-safe command schema +const addUserCommandSchema = commandSchema.extend({ + type: z.literal("com.example.add-user"), + data: z.object({ + email: z.string().email(), + name: z.string(), + }), +}); + +type AddUserCommand = z.infer; + +// Register a handler with automatic validation and tracing +const router = getRouter("MyRouter"); + +router.register( + "com.example.add-user", + async (command: AddUserCommand) => { + // Your business logic here + return { userId: "123", email: command.data.email }; + }, + addUserCommandSchema +); +``` + ## Architecture Recommendation It would be valuable to build your application around the idea of a **Pure Core** and an **Imperative Shell**. It aligns well with Hexagonal Architecture (Ports & Adapters) and is a good foundation for patterns like CQRS and Event Sourcing. From c6ccb336f084f63063745bb32bb460d861d8c89c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 13:49:43 +0100 Subject: [PATCH 27/37] removes oak package --- deno.json | 27 ++- packages/oak/README.md | 14 -- packages/oak/deno.json | 39 ---- packages/oak/src/index.ts | 1 - packages/oak/src/lib/router.ts | 325 --------------------------------- 5 files changed, 25 insertions(+), 381 deletions(-) delete mode 100644 packages/oak/README.md delete mode 100644 packages/oak/deno.json delete mode 100644 packages/oak/src/index.ts delete mode 100644 packages/oak/src/lib/router.ts diff --git a/deno.json b/deno.json index 003be18..cf24954 100644 --- a/deno.json +++ b/deno.json @@ -5,10 +5,33 @@ "workspace": [ "./packages/core", "./packages/mongodb", - "./packages/oak", "./packages/hono", "./packages/utils", "./examples/hono-demo" ], - "nodeModulesDir": "none" + "nodeModulesDir": "none", + "fmt": { + "include": [ + "examples/", + "packages/" + ] + }, + "lint": { + "include": [ + "examples/", + "packages/" + ] + }, + "check": { + "include": [ + "examples/", + "packages/" + ] + }, + "test": { + "include": [ + "examples/", + "packages/" + ] + } } \ No newline at end of file diff --git a/packages/oak/README.md b/packages/oak/README.md deleted file mode 100644 index 1d7dfc6..0000000 --- a/packages/oak/README.md +++ /dev/null @@ -1,14 +0,0 @@ -Nimbus - -# Nimbus Oak - -The Oak package of the Nimbus framework. - -Refer to the [Nimbus main repository](https://github.com/overlap-dev/Nimbus) or the [Nimbus documentation](https://nimbus.overlap.at) for more information about the Nimbus framework. 
- -# License - -The MIT License (MIT) diff --git a/packages/oak/deno.json b/packages/oak/deno.json deleted file mode 100644 index 712d36d..0000000 --- a/packages/oak/deno.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "@nimbus/oak", - "version": "0.17.0", - "license": "MIT", - "author": "Daniel Gördes (https://overlap.at)", - "repository": { - "type": "git", - "url": "https://github.com/overlap-dev/Nimbus" - }, - "bugs": { - "url": "https://github.com/overlap-dev/Nimbus/issues" - }, - "homepage": "https://nimbus.overlap.at", - "exports": "./src/index.ts", - "fmt": { - "include": ["src/"], - "useTabs": false, - "lineWidth": 80, - "indentWidth": 4, - "semiColons": true, - "singleQuote": true, - "proseWrap": "always" - }, - "lint": { - "include": ["src/"], - "rules": { - "exclude": ["no-explicit-any"] - } - }, - "test": { - "include": ["src/"] - }, - "imports": { - "@std/ulid": "jsr:@std/ulid@^1.0.0", - "@oak/oak": "jsr:@oak/oak@^17.1.6", - "ajv": "npm:ajv@^8.17.1", - "zod": "npm:zod@^3.24.1" - } -} diff --git a/packages/oak/src/index.ts b/packages/oak/src/index.ts deleted file mode 100644 index 02c4506..0000000 --- a/packages/oak/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './lib/router.ts'; diff --git a/packages/oak/src/lib/router.ts b/packages/oak/src/lib/router.ts deleted file mode 100644 index 2e67601..0000000 --- a/packages/oak/src/lib/router.ts +++ /dev/null @@ -1,325 +0,0 @@ -import { - getLogger, - type MessageRouter, -} from '@nimbus/core'; -import type { Context } from '@oak/oak/context'; -import { - Router as OakRouter, - type RouterContext, - type RouterOptions, -} from '@oak/oak/router'; -import { ulid } from '@std/ulid'; - -/** - * Options for handling errors in Oak routes. - */ -export type OakErrorHandlerOptions = { - onError?: (error: any, ctx: Context) => void; -}; - -/** - * Function to extract data from Oak RouterContext for a message. - */ -export type DataExtractor = (ctx: RouterContext) => TData | Promise; - -/** - * Options for registering a command route. - */ -export type CommandRouteOptions = { - path: string; - messageType: string; - router: MessageRouter; - extractData?: DataExtractor; - dataschema?: string; - onError?: (error: any, ctx: Context) => void; -}; - -/** - * Options for registering a query route. - */ -export type QueryRouteOptions = { - path: string; - messageType: string; - router: MessageRouter; - extractData: DataExtractor; - dataschema?: string; - onError?: (error: any, ctx: Context) => void; -}; - -/** - * Default error handler for Oak routes that maps Nimbus exceptions to HTTP responses. - */ -export function handleOakError( - error: any, - ctx: Context, - customHandler?: (error: any, ctx: Context) => void, -): void { - if (customHandler) { - customHandler(error, ctx); - return; - } - - getLogger().error({ - category: 'Nimbus', - message: error.message, - error, - }); - - const statusCode = error.statusCode ?? 500; - ctx.response.status = statusCode; - - if (statusCode < 500) { - ctx.response.body = { - statusCode, - ...(error.details ? { code: error.name } : {}), - ...(error.message ? { message: error.message } : {}), - ...(error.details ? { details: error.details } : {}), - }; - } else { - ctx.response.body = { - message: 'Internal server error', - }; - } -} - -/** - * The NimbusOakRouter extends Oak Router to provide convenient methods - * for routing HTTP requests to Nimbus MessageRouter handlers. - * - * It automatically constructs CloudEvents message envelopes from HTTP requests - * and handles response mapping. 
- * - * @example - * ```ts - * import { NimbusOakRouter } from "@nimbus/oak"; - * import { MessageRouter } from "@nimbus/core"; - * - * const queryRouter = new MessageRouter('query'); - * queryRouter.register('at.overlap.nimbus.get-recipe', getRecipeHandler); - * - * const commandRouter = new MessageRouter('command'); - * commandRouter.register('at.overlap.nimbus.add-recipe', addRecipeHandler); - * - * const httpRouter = new NimbusOakRouter(); - * - * // Query route (GET) - * httpRouter.query({ - * path: '/recipes/:slug', - * messageType: 'at.overlap.nimbus.get-recipe', - * router: queryRouter, - * extractData: (ctx) => ({ slug: ctx.params.slug }), - * }); - * - * // Command route (POST) - * httpRouter.command({ - * path: '/recipes', - * messageType: 'at.overlap.nimbus.add-recipe', - * router: commandRouter, - * }); - * ``` - */ -export class NimbusOakRouter extends OakRouter { - constructor(opts: RouterOptions = {}) { - super(opts); - } - - /** - * Register a GET route that maps to a query message. - * - * Automatically constructs a Query CloudEvents message from the HTTP request - * and routes it through the provided MessageRouter. - * - * @param options - Route configuration - * - * @example - * ```ts - * router.query({ - * path: '/recipes/:slug', - * messageType: 'at.overlap.nimbus.get-recipe', - * router: queryRouter, - * extractData: (ctx) => ({ slug: ctx.params.slug }), - * }); - * ``` - */ - query(options: QueryRouteOptions): void { - super.get(options.path, async (ctx: RouterContext) => { - try { - const data = await options.extractData(ctx); - - const query = { - specversion: '1.0' as const, - id: ulid(), - correlationid: ctx.state.correlationId ?? ulid(), - time: new Date().toISOString(), - source: ctx.request.url.origin, - type: options.messageType, - data, - datacontenttype: 'application/json' as const, - ...(options.dataschema && { dataschema: options.dataschema }), - }; - - const result = await options.router.route(query); - - ctx.response.status = 200; - ctx.response.body = result as any; - } catch (error: any) { - handleOakError(error, ctx, options.onError); - } - }); - } - - /** - * Register a POST route that maps to a command message. - * - * Automatically constructs a Command CloudEvents message from the HTTP request - * and routes it through the provided MessageRouter. - * - * By default, extracts data from the request body as JSON. - * - * @param options - Route configuration - * - * @example - * ```ts - * // Using default body extraction - * router.command({ - * path: '/recipes', - * messageType: 'at.overlap.nimbus.add-recipe', - * router: commandRouter, - * }); - * - * // Custom data extraction - * router.command({ - * path: '/recipes/:slug', - * messageType: 'at.overlap.nimbus.update-recipe', - * router: commandRouter, - * extractData: async (ctx) => ({ - * slug: ctx.params.slug, - * ...await ctx.request.body.json() - * }), - * }); - * ``` - */ - command(options: CommandRouteOptions): void { - const extractData = options.extractData ?? - (async (ctx: RouterContext) => await ctx.request.body.json()); - - super.post(options.path, async (ctx: RouterContext) => { - try { - const data = await extractData(ctx); - - const command = { - specversion: '1.0' as const, - id: ulid(), - correlationid: ctx.state.correlationId ?? 
ulid(), - time: new Date().toISOString(), - source: ctx.request.url.origin, - type: options.messageType, - data, - datacontenttype: 'application/json' as const, - ...(options.dataschema && { dataschema: options.dataschema }), - }; - - const result = await options.router.route(command); - - ctx.response.status = 201; - ctx.response.body = result as any; - } catch (error: any) { - handleOakError(error, ctx, options.onError); - } - }); - } - - /** - * Register a PUT route that maps to a command message. - * - * Similar to command() but uses PUT method (for updates/replacements). - * - * @param options - Route configuration - * - * @example - * ```ts - * router.commandPut({ - * path: '/recipes/:slug', - * messageType: 'at.overlap.nimbus.update-recipe', - * router: commandRouter, - * extractData: async (ctx) => ({ - * slug: ctx.params.slug, - * ...await ctx.request.body.json() - * }), - * }); - * ``` - */ - commandPut(options: CommandRouteOptions): void { - const extractData = options.extractData ?? - (async (ctx: RouterContext) => await ctx.request.body.json()); - - super.put(options.path, async (ctx: RouterContext) => { - try { - const data = await extractData(ctx); - - const command = { - specversion: '1.0' as const, - id: ulid(), - correlationid: ctx.state.correlationId ?? ulid(), - time: new Date().toISOString(), - source: ctx.request.url.origin, - type: options.messageType, - data, - datacontenttype: 'application/json' as const, - ...(options.dataschema && { dataschema: options.dataschema }), - }; - - const result = await options.router.route(command); - - ctx.response.status = 200; - ctx.response.body = result as any; - } catch (error: any) { - handleOakError(error, ctx, options.onError); - } - }); - } - - /** - * Register a DELETE route that maps to a command message. - * - * @param options - Route configuration - * - * @example - * ```ts - * router.commandDelete({ - * path: '/recipes/:slug', - * messageType: 'at.overlap.nimbus.delete-recipe', - * router: commandRouter, - * extractData: (ctx) => ({ slug: ctx.params.slug }), - * }); - * ``` - */ - commandDelete(options: CommandRouteOptions): void { - const extractData = options.extractData ?? - ((ctx: RouterContext) => ({ id: ctx.params.id })); - - super.delete(options.path, async (ctx: RouterContext) => { - try { - const data = await extractData(ctx); - - const command = { - specversion: '1.0' as const, - id: ulid(), - correlationid: ctx.state.correlationId ?? 
ulid(), - time: new Date().toISOString(), - source: ctx.request.url.origin, - type: options.messageType, - data, - datacontenttype: 'application/json' as const, - ...(options.dataschema && { dataschema: options.dataschema }), - }; - - await options.router.route(command); - - ctx.response.status = 204; - } catch (error: any) { - handleOakError(error, ctx, options.onError); - } - }); - } -} From 60a8e08427ae719584f5b7425135e8e84f200407 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 13:52:38 +0100 Subject: [PATCH 28/37] fixes formatting --- .github/workflows/checks.yaml | 46 ++++--------------- .github/workflows/publish.yaml | 8 ++-- .zed/settings.json | 24 +++++----- README.md | 13 ++++-- RELEASE.md | 12 ++--- .../messages/queries/getUserGroups.query.ts | 3 +- packages/core/deno.json | 3 +- packages/mongodb/src/lib/crud/aggregate.ts | 4 +- .../mongodb/src/lib/crud/findOneAndUpdate.ts | 6 ++- 9 files changed, 48 insertions(+), 71 deletions(-) diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index 91e476a..e408d0e 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -12,7 +12,7 @@ concurrency: jobs: deploy: - name: Build & Deploy + name: Format, Lint, Type Check & Test runs-on: ubuntu-latest env: DENO_DIR: deno_cache_dir @@ -28,46 +28,16 @@ jobs: - uses: denoland/setup-deno@v2 with: - deno-version: v2.3.x + deno-version: v2 - # Check if the code is formatted correctly. - - name: Check formatting (core) + - name: Check formatting run: deno fmt --check - working-directory: ./packages/core - - name: Check formatting (mongodb) - run: deno fmt --check - working-directory: ./packages/mongodb - - name: Check formatting (oak) - run: deno fmt --check - working-directory: ./packages/oak - - name: Check formatting (utils) - run: deno fmt --check - working-directory: ./packages/utils - # Scan the code for syntax errors and style issues. - - name: Lint (core) - run: deno lint - working-directory: ./packages/core - - name: Lint (mongodb) - run: deno lint - working-directory: ./packages/mongodb - - name: Lint (oak) + - name: Lint run: deno lint - working-directory: ./packages/oak - - name: Lint (utils) - run: deno lint - working-directory: ./packages/utils - # Run all test files in the repository and collect code coverage. 
- - name: Test (core) - run: deno test --allow-all --permit-no-files - working-directory: ./packages/core - - name: Test (mongodb) - run: deno test --allow-all --permit-no-files - working-directory: ./packages/mongodb - - name: Test (oak) - run: deno test --allow-all --permit-no-files - working-directory: ./packages/oak - - name: Test (utils) + - name: Type check + run: deno check + + - name: Test run: deno test --allow-all --permit-no-files - working-directory: ./packages/utils diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index e1f0c92..2895c49 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -17,18 +17,18 @@ jobs: - uses: denoland/setup-deno@v2 with: - deno-version: v2.3.x + deno-version: v2 - name: Publish core package working-directory: ./packages/core - run: deno publish --allow-slow-types + run: deno publish - name: Publish mongodb package working-directory: ./packages/mongodb run: deno publish - - name: Publish oak package - working-directory: ./packages/oak + - name: Publish hono package + working-directory: ./packages/hono run: deno publish - name: Publish utils package diff --git a/.zed/settings.json b/.zed/settings.json index 1ac2386..ed5f873 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -3,10 +3,10 @@ "deno": { "settings": { "deno": { - "enable": true, - }, - }, - }, + "enable": true + } + } + } }, "languages": { "JavaScript": { @@ -14,27 +14,27 @@ "deno", "!typescript-language-server", "!vtsls", - "!eslint", + "!eslint" ], - "formatter": "language_server", + "formatter": "language_server" }, "TypeScript": { "language_servers": [ "deno", "!typescript-language-server", "!vtsls", - "!eslint", + "!eslint" ], - "formatter": "language_server", + "formatter": "language_server" }, "TSX": { "language_servers": [ "deno", "!typescript-language-server", "!vtsls", - "!eslint", + "!eslint" ], - "formatter": "language_server", - }, - }, + "formatter": "language_server" + } + } } diff --git a/README.md b/README.md index d37f31c..2512c27 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,20 @@ -Nimbus # Installation -Find all packages and installation instructions at [jsr.io/@nimbus](https://jsr.io/@nimbus). +Find all packages and installation instructions at +[jsr.io/@nimbus](https://jsr.io/@nimbus). # Documentation -Find the full documentation at [https://nimbus.overlap.at/](https://nimbus.overlap.at/). +Find the full documentation at +[https://nimbus.overlap.at/](https://nimbus.overlap.at/). -In addition code documentation is automatically generated and can be found for each package at [jsr.io/@nimbus](https://jsr.io/@nimbus). +In addition code documentation is automatically generated and can be found for +each package at [jsr.io/@nimbus](https://jsr.io/@nimbus). # License diff --git a/RELEASE.md b/RELEASE.md index da82d5e..3ecb5ee 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,6 +1,8 @@ # How to release a new version of the packages -For each package make sure the version in the `packages//deno.json` is set correctly and stick to semantic versioning. +For each package make sure the version in the +`packages//deno.json` is set correctly and stick to semantic +versioning. Once everything is ready make a new commit with a message of this type: @@ -8,14 +10,12 @@ Once everything is ready make a new commit with a message of this type: chore: release 0.0.0 ``` -Push to `main` and create a new release on GitHub from there on the GitHub workflow will take care of the rest. 
+Push to `main` and create a new release on GitHub from there on the GitHub +workflow will take care of the rest. ## Manually publish to JSR ``` cd packages/ -deno publish --allow-slow-types +deno publish ``` - -**Slow Types** -Because of some Zod inferred types, the `--allow-slow-types` flag is required to publish the package to JSR. diff --git a/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts b/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts index 34e819a..2844f3c 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/queries/getUserGroups.query.ts @@ -1,7 +1,6 @@ -import { GetUserGroupsQuery } from '../../../core/queries/getUserGroups.ts'; import { userRepository } from '../../mongodb/user.repository.ts'; -export const getUserGroupsQueryHandler = async (query: GetUserGroupsQuery) => { +export const getUserGroupsQueryHandler = async () => { const result = await userRepository.getUserGroups(); return result; diff --git a/packages/core/deno.json b/packages/core/deno.json index 1ef8d6e..d735a70 100644 --- a/packages/core/deno.json +++ b/packages/core/deno.json @@ -29,8 +29,7 @@ ], "rules": { "exclude": [ - "no-explicit-any", - "no-slow-types" + "no-explicit-any" ] } }, diff --git a/packages/mongodb/src/lib/crud/aggregate.ts b/packages/mongodb/src/lib/crud/aggregate.ts index 60b4ace..5807e63 100644 --- a/packages/mongodb/src/lib/crud/aggregate.ts +++ b/packages/mongodb/src/lib/crud/aggregate.ts @@ -53,7 +53,9 @@ export const aggregate: Aggregate = ({ } try { - return res.map((item) => outputType.parse(mapDocument(item)) as TData); + return res.map((item) => + outputType.parse(mapDocument(item)) as TData + ); } catch (error) { const exception = error instanceof Error ? 
new GenericException().fromError(error) diff --git a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts index 5381c66..ce1a626 100644 --- a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts +++ b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts @@ -57,7 +57,11 @@ export const findOneAndUpdate: FindOneAndUpdate = ({ try { if (options) { - res = await collection.findOneAndUpdate(filter, update, options); + res = await collection.findOneAndUpdate( + filter, + update, + options, + ); } else { res = await collection.findOneAndUpdate(filter, update); } From f125aff02997cd3c17edb850f87ac739b150e87c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 14:09:48 +0100 Subject: [PATCH 29/37] fixes some linting and type issues --- CLAUDE.md | 10 ++ deno.json | 3 + deno.lock | 98 +------------------ packages/core/src/lib/message/command.ts | 31 +++++- packages/core/src/lib/message/event.ts | 31 +++++- packages/core/src/lib/message/query.ts | 30 +++++- packages/hono/src/lib/onError.ts | 5 +- packages/mongodb/src/lib/crud/find.ts | 8 +- packages/mongodb/src/lib/crud/findOne.ts | 6 +- .../mongodb/src/lib/crud/findOneAndDelete.ts | 6 +- .../mongodb/src/lib/crud/findOneAndReplace.ts | 6 +- .../mongodb/src/lib/crud/findOneAndUpdate.ts | 6 +- packages/mongodb/src/lib/repository.ts | 2 +- 13 files changed, 125 insertions(+), 117 deletions(-) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..4b873a9 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,10 @@ +# Format, Lint, Type Check & Test + +Whenever something is changed in the examples or packages, run the following commands to format, lint, type check and test the code. Running these commands from the repository root will check all examples and packages. 
+ +``` +deno fmt --check +deno lint +deno check +deno test +``` diff --git a/deno.json b/deno.json index cf24954..fa10ed8 100644 --- a/deno.json +++ b/deno.json @@ -2,6 +2,9 @@ "compilerOptions": { "strict": true }, + "exclude": [ + "docs/" + ], "workspace": [ "./packages/core", "./packages/mongodb", diff --git a/deno.lock b/deno.lock index 3157030..e30337d 100644 --- a/deno.lock +++ b/deno.lock @@ -1,70 +1,27 @@ { "version": "5", "specifiers": { - "jsr:@oak/commons@1": "1.0.1", - "jsr:@oak/oak@^17.1.6": "17.2.0", - "jsr:@std/assert@1": "1.0.15", "jsr:@std/assert@^1.0.10": "1.0.15", - "jsr:@std/bytes@1": "1.0.6", - "jsr:@std/crypto@1": "1.0.5", "jsr:@std/dotenv@*": "0.225.3", "jsr:@std/dotenv@~0.225.6": "0.225.6", - "jsr:@std/encoding@1": "1.0.10", - "jsr:@std/encoding@^1.0.10": "1.0.10", "jsr:@std/fmt@^1.0.4": "1.0.5", "jsr:@std/fmt@^1.0.5": "1.0.5", - "jsr:@std/http@1": "1.0.21", - "jsr:@std/internal@^1.0.10": "1.0.12", "jsr:@std/internal@^1.0.12": "1.0.12", - "jsr:@std/media-types@1": "1.1.0", - "jsr:@std/path@1": "1.1.2", "jsr:@std/text@^1.0.10": "1.0.10", "jsr:@std/ulid@1": "1.0.0", "npm:@opentelemetry/api@^1.9.0": "1.9.0", "npm:@types/node@*": "22.5.4", - "npm:ajv@^8.17.1": "8.17.1", "npm:hono@^4.11.4": "4.11.4", "npm:mongodb@7": "7.0.0", - "npm:path-to-regexp@^6.3.0": "6.3.0", - "npm:zod@^3.24.1": "3.25.76", "npm:zod@^4.3.5": "4.3.5" }, "jsr": { - "@oak/commons@1.0.1": { - "integrity": "889ff210f0b4292591721be07244ecb1b5c118742f5273c70cf30d7cd4184d0c", - "dependencies": [ - "jsr:@std/assert@1", - "jsr:@std/bytes", - "jsr:@std/crypto", - "jsr:@std/encoding@1", - "jsr:@std/http", - "jsr:@std/media-types" - ] - }, - "@oak/oak@17.2.0": { - "integrity": "938537a92fc7922a46a9984696c65fb189c9baad164416ac3e336768a9ff0cd1", - "dependencies": [ - "jsr:@oak/commons", - "jsr:@std/assert@1", - "jsr:@std/bytes", - "jsr:@std/http", - "jsr:@std/media-types", - "jsr:@std/path", - "npm:path-to-regexp" - ] - }, "@std/assert@1.0.15": { "integrity": "d64018e951dbdfab9777335ecdb000c0b4e3df036984083be219ce5941e4703b", "dependencies": [ - "jsr:@std/internal@^1.0.12" + "jsr:@std/internal" ] }, - "@std/bytes@1.0.6": { - "integrity": "f6ac6adbd8ccd99314045f5703e23af0a68d7f7e58364b47d2c7f408aeb5820a" - }, - "@std/crypto@1.0.5": { - "integrity": "0dcfbb319fe0bba1bd3af904ceb4f948cde1b92979ec1614528380ed308a3b40" - }, "@std/dotenv@0.225.3": { "integrity": "a95e5b812c27b0854c52acbae215856d9cce9d4bbf774d938c51d212711e8d4a" }, @@ -74,33 +31,15 @@ "@std/dotenv@0.225.6": { "integrity": "1d6f9db72f565bd26790fa034c26e45ecb260b5245417be76c2279e5734c421b" }, - "@std/encoding@1.0.10": { - "integrity": "8783c6384a2d13abd5e9e87a7ae0520a30e9f56aeeaa3bdf910a3eaaf5c811a1" - }, "@std/fmt@1.0.4": { "integrity": "e14fe5bedee26f80877e6705a97a79c7eed599e81bb1669127ef9e8bc1e29a74" }, "@std/fmt@1.0.5": { "integrity": "0cfab43364bc36650d83c425cd6d99910fc20c4576631149f0f987eddede1a4d" }, - "@std/http@1.0.21": { - "integrity": "abb5c747651ee6e3ea6139858fd9b1810d2c97f53a5e6722f3b6d27a6d263edc", - "dependencies": [ - "jsr:@std/encoding@^1.0.10" - ] - }, "@std/internal@1.0.12": { "integrity": "972a634fd5bc34b242024402972cd5143eac68d8dffaca5eaa4dba30ce17b027" }, - "@std/media-types@1.1.0": { - "integrity": "c9d093f0c05c3512932b330e3cc1fe1d627b301db33a4c2c2185c02471d6eaa4" - }, - "@std/path@1.1.2": { - "integrity": "c0b13b97dfe06546d5e16bf3966b1cadf92e1cc83e56ba5476ad8b498d9e3038", - "dependencies": [ - "jsr:@std/internal@^1.0.10" - ] - }, "@std/text@1.0.10": { "integrity": "9dcab377450253c0efa9a9a0c731040bfd4e1c03f8303b5934381467b7954338" }, 
@@ -133,30 +72,12 @@ "@types/webidl-conversions" ] }, - "ajv@8.17.1": { - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dependencies": [ - "fast-deep-equal", - "fast-uri", - "json-schema-traverse", - "require-from-string" - ] - }, "bson@7.0.0": { "integrity": "sha512-Kwc6Wh4lQ5OmkqqKhYGKIuELXl+EPYSCObVE6bWsp1T/cGkOCBN0I8wF/T44BiuhHyNi1mmKVPXk60d41xZ7kw==" }, - "fast-deep-equal@3.1.3": { - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" - }, - "fast-uri@3.1.0": { - "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==" - }, "hono@4.11.4": { "integrity": "sha512-U7tt8JsyrxSRKspfhtLET79pU8K+tInj5QZXs1jSugO1Vq5dFj3kmZsRldo29mTBfcjDRVRXrEZ6LS63Cog9ZA==" }, - "json-schema-traverse@1.0.0": { - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, "memory-pager@1.5.0": { "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, @@ -175,15 +96,9 @@ "mongodb-connection-string-url" ] }, - "path-to-regexp@6.3.0": { - "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==" - }, "punycode@2.3.1": { "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, - "require-from-string@2.0.2": { - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" - }, "sparse-bitfield@3.0.3": { "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", "dependencies": [ @@ -209,9 +124,6 @@ "webidl-conversions" ] }, - "zod@3.25.76": { - "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==" - }, "zod@4.3.5": { "integrity": "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==" } @@ -290,14 +202,6 @@ "npm:mongodb@7", "npm:zod@^4.3.5" ] - }, - "packages/oak": { - "dependencies": [ - "jsr:@oak/oak@^17.1.6", - "jsr:@std/ulid@1", - "npm:ajv@^8.17.1", - "npm:zod@^3.24.1" - ] } } } diff --git a/packages/core/src/lib/message/command.ts b/packages/core/src/lib/message/command.ts index ff2ed55..e008abd 100644 --- a/packages/core/src/lib/message/command.ts +++ b/packages/core/src/lib/message/command.ts @@ -52,6 +52,35 @@ export type Command = { dataschema?: string; }; +/** + * Type alias for the command data field schema. + */ +type CommandDataSchema = z.ZodUnion< + [ + z.ZodRecord, + z.ZodString, + z.ZodNumber, + z.ZodArray, + z.ZodBoolean, + ] +>; + +/** + * Type alias for the command schema shape. + */ +export type CommandSchemaType = z.ZodObject<{ + specversion: z.ZodLiteral<'1.0'>; + id: z.ZodString; + correlationid: z.ZodString; + time: z.ZodISODateTime; + source: z.ZodString; + type: z.ZodString; + subject: z.ZodOptional; + data: CommandDataSchema; + datacontenttype: z.ZodOptional; + dataschema: z.ZodOptional; +}>; + /** * The Zod schema matching the Command type. * @@ -60,7 +89,7 @@ export type Command = { * We do not infer the Command type from this schema because of * slow type issues see https://jsr.io/docs/about-slow-types for more details. 
*/ -export const commandSchema = z.object({ +export const commandSchema: CommandSchemaType = z.object({ specversion: z.literal('1.0'), id: z.string(), correlationid: z.string(), diff --git a/packages/core/src/lib/message/event.ts b/packages/core/src/lib/message/event.ts index 57728b4..ce8464e 100644 --- a/packages/core/src/lib/message/event.ts +++ b/packages/core/src/lib/message/event.ts @@ -55,6 +55,35 @@ export type Event = { dataschema?: string; }; +/** + * Type alias for the event data field schema. + */ +type EventDataSchema = z.ZodUnion< + [ + z.ZodRecord, + z.ZodString, + z.ZodNumber, + z.ZodArray, + z.ZodBoolean, + ] +>; + +/** + * Type alias for the event schema shape. + */ +export type EventSchemaType = z.ZodObject<{ + specversion: z.ZodLiteral<'1.0'>; + id: z.ZodString; + correlationid: z.ZodString; + time: z.ZodISODateTime; + source: z.ZodString; + type: z.ZodString; + subject: z.ZodString; + data: EventDataSchema; + datacontenttype: z.ZodOptional; + dataschema: z.ZodOptional; +}>; + /** * The Zod schema matching the Event type. * @@ -63,7 +92,7 @@ export type Event = { * We do not infer the Event type from this schema because of * slow type issues see https://jsr.io/docs/about-slow-types for more details. */ -export const eventSchema = z.object({ +export const eventSchema: EventSchemaType = z.object({ specversion: z.literal('1.0'), id: z.string(), correlationid: z.string(), diff --git a/packages/core/src/lib/message/query.ts b/packages/core/src/lib/message/query.ts index 01d71a6..c5ac52d 100644 --- a/packages/core/src/lib/message/query.ts +++ b/packages/core/src/lib/message/query.ts @@ -48,6 +48,34 @@ export type Query = { dataschema?: string; }; +/** + * Type alias for the query data field schema. + */ +type QueryDataSchema = z.ZodUnion< + [ + z.ZodRecord, + z.ZodString, + z.ZodNumber, + z.ZodArray, + z.ZodBoolean, + ] +>; + +/** + * Type alias for the query schema shape. + */ +export type QuerySchemaType = z.ZodObject<{ + specversion: z.ZodLiteral<'1.0'>; + id: z.ZodString; + correlationid: z.ZodString; + time: z.ZodISODateTime; + source: z.ZodString; + type: z.ZodString; + data: QueryDataSchema; + datacontenttype: z.ZodOptional; + dataschema: z.ZodOptional; +}>; + /** * The Zod schema matching the Query type. * @@ -56,7 +84,7 @@ export type Query = { * We do not infer the Query type from this schema because of * slow type issues see https://jsr.io/docs/about-slow-types for more details. */ -export const querySchema = z.object({ +export const querySchema: QuerySchemaType = z.object({ specversion: z.literal('1.0'), id: z.string(), correlationid: z.string(), diff --git a/packages/hono/src/lib/onError.ts b/packages/hono/src/lib/onError.ts index a43a6d1..66dd4dc 100644 --- a/packages/hono/src/lib/onError.ts +++ b/packages/hono/src/lib/onError.ts @@ -18,7 +18,10 @@ import type { HTTPResponseError } from 'hono/types'; * app.onError(handleError); * ``` */ -export const handleError = (error: Error | HTTPResponseError, c: Context) => { +export const handleError = ( + error: Error | HTTPResponseError, + c: Context, +): Response => { let statusCode = 500; let response: Record = { error: 'INTERNAL_SERVER_ERROR', diff --git a/packages/mongodb/src/lib/crud/find.ts b/packages/mongodb/src/lib/crud/find.ts index b094320..6fd5e34 100644 --- a/packages/mongodb/src/lib/crud/find.ts +++ b/packages/mongodb/src/lib/crud/find.ts @@ -50,7 +50,7 @@ export type Find = ( * * @returns {Promise} The found documents. 
*/ -export const find: Find = ({ +export const find: Find = ({ collection, filter, limit, @@ -60,7 +60,7 @@ export const find: Find = ({ mapDocument, outputType, options, -}) => { +}: FindInput) => { return withSpan('find', collection, async () => { let res: WithId[] = []; @@ -89,7 +89,9 @@ export const find: Find = ({ } try { - return res.map((item) => outputType.parse(mapDocument(item))); + return res.map((item) => + outputType.parse(mapDocument(item)) + ) as TData[]; } catch (error) { const exception = error instanceof Error ? new GenericException().fromError(error) diff --git a/packages/mongodb/src/lib/crud/findOne.ts b/packages/mongodb/src/lib/crud/findOne.ts index 3a489a9..028477d 100644 --- a/packages/mongodb/src/lib/crud/findOne.ts +++ b/packages/mongodb/src/lib/crud/findOne.ts @@ -33,12 +33,12 @@ export type FindOne = ( * * @returns {Promise} The found document. */ -export const findOne: FindOne = ({ +export const findOne: FindOne = ({ collection, filter, mapDocument, outputType, -}) => { +}: FindOneInput) => { return withSpan('findOne', collection, async () => { let res: WithId | null = null; @@ -53,7 +53,7 @@ export const findOne: FindOne = ({ } try { - return outputType.parse(mapDocument(res)); + return outputType.parse(mapDocument(res)) as TData; } catch (error) { const exception = error instanceof Error ? new GenericException().fromError(error) diff --git a/packages/mongodb/src/lib/crud/findOneAndDelete.ts b/packages/mongodb/src/lib/crud/findOneAndDelete.ts index 41d9393..3f176d6 100644 --- a/packages/mongodb/src/lib/crud/findOneAndDelete.ts +++ b/packages/mongodb/src/lib/crud/findOneAndDelete.ts @@ -41,13 +41,13 @@ export type FindOneAndDelete = ( * * @returns {Promise} The found and deleted document. */ -export const findOneAndDelete: FindOneAndDelete = ({ +export const findOneAndDelete: FindOneAndDelete = ({ collection, filter, mapDocument, outputType, options, -}) => { +}: FindOneAndDeleteInput) => { return withSpan('findOneAndDelete', collection, async () => { let res: WithId | null = null; @@ -66,7 +66,7 @@ export const findOneAndDelete: FindOneAndDelete = ({ } try { - return outputType.parse(mapDocument(res)); + return outputType.parse(mapDocument(res)) as TData; } catch (error) { const exception = error instanceof Error ? new GenericException().fromError(error) diff --git a/packages/mongodb/src/lib/crud/findOneAndReplace.ts b/packages/mongodb/src/lib/crud/findOneAndReplace.ts index 8be2283..6af7857 100644 --- a/packages/mongodb/src/lib/crud/findOneAndReplace.ts +++ b/packages/mongodb/src/lib/crud/findOneAndReplace.ts @@ -44,14 +44,14 @@ export type FindOneAndReplace = ( * * @returns {Promise} The found and replaced document. */ -export const findOneAndReplace: FindOneAndReplace = ({ +export const findOneAndReplace: FindOneAndReplace = ({ collection, filter, replacement, mapDocument, outputType, options, -}) => { +}: FindOneAndReplaceInput) => { return withSpan('findOneAndReplace', collection, async () => { let res: WithId | null = null; @@ -74,7 +74,7 @@ export const findOneAndReplace: FindOneAndReplace = ({ } try { - return outputType.parse(mapDocument(res)); + return outputType.parse(mapDocument(res)) as TData; } catch (error) { const exception = error instanceof Error ? 
new GenericException().fromError(error) diff --git a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts index ce1a626..74a6e55 100644 --- a/packages/mongodb/src/lib/crud/findOneAndUpdate.ts +++ b/packages/mongodb/src/lib/crud/findOneAndUpdate.ts @@ -44,14 +44,14 @@ export type FindOneAndUpdate = ( * * @returns {Promise} The found and updated document. */ -export const findOneAndUpdate: FindOneAndUpdate = ({ +export const findOneAndUpdate: FindOneAndUpdate = ({ collection, filter, update, mapDocument, outputType, options, -}) => { +}: FindOneAndUpdateInput) => { return withSpan('findOneAndUpdate', collection, async () => { let res: WithId | null = null; @@ -74,7 +74,7 @@ export const findOneAndUpdate: FindOneAndUpdate = ({ } try { - return outputType.parse(mapDocument(res)); + return outputType.parse(mapDocument(res)) as TData; } catch (error) { const exception = error instanceof Error ? new GenericException().fromError(error) diff --git a/packages/mongodb/src/lib/repository.ts b/packages/mongodb/src/lib/repository.ts index f55abc0..c3793de 100644 --- a/packages/mongodb/src/lib/repository.ts +++ b/packages/mongodb/src/lib/repository.ts @@ -108,7 +108,7 @@ export class MongoDBRepository< * ZodType.parse is used to ensure the data is valid and type-safe. */ protected _mapDocumentToEntity(doc: Document): TEntity { - return this._entityType.parse(doc); + return this._entityType.parse(doc) as TEntity; } /** From 5d839c1dc5a1a9b8f52d643f69227b4698b82c58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 14:52:07 +0100 Subject: [PATCH 30/37] pin shas for github actions --- .github/workflows/checks.yaml | 6 +++--- .github/workflows/docs.yaml | 10 +++++----- .github/workflows/publish.yaml | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index e408d0e..8949f4f 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -18,15 +18,15 @@ jobs: DENO_DIR: deno_cache_dir steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Cache Deno dependencies - uses: actions/cache@v4 + uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 with: path: ${{ env.DENO_DIR }} key: deno_cache - - uses: denoland/setup-deno@v2 + - uses: denoland/setup-deno@e95548e56dfa95d4e1a28d6f422fafe75c4c26fb with: deno-version: v2 diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 218dbbd..22df3bf 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -22,17 +22,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Setup Node - uses: actions/setup-node@v4 + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 with: node-version: 20 cache: npm cache-dependency-path: "./docs/package-lock.json" - name: Setup Pages - uses: actions/configure-pages@v4 + uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b - name: Install dependencies working-directory: ./docs @@ -43,7 +43,7 @@ jobs: run: npm run build - name: Upload artifact - uses: actions/upload-pages-artifact@v3 + uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b with: path: docs/.vitepress/dist @@ -59,4 +59,4 @@ jobs: steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4 + uses: 
actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 2895c49..7e5d0fe 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -13,9 +13,9 @@ jobs: publish: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - - uses: denoland/setup-deno@v2 + - uses: denoland/setup-deno@e95548e56dfa95d4e1a28d6f422fafe75c4c26fb with: deno-version: v2 From c8ecfcdd9debbe3f02a1d20824acf5554cbb9633 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 15:09:26 +0100 Subject: [PATCH 31/37] fix workflows --- .github/workflows/checks.yaml | 3 +++ .github/workflows/docs.yaml | 11 +++++------ .github/workflows/publish.yaml | 9 +++++---- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index 8949f4f..f604d06 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -30,6 +30,9 @@ jobs: with: deno-version: v2 + - name: Install dependencies + run: deno install + - name: Check formatting run: deno fmt --check diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 22df3bf..609267b 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -5,12 +5,6 @@ on: release: types: [created] -# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages -permissions: - contents: read - pages: write - id-token: write - # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. concurrency: @@ -20,6 +14,8 @@ concurrency: jobs: build: runs-on: ubuntu-latest + permissions: + contents: read steps: - name: Checkout uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd @@ -51,6 +47,9 @@ jobs: name: Deploy needs: build runs-on: ubuntu-latest + permissions: + pages: write + id-token: write environment: name: github-pages diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 7e5d0fe..0a1af16 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -5,13 +5,14 @@ on: release: types: [created] -permissions: - contents: read - id-token: write # The OIDC ID token is used for authentication with JSR. - jobs: publish: + permissions: + contents: read + id-token: write # The OIDC ID token is used for authentication with JSR. + runs-on: ubuntu-latest + steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd From 3d39ae27ea677f1ed7e4ea7e6b2fa6cae51e03f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 15:18:05 +0100 Subject: [PATCH 32/37] fix workflow? 
--- .github/workflows/checks.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index f604d06..a57203c 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -30,8 +30,7 @@ jobs: with: deno-version: v2 - - name: Install dependencies - run: deno install + - run: deno cache - name: Check formatting run: deno fmt --check From 693e381dd0e94a3db1af3abf037f7c671c1861d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 15:21:50 +0100 Subject: [PATCH 33/37] ci --- .github/workflows/checks.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index a57203c..8949f4f 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -30,8 +30,6 @@ jobs: with: deno-version: v2 - - run: deno cache - - name: Check formatting run: deno fmt --check From 6c2fa1b45d67ee920fcfdaf9785c759f4cd42319 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 15:41:56 +0100 Subject: [PATCH 34/37] fix some code smells --- .../messages/commands/addUser.command.ts | 10 +- examples/hono-demo/src/main.ts | 2 +- examples/hono-demo/src/shared/shell/http.ts | 2 - examples/hono-demo/start-with-otel.sh | 4 +- .../core/src/lib/eventBus/eventBus.test.ts | 10 +- packages/core/src/lib/eventBus/eventBus.ts | 156 ++++++++++-------- packages/core/src/lib/message/router.ts | 10 +- packages/hono/src/lib/middleware/logger.ts | 2 +- packages/mongodb/src/lib/crud/find.ts | 8 +- 9 files changed, 119 insertions(+), 85 deletions(-) diff --git a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts index 7bb4abb..8b5d0b9 100644 --- a/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts +++ b/examples/hono-demo/src/iam/users/shell/messages/commands/addUser.command.ts @@ -1,4 +1,4 @@ -import { createEvent, getEventBus } from '@nimbus/core'; +import { createEvent, getEventBus, NotFoundException } from '@nimbus/core'; import { addUser, AddUserCommand, @@ -19,12 +19,16 @@ export const addUserCommandHandler = async (command: AddUserCommand) => { filter: { email: command.data.email }, }); } catch (_error) { - state = null; + if (_error instanceof NotFoundException) { + state = null; + } else { + throw _error; + } } state = addUser(state, command); - if (state) { + if (state !== null) { state = await userRepository.insertOne({ item: state, }); diff --git a/examples/hono-demo/src/main.ts b/examples/hono-demo/src/main.ts index 14c03ee..1f5be91 100644 --- a/examples/hono-demo/src/main.ts +++ b/examples/hono-demo/src/main.ts @@ -66,7 +66,7 @@ initMessages(); initMongoConnectionManager(); if (process.env.PORT) { - const port = parseInt(process.env.PORT); + const port = Number.parseInt(process.env.PORT); Deno.serve({ hostname: '0.0.0.0', port }, app.fetch); diff --git a/examples/hono-demo/src/shared/shell/http.ts b/examples/hono-demo/src/shared/shell/http.ts index bcf0594..37f872b 100644 --- a/examples/hono-demo/src/shared/shell/http.ts +++ b/examples/hono-demo/src/shared/shell/http.ts @@ -7,10 +7,8 @@ import usersRouter from '../../iam/users/shell/http/router.ts'; export const app = new Hono(); -// TODO: document correlationId middleware app.use(correlationId()); -// TODO: document logger middleware and observability in general now also added to mongodb app.use(logger({ 
enableTracing: true, tracerName: 'api', diff --git a/examples/hono-demo/start-with-otel.sh b/examples/hono-demo/start-with-otel.sh index 07c7c98..db328f1 100644 --- a/examples/hono-demo/start-with-otel.sh +++ b/examples/hono-demo/start-with-otel.sh @@ -3,10 +3,10 @@ export OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" export OTEL_EXPORTER_OTLP_ENDPOINT="https://otlp-gateway-prod-eu-west-2.grafana.net/otlp" # Read OTLP headers from secret file -if [ -f "./.otel_token" ]; then +if [[ -f "./.otel_token" ]]; then export OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic $(cat ./.otel_token)" else - echo "Error: .otel_token file not found." + echo "Error: .otel_token file not found." >&2 exit 1 fi diff --git a/packages/core/src/lib/eventBus/eventBus.test.ts b/packages/core/src/lib/eventBus/eventBus.test.ts index ccf2aec..c94172f 100644 --- a/packages/core/src/lib/eventBus/eventBus.test.ts +++ b/packages/core/src/lib/eventBus/eventBus.test.ts @@ -1,4 +1,4 @@ -import { assertEquals, assertInstanceOf } from '@std/assert'; +import { assertEquals, assertExists, assertInstanceOf } from '@std/assert'; import { GenericException } from '../exception/genericException.ts'; import type { Event } from '../message/event.ts'; import { getEventBus, NimbusEventBus, setupEventBus } from './eventBus.ts'; @@ -70,9 +70,10 @@ Deno.test('EventBus delivers event to subscriber', async () => { // Wait for async handler to complete await new Promise((r) => setTimeout(r, 50)); - assertEquals(receivedEvent!.type, 'test.event.deliver'); + assertExists(receivedEvent); + assertEquals(receivedEvent.type, 'test.event.deliver'); assertEquals( - (receivedEvent!.data as Record).message, + (receivedEvent.data as Record).message, 'hello', ); }); @@ -174,7 +175,8 @@ Deno.test('EventBus exhausts retries and invokes onError callback', async () => // Initial attempt + 2 retries = 3 total attempts assertEquals(attempts, 3); assertInstanceOf(errorReceived, GenericException); - assertEquals(eventReceived!.id, testEvent.id); + assertExists(eventReceived); + assertEquals(eventReceived.id, testEvent.id); }); Deno.test('EventBus onError callback receives error and event', async () => { diff --git a/packages/core/src/lib/eventBus/eventBus.ts b/packages/core/src/lib/eventBus/eventBus.ts index 8ccbce0..f406971 100644 --- a/packages/core/src/lib/eventBus/eventBus.ts +++ b/packages/core/src/lib/eventBus/eventBus.ts @@ -158,13 +158,13 @@ export type SubscribeEventInput = { * ``` */ export class NimbusEventBus { - private _eventEmitter: EventEmitter; - private _name: string; - private _maxRetries: number; - private _baseDelay: number; - private _maxDelay: number; - private _useJitter: boolean; - private _logPublish?: (event: Event) => void; + private readonly _eventEmitter: EventEmitter; + private readonly _name: string; + private readonly _maxRetries: number; + private readonly _baseDelay: number; + private readonly _maxDelay: number; + private readonly _useJitter: boolean; + private readonly _logPublish?: (event: Event) => void; /** * Create a new NimbusEventBus instance. 
@@ -424,79 +424,43 @@ export class NimbusEventBus { try { await handler(event); - // Record success metrics - eventsDeliveredCounter.add(1, { - ...metricLabels, - status: 'success', - }); - handlingDuration.record( - (performance.now() - startTime) / 1000, + this._recordDeliveryMetrics( metricLabels, + 'success', + startTime, ); + span.end(); return; - } catch (error: any) { + } catch (error: unknown) { attempt++; if (attempt > maxRetries) { - // Record error metrics - eventsDeliveredCounter.add(1, { - ...metricLabels, - status: 'error', - }); - handlingDuration.record( - (performance.now() - startTime) / 1000, + this._handleFinalFailure( + error, + event, + span, metricLabels, + startTime, + maxRetries, + baseDelay, + maxDelay, ); - - span.setStatus({ - code: SpanStatusCode.ERROR, - message: error instanceof Error - ? error.message - : 'Unknown error', - }); - span.recordException( - error instanceof Error - ? error - : new Error('Unknown error'), - ); - span.end(); - - const exception = new GenericException( - `Failed to handle event: ${event.type} from ${event.source}`, - { - retryAttempts: maxRetries, - baseDelay, - maxDelay, - }, - ); - - if (error.stack) { - exception.stack = error.stack; - } - - throw exception; } - // Record retry metric retryAttemptsCounter.add(1, metricLabels); - // Exponential backoff with optional jitter - const delay = Math.min( - baseDelay * Math.pow(2, attempt - 1), + const delayMs = this._calculateRetryDelay( + attempt, + baseDelay, maxDelay, + useJitter, ); - const jitter = useJitter - ? Math.random() * delay * 0.1 - : 0; - span.addEvent('retry', { - attempt, - delay_ms: delay + jitter, - }); + span.addEvent('retry', { attempt, delay_ms: delayMs }); await new Promise((resolve) => - setTimeout(resolve, delay + jitter) + setTimeout(resolve, delayMs) ); } } @@ -504,6 +468,64 @@ export class NimbusEventBus { ); } + private _recordDeliveryMetrics( + metricLabels: { eventbus_name: string; event_type: string }, + status: 'success' | 'error', + startTime: number, + ): void { + eventsDeliveredCounter.add(1, { ...metricLabels, status }); + handlingDuration.record( + (performance.now() - startTime) / 1000, + metricLabels, + ); + } + + private _handleFinalFailure( + error: unknown, + event: Event, + span: ReturnType, + metricLabels: { eventbus_name: string; event_type: string }, + startTime: number, + maxRetries: number, + baseDelay: number, + maxDelay: number, + ): never { + this._recordDeliveryMetrics(metricLabels, 'error', startTime); + + const errorMessage = error instanceof Error + ? error.message + : 'Unknown error'; + const errorInstance = error instanceof Error + ? error + : new Error('Unknown error'); + + span.setStatus({ code: SpanStatusCode.ERROR, message: errorMessage }); + span.recordException(errorInstance); + span.end(); + + const exception = new GenericException( + `Failed to handle event: ${event.type} from ${event.source}`, + { retryAttempts: maxRetries, baseDelay, maxDelay }, + ); + + if (error instanceof Error && error.stack) { + exception.stack = error.stack; + } + + throw exception; + } + + private _calculateRetryDelay( + attempt: number, + baseDelay: number, + maxDelay: number, + useJitter: boolean, + ): number { + const delay = Math.min(baseDelay * Math.pow(2, attempt - 1), maxDelay); + const jitter = useJitter ? Math.random() * delay * 0.1 : 0; + return delay + jitter; + } + /** * Validate the size of the event and return the size in bytes. 
* @@ -619,8 +641,12 @@ export const setupEventBus = ( * ``` */ export const getEventBus = (name: string = 'default'): NimbusEventBus => { - if (!eventBusRegistry.has(name)) { - eventBusRegistry.set(name, new NimbusEventBus({ name })); + let eventBus = eventBusRegistry.get(name); + + if (!eventBus) { + eventBus = new NimbusEventBus({ name }); + eventBusRegistry.set(name, eventBus); } - return eventBusRegistry.get(name)!; + + return eventBus; }; diff --git a/packages/core/src/lib/message/router.ts b/packages/core/src/lib/message/router.ts index 39f19e5..e5ad800 100644 --- a/packages/core/src/lib/message/router.ts +++ b/packages/core/src/lib/message/router.ts @@ -401,8 +401,12 @@ export const setupRouter = ( * ``` */ export const getRouter = (name: string = 'default'): MessageRouter => { - if (!routerRegistry.has(name)) { - routerRegistry.set(name, new MessageRouter({ name })); + let router = routerRegistry.get(name); + + if (!router) { + router = new MessageRouter({ name }); + routerRegistry.set(name, router); } - return routerRegistry.get(name)!; + + return router; }; diff --git a/packages/hono/src/lib/middleware/logger.ts b/packages/hono/src/lib/middleware/logger.ts index 2581119..3092d24 100644 --- a/packages/hono/src/lib/middleware/logger.ts +++ b/packages/hono/src/lib/middleware/logger.ts @@ -30,7 +30,7 @@ const humanize = (times: string[]) => { const [delimiter, separator] = [',', '.']; const orderTimes = times.map((v) => - v.replace(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1' + delimiter) + v.replaceAll(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1' + delimiter) ); return orderTimes.join(separator); diff --git a/packages/mongodb/src/lib/crud/find.ts b/packages/mongodb/src/lib/crud/find.ts index 6fd5e34..e266ad4 100644 --- a/packages/mongodb/src/lib/crud/find.ts +++ b/packages/mongodb/src/lib/crud/find.ts @@ -67,19 +67,19 @@ export const find: Find = ({ try { const findRes = collection.find(filter, options); - if (typeof limit !== 'undefined') { + if (limit !== undefined) { findRes.limit(limit); } - if (typeof skip !== 'undefined') { + if (skip !== undefined) { findRes.skip(skip); } - if (typeof sort !== 'undefined') { + if (sort !== undefined) { findRes.sort(sort); } - if (typeof project !== 'undefined') { + if (project !== undefined) { findRes.project(project); } From b857cb023ef50fc36febec2f11dca9daaf63af14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 15:46:54 +0100 Subject: [PATCH 35/37] fix code smell --- packages/core/src/lib/eventBus/eventBus.ts | 31 +++++++++++----------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/packages/core/src/lib/eventBus/eventBus.ts b/packages/core/src/lib/eventBus/eventBus.ts index f406971..b929a8d 100644 --- a/packages/core/src/lib/eventBus/eventBus.ts +++ b/packages/core/src/lib/eventBus/eventBus.ts @@ -436,16 +436,14 @@ export class NimbusEventBus { attempt++; if (attempt > maxRetries) { - this._handleFinalFailure( + this._handleFinalFailure({ error, event, span, metricLabels, startTime, - maxRetries, - baseDelay, - maxDelay, - ); + retryConfig: { maxRetries, baseDelay, maxDelay }, + }); } retryAttemptsCounter.add(1, metricLabels); @@ -480,16 +478,17 @@ export class NimbusEventBus { ); } - private _handleFinalFailure( - error: unknown, - event: Event, - span: ReturnType, - metricLabels: { eventbus_name: string; event_type: string }, - startTime: number, - maxRetries: number, - baseDelay: number, - maxDelay: number, - ): never { + private _handleFinalFailure(options: { + error: unknown; + event: Event; + span: 
ReturnType; + metricLabels: { eventbus_name: string; event_type: string }; + startTime: number; + retryConfig: { maxRetries: number; baseDelay: number; maxDelay: number }; + }): never { + const { error, event, span, metricLabels, startTime, retryConfig } = + options; + this._recordDeliveryMetrics(metricLabels, 'error', startTime); const errorMessage = error instanceof Error @@ -505,7 +504,7 @@ export class NimbusEventBus { const exception = new GenericException( `Failed to handle event: ${event.type} from ${event.source}`, - { retryAttempts: maxRetries, baseDelay, maxDelay }, + retryConfig, ); if (error instanceof Error && error.stack) { From 4d5e2ceb6b58a2fb524cc02eb6e3e312646d91a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 15:49:31 +0100 Subject: [PATCH 36/37] fmt --- packages/core/src/lib/eventBus/eventBus.ts | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/core/src/lib/eventBus/eventBus.ts b/packages/core/src/lib/eventBus/eventBus.ts index b929a8d..74992f7 100644 --- a/packages/core/src/lib/eventBus/eventBus.ts +++ b/packages/core/src/lib/eventBus/eventBus.ts @@ -442,7 +442,11 @@ export class NimbusEventBus { span, metricLabels, startTime, - retryConfig: { maxRetries, baseDelay, maxDelay }, + retryConfig: { + maxRetries, + baseDelay, + maxDelay, + }, }); } @@ -484,7 +488,11 @@ export class NimbusEventBus { span: ReturnType; metricLabels: { eventbus_name: string; event_type: string }; startTime: number; - retryConfig: { maxRetries: number; baseDelay: number; maxDelay: number }; + retryConfig: { + maxRetries: number; + baseDelay: number; + maxDelay: number; + }; }): never { const { error, event, span, metricLabels, startTime, retryConfig } = options; From 3f15944de284ce2a423138e04462d6a16a0fb3dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Go=CC=88rdes?= Date: Mon, 26 Jan 2026 15:56:18 +0100 Subject: [PATCH 37/37] bust the caching issues? --- .github/workflows/checks.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index 8949f4f..6ac1c12 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -24,7 +24,9 @@ jobs: uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 with: path: ${{ env.DENO_DIR }} - key: deno_cache + key: deno-${{ runner.os }}-${{ hashFiles('deno.lock') }} + restore-keys: | + deno-${{ runner.os }}- - uses: denoland/setup-deno@e95548e56dfa95d4e1a28d6f422fafe75c4c26fb with: