diff --git a/GEMINI.md b/GEMINI.md index 3fc416fe8e..7a6117167b 100644 --- a/GEMINI.md +++ b/GEMINI.md @@ -81,18 +81,14 @@ const a = (ctx) => { - Prefer self-documenting code (clear variable names, etc.). - If you must add a comment, keep it minimal and explain the _why_, not the _what_. -## Running db Tests +## Running Tests -The following instructions apply only to the `@based/db` package. Other packages have their own test runners. +The project uses a custom test runner. **Always use `npm test`** to run tests. Do **NOT** use `npx tsx` or `node` directly on test files. -The `db` package uses a custom test runner. To run the tests, you can use `npm run test` from the `packages/db` directory, or `npm run test -w @based/db` from the root of the project. You can pass arguments to filter the tests. +You can run tests from the root simply with `npm test`. You can pass arguments to filter the tests. ```bash -# From packages/db -npm run test -- [filters] - -# From root -npm run test -w @based/db -- [filters] +npm test [filter] ``` There is also a `test-fast` script that skips the build step. @@ -111,28 +107,23 @@ You can filter which tests to run by providing one or more filter arguments. - **Run all tests:** ```bash - # from packages/db - npm run test + npm test ``` -- **Run all tests and stop on the first failure:** +- **Run tests for a specific file or folder (e.g. `query/ast`):** ```bash - # from packages/db - npm run test -- stopOnFail + npm test query/ast ``` -- **Run tests in files with "view" in the path:** +- **Run all tests and stop on the first failure:** ```bash - # from packages/db - npm run test -- view + npm test stopOnFail ``` - **Run tests with "observe" in the name, in files with "view" in the path:** ```bash - # from packages/db - npm run test -- view:observe + npm test view:observe ``` - **Run all tests 10 times:** ```bash - # from packages/db - npm run test -- 10 + npm test 10 ``` ## Zig version diff --git a/src/db-client/modify/Ctx.ts b/_modify/Ctx.ts similarity index 81% rename from src/db-client/modify/Ctx.ts rename to _modify/Ctx.ts index 16b3ae1a75..1873fcb9a4 100644 --- a/src/db-client/modify/Ctx.ts +++ b/_modify/Ctx.ts @@ -8,7 +8,7 @@ export const MODIFY_HEADER_SIZE = 1 + 4 + 8 + 4 export class Ctx { constructor(schemaChecksum: number, buf: Uint8Array) { this.buf = buf - buf[4] = OpType.modify // make enum later 1 means normal MODIFY + buf[4] = OpType.modify writeUint64(buf, schemaChecksum, 5) this.reset() } @@ -19,18 +19,18 @@ export class Ctx { this.cursor = {} this.batch = {} } - start: number - index: number - schema: SchemaTypeDef + start!: number + index!: number + schema!: SchemaTypeDef buf: Uint8Array - max: number - size: number + max!: number + size!: number unsafe?: boolean - operation: ModOpEnum + operation!: ModOpEnum main: Map = new Map() - draining: Promise + draining!: Promise scheduled: Promise | undefined - locale: LangCodeEnum + locale!: LangCodeEnum sort: number = 0 sortText: number = 0 defaults: number = 0 @@ -39,7 +39,6 @@ export class Ctx { prop?: number main?: number operation?: ModOpEnum - upserting?: boolean } = {} batch: { count?: number diff --git a/src/db-client/modify/Tmp.ts b/_modify/Tmp.ts similarity index 96% rename from src/db-client/modify/Tmp.ts rename to _modify/Tmp.ts index aa14f30083..7cb6fb0b9f 100644 --- a/src/db-client/modify/Tmp.ts +++ b/_modify/Tmp.ts @@ -43,10 +43,10 @@ export class Tmp implements Promise { this.batch = ctx.batch this.tmpId = ctx.batch.count++ } - [Symbol.toStringTag]: 'ModifyPromise' + [Symbol.toStringTag]!: 'ModifyPromise' #schema: SchemaTypeDef - #id: number - #err: number + #id!: number + #err!: number get error(): Error | undefined { if (this.batch.ready && !this.id) { if (this.#err in errorMap) { diff --git a/src/db-client/modify/create/index.ts b/_modify/create/index.ts similarity index 100% rename from src/db-client/modify/create/index.ts rename to _modify/create/index.ts diff --git a/src/db-client/modify/create/mark.ts b/_modify/create/mark.ts similarity index 100% rename from src/db-client/modify/create/mark.ts rename to _modify/create/mark.ts diff --git a/src/db-client/modify/cursor.ts b/_modify/cursor.ts similarity index 100% rename from src/db-client/modify/cursor.ts rename to _modify/cursor.ts diff --git a/src/db-client/modify/delete/index.ts b/_modify/delete/index.ts similarity index 100% rename from src/db-client/modify/delete/index.ts rename to _modify/delete/index.ts diff --git a/src/db-client/modify/drain.ts b/_modify/drain.ts similarity index 100% rename from src/db-client/modify/drain.ts rename to _modify/drain.ts diff --git a/src/db-client/modify/edges/binary.ts b/_modify/edges/binary.ts similarity index 100% rename from src/db-client/modify/edges/binary.ts rename to _modify/edges/binary.ts diff --git a/src/db-client/modify/edges/cardinality.ts b/_modify/edges/cardinality.ts similarity index 100% rename from src/db-client/modify/edges/cardinality.ts rename to _modify/edges/cardinality.ts diff --git a/src/db-client/modify/edges/header.ts b/_modify/edges/header.ts similarity index 100% rename from src/db-client/modify/edges/header.ts rename to _modify/edges/header.ts diff --git a/src/db-client/modify/edges/index.ts b/_modify/edges/index.ts similarity index 100% rename from src/db-client/modify/edges/index.ts rename to _modify/edges/index.ts diff --git a/src/db-client/modify/edges/reference.ts b/_modify/edges/reference.ts similarity index 100% rename from src/db-client/modify/edges/reference.ts rename to _modify/edges/reference.ts diff --git a/src/db-client/modify/edges/references.ts b/_modify/edges/references.ts similarity index 100% rename from src/db-client/modify/edges/references.ts rename to _modify/edges/references.ts diff --git a/src/db-client/modify/edges/separate.ts b/_modify/edges/separate.ts similarity index 100% rename from src/db-client/modify/edges/separate.ts rename to _modify/edges/separate.ts diff --git a/src/db-client/modify/edges/string.ts b/_modify/edges/string.ts similarity index 100% rename from src/db-client/modify/edges/string.ts rename to _modify/edges/string.ts diff --git a/src/db-client/modify/error.ts b/_modify/error.ts similarity index 100% rename from src/db-client/modify/error.ts rename to _modify/error.ts diff --git a/src/db-client/modify/expire/index.ts b/_modify/expire/index.ts similarity index 100% rename from src/db-client/modify/expire/index.ts rename to _modify/expire/index.ts diff --git a/src/db-client/modify/props/alias.ts b/_modify/props/alias.ts similarity index 100% rename from src/db-client/modify/props/alias.ts rename to _modify/props/alias.ts diff --git a/src/db-client/modify/props/binary.ts b/_modify/props/binary.ts similarity index 100% rename from src/db-client/modify/props/binary.ts rename to _modify/props/binary.ts diff --git a/src/db-client/modify/props/cardinality.ts b/_modify/props/cardinality.ts similarity index 100% rename from src/db-client/modify/props/cardinality.ts rename to _modify/props/cardinality.ts diff --git a/src/db-client/modify/props/delete.ts b/_modify/props/delete.ts similarity index 100% rename from src/db-client/modify/props/delete.ts rename to _modify/props/delete.ts diff --git a/src/db-client/modify/props/fixed.ts b/_modify/props/fixed.ts similarity index 100% rename from src/db-client/modify/props/fixed.ts rename to _modify/props/fixed.ts diff --git a/src/db-client/modify/props/increment.ts b/_modify/props/increment.ts similarity index 100% rename from src/db-client/modify/props/increment.ts rename to _modify/props/increment.ts diff --git a/src/db-client/modify/props/json.ts b/_modify/props/json.ts similarity index 100% rename from src/db-client/modify/props/json.ts rename to _modify/props/json.ts diff --git a/src/db-client/modify/props/main.ts b/_modify/props/main.ts similarity index 100% rename from src/db-client/modify/props/main.ts rename to _modify/props/main.ts diff --git a/src/db-client/modify/props/object.ts b/_modify/props/object.ts similarity index 100% rename from src/db-client/modify/props/object.ts rename to _modify/props/object.ts diff --git a/src/db-client/modify/props/reference.ts b/_modify/props/reference.ts similarity index 100% rename from src/db-client/modify/props/reference.ts rename to _modify/props/reference.ts diff --git a/src/db-client/modify/props/references.ts b/_modify/props/references.ts similarity index 66% rename from src/db-client/modify/props/references.ts rename to _modify/props/references.ts index 041116a1e8..886af6be5a 100644 --- a/src/db-client/modify/props/references.ts +++ b/_modify/props/references.ts @@ -5,6 +5,19 @@ import type { Ctx } from '../Ctx.js' import { PROP_CURSOR_SIZE, writePropCursor } from '../cursor.js' import { reserve } from '../resize.js' import { writeU32, writeU8 } from '../uint.js' +import { writeObject } from './object.js' + +type Edges = Record | undefined +const getEdges = (obj: Record): Edges => { + let edges: Edges + for (const i in obj) { + if (i[0] === '$' && i !== '$index') { + edges ??= {} + edges[i] = obj[i] + } + } + return edges +} export const writeReferences = (ctx: Ctx, def: PropDef, val: any) => { if (typeof val !== 'object') { @@ -27,40 +40,51 @@ export const writeReferences = (ctx: Ctx, def: PropDef, val: any) => { writeU8(ctx, RefOp.clear) } - let op: RefOpEnum | undefined + let prevOp: RefOpEnum | undefined let pos = 0 let len = 0 for (const item of val) { let id: number - let nextOp: RefOpEnum + let op: RefOpEnum + let edges: Edges if (typeof item === 'number') { id = item - nextOp = RefOp.set + op = RefOp.set } else if (typeof item === 'object' && item !== null && item.id) { id = item.id - nextOp = RefOp.set + edges = getEdges(item) + if (edges) { + op = RefOp.setEdge + } else { + op = RefOp.set + } } else { // not handled yet throw new Error('references payload, not handled yet - wip') } - if (op !== nextOp!) { - if (op) { + if (prevOp !== op!) { + if (prevOp) { // write previous len writeUint32(ctx.buf, len * 4, pos) } - writeU8(ctx, (op = nextOp!)) + writeU8(ctx, (prevOp = op!)) pos = ctx.index len = 0 ctx.index += 4 } writeU32(ctx, id) + if (edges) { + writeObject(ctx, def.edges as any, edges) + writeU8(ctx, ModOp.end) + } + len++ } - writeUint32(ctx.buf, len * 4, pos) + writeUint32(ctx.buf, len, pos) writeU8(ctx, RefOp.end) return } diff --git a/src/db-client/modify/props/separate.ts b/_modify/props/separate.ts similarity index 100% rename from src/db-client/modify/props/separate.ts rename to _modify/props/separate.ts diff --git a/src/db-client/modify/props/string.ts b/_modify/props/string.ts similarity index 100% rename from src/db-client/modify/props/string.ts rename to _modify/props/string.ts diff --git a/src/db-client/modify/props/text.ts b/_modify/props/text.ts similarity index 100% rename from src/db-client/modify/props/text.ts rename to _modify/props/text.ts diff --git a/src/db-client/modify/props/vector.ts b/_modify/props/vector.ts similarity index 100% rename from src/db-client/modify/props/vector.ts rename to _modify/props/vector.ts diff --git a/src/db-client/modify/resize.ts b/_modify/resize.ts similarity index 100% rename from src/db-client/modify/resize.ts rename to _modify/resize.ts diff --git a/_modify/types.ts b/_modify/types.ts new file mode 100644 index 0000000000..43426e1df9 --- /dev/null +++ b/_modify/types.ts @@ -0,0 +1,35 @@ +import { LangCode, ModOp } from '../../zigTsExports.js' + +export const RANGE_ERR = 1 +export const MOD_OPS_TO_STRING = { + [ModOp.createProp]: 'create', + [ModOp.updateProp]: 'update', + [ModOp.increment]: 'update', + [ModOp.expire]: 'update', +} as const + +export const enum SIZE { + DEFAULT_CURSOR = 11, +} + +export type ModifyOpts = { + unsafe?: boolean + locale?: keyof typeof LangCode +} + +export const NOEDGE_NOINDEX_REALID = 0 +export const EDGE_NOINDEX_REALID = 1 +export const EDGE_INDEX_REALID = 2 +export const NOEDGE_INDEX_REALID = 3 +export const NOEDGE_NOINDEX_TMPID = 4 +export const EDGE_NOINDEX_TMPID = 5 +export const EDGE_INDEX_TMPID = 6 +export const NOEDGE_INDEX_TMPID = 7 + +// export const REF_OP_OVERWRITE = 0 +// export const REF_OP_UPDATE = 1 +// export const REF_OP_DELETE = 2 +// export const REF_OP_PUT_OVERWRITE = 3 +// export const REF_OP_PUT_ADD = 4 + +// export type RefOp = typeof REF_OP_OVERWRITE | typeof REF_OP_UPDATE diff --git a/src/db-client/modify/uint.ts b/_modify/uint.ts similarity index 100% rename from src/db-client/modify/uint.ts rename to _modify/uint.ts diff --git a/src/db-client/modify/update/index.ts b/_modify/update/index.ts similarity index 82% rename from src/db-client/modify/update/index.ts rename to _modify/update/index.ts index 144bced155..ce416a74da 100644 --- a/src/db-client/modify/update/index.ts +++ b/_modify/update/index.ts @@ -114,3 +114,72 @@ export function update( return handleError(db, ctx, update, arguments, e) } } + +// const simpleUpdate = () => { + +// } + +/* + CREATE + + article { + body: 'xxx', + age: 10, + rating: 4 + } + + ==> + + create|article|main:10,4|body:xxx + + UPDATE + + article 10 { + body: 'xxx', + age: 10, + rating: 4 + } + + ==> + + update|article|id:10|main:0:10|main:1:4|body:xxx + + update(u8)|size(u32)|type(u8)|id(u32)|...[prop(u8)] + + + propType,size,value + // age:number:81 + 1,4,81 + + + + name,string,value + + + UPSERT + + article 'abc', { + body: 'xxx', + age: 10, + rating: 4 + } + + ==> + + upsert|article|abc|main:0:10|main:1:4|body:xxx + + + + + + + + + + -------------------------------------------------------------------- + + + + + +*/ diff --git a/src/db-client/modify/upsert/index.ts b/_modify/upsert/index.ts similarity index 100% rename from src/db-client/modify/upsert/index.ts rename to _modify/upsert/index.ts diff --git a/src/db-client/modify/validate.ts b/_modify/validate.ts similarity index 100% rename from src/db-client/modify/validate.ts rename to _modify/validate.ts diff --git a/clibs/common.mk b/clibs/common.mk index 825a49a307..e6a72c64f1 100644 --- a/clibs/common.mk +++ b/clibs/common.mk @@ -50,6 +50,8 @@ endif ifeq ($(uname_S),Linux) CFLAGS += -g -ggdb3 -fno-math-errno -ftree-vectorize -Wstrict-aliasing=3 + # Mute the gcc bug when `auto` is used to infer the type + CFLAGS += -Wno-old-style-declaration #CFLAGS += -opt-info-vec-optimized #CFLAGS += -ftree-vectorizer-verbose=5 -fopt-info-vec-missed diff --git a/clibs/include/cdefs.h b/clibs/include/cdefs.h index caf7141522..69edeb4348 100644 --- a/clibs/include/cdefs.h +++ b/clibs/include/cdefs.h @@ -204,7 +204,8 @@ * unsigned int len; * char buf[] __counted_by(len); * }; - * __builtin_dynamic_object_size(p->buf) == p->len * sizeof(*p->buf) + * *__builtin_counted_by_ref(p->puf) = len; + * __builtin_dynamic_object_size(p->buf) == p->len * sizeof(*p->buf); */ #define __counted_by(member) __attribute__((__counted_by__(member))) #else @@ -213,13 +214,16 @@ #endif #ifndef __pcounted_by -#if __has_attribute(__counted_by__) && !defined(__clang__) +#if __has_attribute(__counted_by__) && \ + !defined(__clang__) && \ + (__GNUC__ > 15 || (__GNUC__ == 14 && __GNUC_MINOR__ > 2)) /** * struct foo { * unsigned int len; * char *buf __pcounted_by(len); * }; - * __builtin_dynamic_object_size(p->buf) == p->len * sizeof(*p->buf) + * *__builtin_counted_by_ref(p->puf) = len; + * __builtin_dynamic_object_size(p->buf) == p->len * sizeof(*p->buf); */ #define __pcounted_by(member) __attribute__((__counted_by__(member))) #else diff --git a/clibs/include/selva/colvec.h b/clibs/include/selva/colvec.h index 221e7ae05f..9112923338 100644 --- a/clibs/include/selva/colvec.h +++ b/clibs/include/selva/colvec.h @@ -67,5 +67,8 @@ void *colvec_get_vec(struct SelvaTypeEntry *te, node_id_t node_id, const struct SELVA_EXPORT void colvec_set_vec(struct SelvaTypeEntry *te, node_id_t node_id, const struct SelvaFieldSchema *fs, const void *vec); +SELVA_EXPORT +void colvec_clear_vec(struct SelvaTypeEntry *te, node_id_t node_id, const struct SelvaFieldSchema *fs); + SELVA_EXPORT int colvec_foreach(struct SelvaTypeEntry *te, const struct SelvaFieldSchema *fs, node_id_t start, uint32_t len, void (*cb)(node_id_t node_id, void *vec, void *arg), void *arg); diff --git a/clibs/include/selva/db.h b/clibs/include/selva/db.h index 38bd497f7b..d21f87abb9 100644 --- a/clibs/include/selva/db.h +++ b/clibs/include/selva/db.h @@ -51,7 +51,7 @@ struct selva_dump_common_data { * Create a new DB instance. */ SELVA_EXPORT -struct SelvaDb *selva_db_create(void); +struct SelvaDb *selva_db_create(size_t len, uint8_t schema[len]); /** * Destroy a DB instance. @@ -66,11 +66,11 @@ SELVA_EXPORT int selva_db_chdir(struct SelvaDb *db, const char *pathname_str, size_t pathname_len) __attribute__((nonnull)); /** - * Create a new node type with a schema. - * @param type must not exist before. + * Set a hook to the dirty marking system. + * The hook function will be called every time a node is marked dirty. */ SELVA_EXPORT -int selva_db_create_type(struct SelvaDb *db, node_type_t type, const uint8_t *schema_buf, size_t schema_len) __attribute__((nonnull)); +void selva_db_set_dirty_hook(struct SelvaDb *db, selva_db_dirty_hook_t dirty_hook, void *ctx); /** * Save the common/shared data of the database. @@ -90,101 +90,56 @@ int selva_dump_load_common(struct SelvaDb *db, struct selva_dump_common_data *co SELVA_EXPORT int selva_dump_load_block(struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t block_i, char *errlog_buf, size_t errlog_size) __attribute__((nonnull)); +SELVA_EXPORT +inline node_type_t selva_get_max_type(const struct SelvaDb *db) [[reproducible]]; + /** * Find a type by type id. */ SELVA_EXPORT -struct SelvaTypeEntry *selva_get_type_by_index(const struct SelvaDb *db, node_type_t type) __attribute__((nonnull)); +inline struct SelvaTypeEntry *selva_get_type_by_index(struct SelvaDb *db, node_type_t type) [[reproducible]]; /** * Get the type for node. */ SELVA_EXPORT -struct SelvaTypeEntry *selva_get_type_by_node(const struct SelvaDb *db, struct SelvaNode *node) __attribute__((nonnull, pure)); +inline struct SelvaTypeEntry *selva_get_type_by_node(struct SelvaDb *db, struct SelvaNode *node) [[reproducible]]; SELVA_EXPORT -inline node_type_t selva_get_type(const struct SelvaTypeEntry *te) -#ifndef __zig -{ - return te->type; -} -#else -; -#endif +inline node_type_t selva_get_type(const struct SelvaTypeEntry *te) [[reproducible]]; SELVA_EXPORT -void selva_foreach_block(struct SelvaDb *db, enum SelvaTypeBlockStatus or_mask, void (*cb)(void *ctx, struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t block, node_id_t start), void *ctx); +inline block_id_t selva_get_nr_blocks(const struct SelvaTypeEntry *te); SELVA_EXPORT -inline block_id_t selva_get_nr_blocks(const struct SelvaTypeEntry *te) -#ifndef __zig -{ - return te->blocks->len; -} -#else -; -#endif +inline block_id_t selva_get_block_capacity(const struct SelvaTypeEntry *te); -SELVA_EXPORT -inline block_id_t selva_get_block_capacity(const struct SelvaTypeEntry *te) -#ifndef __zig -{ - return te->blocks->block_capacity; -} -#else -; -#endif +#define SELVA_NODE_ID2BLOCK_I3(block_capacity, node_id) \ + (((node_id - 1) - ((node_id - 1) % block_capacity)) / block_capacity) -inline block_id_t selva_node_id2block_i3(block_id_t block_capacity, node_id_t node_id) +static inline block_id_t selva_node_id2block_i3(block_id_t block_capacity, node_id_t node_id) { assert(node_id > 0); - return ((node_id - 1) - ((node_id - 1) % block_capacity)) / block_capacity; + return SELVA_NODE_ID2BLOCK_I3(block_capacity, node_id); } SELVA_EXPORT -inline block_id_t selva_node_id2block_i(const struct SelvaTypeBlocks *blocks, node_id_t node_id) -#ifndef __zig -{ - return selva_node_id2block_i3(blocks->block_capacity, node_id); -} -#else -; -#endif +inline block_id_t selva_node_id2block_i(const struct SelvaTypeBlocks *blocks, node_id_t node_id); SELVA_EXPORT -inline block_id_t selva_node_id2block_i2(const struct SelvaTypeEntry *te, node_id_t node_id) -#ifndef __zig -{ - return selva_node_id2block_i(te->blocks, node_id); -} -#else -; -#endif +inline block_id_t selva_node_id2block_i2(const struct SelvaTypeEntry *te, node_id_t node_id); SELVA_EXPORT -inline node_id_t selva_block_i2start(const struct SelvaTypeEntry *te, block_id_t block_i) -#ifndef __zig -{ - block_id_t block_capacity = te->blocks->block_capacity; - node_id_t start = block_i * block_capacity + 1; - return start; -} -#else -; -#endif +inline node_id_t selva_block_i2start(const struct SelvaTypeEntry *te, block_id_t block_i); SELVA_EXPORT -inline node_id_t selva_block_i2end(const struct SelvaTypeEntry *te, block_id_t block_i) -#ifndef __zig -{ - block_id_t block_capacity = te->blocks->block_capacity; - node_id_t start = block_i * block_capacity + 1; - node_id_t end = start + block_capacity - 1; - return end; -} -#else -; -#endif +inline node_id_t selva_block_i2end(const struct SelvaTypeEntry *te, block_id_t block_i); + +SELVA_EXPORT +inline void selva_foreach_block( + struct SelvaDb *db, + enum SelvaTypeBlockStatus or_mask, + void (*cb)(void *ctx, struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t block, node_id_t start), void *ctx); /** * \addtogroup block_status @@ -192,62 +147,27 @@ inline node_id_t selva_block_i2end(const struct SelvaTypeEntry *te, block_id_t b */ SELVA_EXPORT -inline enum SelvaTypeBlockStatus selva_block_status_get(const struct SelvaTypeEntry *te, block_id_t block_i) -#ifndef __zig -{ - return atomic_load(&te->blocks->blocks[block_i].status.atomic); -} -#else -; -#endif +inline enum SelvaTypeBlockStatus selva_block_status_get(const struct SelvaTypeEntry *te, block_id_t block_i); SELVA_EXPORT -inline void selva_block_status_replace(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus status) -#ifndef __zig -{ - atomic_store_explicit(&te->blocks->blocks[block_i].status.atomic, (uint32_t)status, memory_order_seq_cst); -} -#else -; -#endif +inline void selva_block_status_replace(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus status); /** * OR mask to the status. * @returns the previous status. */ SELVA_EXPORT -inline enum SelvaTypeBlockStatus selva_block_status_set(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask) -#ifndef __zig -{ - return atomic_fetch_or_explicit(&te->blocks->blocks[block_i].status.atomic, (uint32_t)mask, memory_order_seq_cst); -} -#else -; -#endif +inline enum SelvaTypeBlockStatus selva_block_status_set(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask); /** * Reset mask flags from the status. * @returns the previous status. */ SELVA_EXPORT -inline enum SelvaTypeBlockStatus selva_block_status_reset(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask) -#ifndef __zig -{ - return atomic_fetch_and_explicit(&te->blocks->blocks[block_i].status.atomic, ~(uint32_t)mask, memory_order_seq_cst); -} -#else -; -#endif +inline enum SelvaTypeBlockStatus selva_block_status_reset(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask); SELVA_EXPORT -inline bool selva_block_status_eq(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask) -#ifndef __zig -{ - return (atomic_load(&te->blocks->blocks[block_i].status.atomic) & (uint32_t)mask) == (uint32_t)mask; -} -#else -; -#endif +inline bool selva_block_status_eq(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask); SELVA_EXPORT size_t selva_get_type_status(const struct SelvaTypeEntry *te, size_t len, uint8_t packed_statuses[len]); @@ -261,90 +181,34 @@ size_t selva_get_type_status(const struct SelvaTypeEntry *te, size_t len, uint8_ */ SELVA_EXPORT __attribute__((nonnull, pure)) -inline const struct SelvaNodeSchema *selva_get_ns_by_te(const struct SelvaTypeEntry *te) -#ifndef __zig -{ - return &te->ns; -} -#else -; -#endif +inline const struct SelvaNodeSchema *selva_get_ns_by_te(const struct SelvaTypeEntry *te); SELVA_EXPORT -inline const struct SelvaFieldSchema *get_fs_by_fields_schema_field(const struct SelvaFieldsSchema *fields_schema, field_t field) -#ifndef __zig -{ - if (!fields_schema || field >= fields_schema->nr_fields) { - return nullptr; - } - - return &fields_schema->field_schemas[field]; -} -#else -; -#endif +inline const struct SelvaFieldSchema *get_fs_by_fields_schema_field(const struct SelvaFieldsSchema *fields_schema, field_t field); /** * Get the field schema for field. */ SELVA_EXPORT __attribute__((nonnull, pure)) -inline const struct SelvaFieldSchema *selva_get_fs_by_te_field(const struct SelvaTypeEntry *te, field_t field) -#ifndef __zig -{ - return get_fs_by_fields_schema_field(&te->ns.fields_schema, field); -} -#else -; -#endif +inline const struct SelvaFieldSchema *selva_get_fs_by_te_field(const struct SelvaTypeEntry *te, field_t field); /** * Get the field schema for field. */ SELVA_EXPORT __attribute__((nonnull, pure)) -inline const struct SelvaFieldSchema *selva_get_fs_by_ns_field(const struct SelvaNodeSchema *ns, field_t field) -#ifndef __zig -{ - return get_fs_by_fields_schema_field(&ns->fields_schema, field); -} -#else -; -#endif +inline const struct SelvaFieldSchema *selva_get_fs_by_ns_field(const struct SelvaNodeSchema *ns, field_t field); /** * Get the field schema for field. */ SELVA_EXPORT __attribute__((nonnull, pure)) -inline const struct SelvaFieldSchema *selva_get_fs_by_node(struct SelvaDb *db, struct SelvaNode *node, field_t field) -#ifndef __zig -{ - struct SelvaTypeEntry *type; - - type = selva_get_type_by_node(db, node); - if (!type) { - return nullptr; - } - - return selva_get_fs_by_ns_field(&type->ns, field); -} -#else -; -#endif +inline const struct SelvaFieldSchema *selva_get_fs_by_node(struct SelvaDb *db, struct SelvaNode *node, field_t field); SELVA_EXPORT -#if __has_c_attribute(reproducible) -[[reproducible]] -#endif -inline enum SelvaFieldType selva_get_fs_type(const struct SelvaFieldSchema *fs) -#ifndef __zig -{ - return fs->type; -} -#else -; -#endif +inline enum SelvaFieldType selva_get_fs_type(const struct SelvaFieldSchema *fs) [[reproducible]]; /** * Get the EdgeFieldConstraint from a ref field schema. @@ -355,28 +219,10 @@ inline enum SelvaFieldType selva_get_fs_type(const struct SelvaFieldSchema *fs) SELVA_EXPORT __attribute__((returns_nonnull)) __attribute__((nonnull)) -inline const struct EdgeFieldConstraint *selva_get_edge_field_constraint(const struct SelvaFieldSchema *fs) -#ifndef __zig -{ - assert(fs->type == SELVA_FIELD_TYPE_REFERENCE || - fs->type == SELVA_FIELD_TYPE_REFERENCES); - return &fs->edge_constraint; -} -#else -; -#endif +inline const struct EdgeFieldConstraint *selva_get_edge_field_constraint(const struct SelvaFieldSchema *fs); SELVA_EXPORT -inline const struct SelvaFieldsSchema *selva_get_edge_field_fields_schema(struct SelvaDb *db, const struct EdgeFieldConstraint *efc) -#ifndef __zig -{ - struct SelvaTypeEntry *te = selva_get_type_by_index(db, efc->edge_node_type); - - return (te) ? &selva_get_ns_by_te(te)->fields_schema : nullptr; -} -#else -; -#endif +inline const struct SelvaFieldsSchema *selva_get_edge_field_fields_schema(struct SelvaDb *db, const struct EdgeFieldConstraint *efc); /** * Strategy for adding new node expires. @@ -480,28 +326,14 @@ size_t selva_node_count(const struct SelvaTypeEntry *type) __attribute__((nonnul */ SELVA_EXPORT __attribute__((nonnull, pure)) -inline node_id_t selva_get_node_id(const struct SelvaNode *node) -#ifndef __zig -{ - return node->node_id; -} -#else -; -#endif +inline node_id_t selva_get_node_id(const struct SelvaNode *node); /** * Get the type of of node. */ SELVA_EXPORT __attribute__((nonnull, pure)) -inline node_type_t selva_get_node_type(const struct SelvaNode *node) -#ifndef __zig -{ - return node->type; -} -#else -; -#endif +inline node_type_t selva_get_node_type(const struct SelvaNode *node); /** * \addtogroup node_hash @@ -561,16 +393,11 @@ struct SelvaNodeRes selva_get_alias(struct SelvaTypeEntry *type, struct SelvaAli /** * Get alias by destination id. - * This may not seem very useful but this is actually the way that allows you to - * traverse all aliases to the given node_id by following the `next` pointer or - * by calling selva_get_next_alias(). */ SELVA_EXPORT const struct SelvaAlias *selva_get_alias_by_dest(struct SelvaAliases *aliases, node_id_t dest); -SELVA_EXPORT -const struct SelvaAlias *selva_get_next_alias(const struct SelvaAlias *alias); - +/* TODO Is this needed as a separate func? */ SELVA_EXPORT const char *selva_get_alias_name(const struct SelvaAlias *alias, size_t *len) __attribute__((nonnull, pure)); @@ -582,3 +409,182 @@ struct SelvaAliases *selva_get_aliases(struct SelvaTypeEntry *type, field_t fiel */ SELVA_EXPORT void selva_remove_all_aliases(struct SelvaTypeEntry *type, node_id_t node_id); + +/* + * Inline functions that can be inlined only in C. + */ +#ifndef __zig +inline node_type_t selva_get_max_type(const struct SelvaDb *db) +{ + assert(db->types[db->nr_types - 1].type == db->nr_types); + return db->nr_types; +} + +inline struct SelvaTypeEntry *selva_get_type_by_index(struct SelvaDb *db, node_type_t type) +{ + if (type == 0) { + return nullptr; + } + assert((size_t)type - 1 < db->nr_types); + return &db->types[type - 1]; +} + +inline struct SelvaTypeEntry *selva_get_type_by_node(struct SelvaDb *db, struct SelvaNode *node) +{ + assert((size_t)node->type - 1 < db->nr_types); + return &db->types[node->type - 1]; +} + +inline node_type_t selva_get_type(const struct SelvaTypeEntry *te) +{ + return te->type; +} + +inline block_id_t selva_get_nr_blocks(const struct SelvaTypeEntry *te) +{ + return te->blocks->len; +} + +inline block_id_t selva_get_block_capacity(const struct SelvaTypeEntry *te) +{ + return te->blocks->block_capacity; +} + +inline block_id_t selva_node_id2block_i(const struct SelvaTypeBlocks *blocks, node_id_t node_id) +{ + return SELVA_NODE_ID2BLOCK_I3(blocks->block_capacity, node_id); +} + +inline block_id_t selva_node_id2block_i2(const struct SelvaTypeEntry *te, node_id_t node_id) +{ + return selva_node_id2block_i(te->blocks, node_id); +} + +inline node_id_t selva_block_i2start(const struct SelvaTypeEntry *te, block_id_t block_i) +{ + block_id_t block_capacity = te->blocks->block_capacity; + node_id_t start = block_i * block_capacity + 1; + return start; +} + +inline node_id_t selva_block_i2end(const struct SelvaTypeEntry *te, block_id_t block_i) +{ + block_id_t block_capacity = te->blocks->block_capacity; + node_id_t start = block_i * block_capacity + 1; + node_id_t end = start + block_capacity - 1; + return end; +} + +inline void selva_foreach_block( + struct SelvaDb *db, + enum SelvaTypeBlockStatus or_mask, + void (*cb)(void *ctx, struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t block, node_id_t start), void *ctx) +{ + for (size_t ti = 0; ti < db->nr_types; ti++) { + struct SelvaTypeEntry *te = &db->types[ti]; + struct SelvaTypeBlocks *blocks = te->blocks; + + for (block_id_t block_i = 0; block_i < blocks->len; block_i++) { + struct SelvaTypeBlock *block = &blocks->blocks[block_i]; + + /* + * Note that we call it or_mask because the cb() is called if any + * bit of the mask is set in the status. + */ + if (atomic_load_explicit(&block->status.atomic, memory_order_consume) & or_mask) { + cb(ctx, db, te, block_i, selva_block_i2start(te, block_i)); + } + } + } +} + +inline enum SelvaTypeBlockStatus selva_block_status_get(const struct SelvaTypeEntry *te, block_id_t block_i) +{ + return atomic_load(&te->blocks->blocks[block_i].status.atomic); +} + +inline void selva_block_status_replace(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus status) +{ + atomic_store_explicit(&te->blocks->blocks[block_i].status.atomic, (uint32_t)status, memory_order_seq_cst); +} + +inline enum SelvaTypeBlockStatus selva_block_status_set(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask) +{ + return atomic_fetch_or_explicit(&te->blocks->blocks[block_i].status.atomic, (uint32_t)mask, memory_order_seq_cst); +} + +inline enum SelvaTypeBlockStatus selva_block_status_reset(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask) +{ + return atomic_fetch_and_explicit(&te->blocks->blocks[block_i].status.atomic, ~(uint32_t)mask, memory_order_seq_cst); +} + +inline bool selva_block_status_eq(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus mask) +{ + return (atomic_load(&te->blocks->blocks[block_i].status.atomic) & (uint32_t)mask) == (uint32_t)mask; +} + +inline const struct SelvaNodeSchema *selva_get_ns_by_te(const struct SelvaTypeEntry *te) +{ + return &te->ns; +} + +inline const struct SelvaFieldSchema *get_fs_by_fields_schema_field(const struct SelvaFieldsSchema *fields_schema, field_t field) +{ + if (!fields_schema || field >= fields_schema->nr_fields) { + return nullptr; + } + + return &fields_schema->field_schemas[field]; +} + +inline const struct SelvaFieldSchema *selva_get_fs_by_te_field(const struct SelvaTypeEntry *te, field_t field) +{ + return get_fs_by_fields_schema_field(&te->ns.fields_schema, field); +} + +inline const struct SelvaFieldSchema *selva_get_fs_by_ns_field(const struct SelvaNodeSchema *ns, field_t field) +{ + return get_fs_by_fields_schema_field(&ns->fields_schema, field); +} + +inline const struct SelvaFieldSchema *selva_get_fs_by_node(struct SelvaDb *db, struct SelvaNode *node, field_t field) +{ + struct SelvaTypeEntry *type; + + type = selva_get_type_by_node(db, node); + if (!type) { + return nullptr; + } + + return selva_get_fs_by_ns_field(&type->ns, field); +} + +inline enum SelvaFieldType selva_get_fs_type(const struct SelvaFieldSchema *fs) +{ + return fs->type; +} + +inline const struct EdgeFieldConstraint *selva_get_edge_field_constraint(const struct SelvaFieldSchema *fs) +{ + assert(fs->type == SELVA_FIELD_TYPE_REFERENCE || + fs->type == SELVA_FIELD_TYPE_REFERENCES); + return &fs->edge_constraint; +} + +inline const struct SelvaFieldsSchema *selva_get_edge_field_fields_schema(struct SelvaDb *db, const struct EdgeFieldConstraint *efc) +{ + struct SelvaTypeEntry *te = selva_get_type_by_index(db, efc->edge_node_type); + + return (te) ? &selva_get_ns_by_te(te)->fields_schema : nullptr; +} + +inline node_id_t selva_get_node_id(const struct SelvaNode *node) +{ + return node->node_id; +} + +inline node_type_t selva_get_node_type(const struct SelvaNode *node) +{ + return node->type; +} +#endif diff --git a/clibs/include/selva/fields.h b/clibs/include/selva/fields.h index 314ac2775d..0d181a6191 100644 --- a/clibs/include/selva/fields.h +++ b/clibs/include/selva/fields.h @@ -86,30 +86,18 @@ extern const uint8_t selva_fields_text_tl_empty[_selva_lang_last][8]; #define SELVA_FIELDS_TEXT_TL_EMPTY_LEN 6 -#if __has_c_attribute(unsequenced) -[[unsequenced]] -#else -__purefn -#endif -size_t selva_fields_get_data_size(const struct SelvaFieldSchema *fs); +size_t selva_fields_get_data_size(const struct SelvaFieldSchema *fs) [[unsequenced]]; -#if __has_c_attribute(reproducible) -[[reproducible]] -#endif -void *selva_fields_nfo2p(struct SelvaFields *fields, const struct SelvaFieldInfo *nfo); +void *selva_fields_nfo2p(struct SelvaFields *fields, const struct SelvaFieldInfo *nfo) [[reproducible]]; +/** + * Ensure that we have a reference struct. + * NOTE: This function doesn't create the necessary edge node. + */ struct SelvaNodeLargeReference *selva_fields_ensure_reference( struct SelvaNode *node, const struct SelvaFieldSchema *fs); -SELVA_EXPORT -struct SelvaNode *selva_fields_ensure_ref_edge( - struct SelvaDb *db, - struct SelvaNode *node, - const struct EdgeFieldConstraint *efc, - struct SelvaNodeLargeReference *ref, - node_id_t edge_id); - SELVA_EXPORT int selva_fields_get_mutable_string( struct SelvaNode *node, diff --git a/clibs/include/selva/types.h b/clibs/include/selva/types.h index 5b5e25e895..c1d51cebb5 100644 --- a/clibs/include/selva/types.h +++ b/clibs/include/selva/types.h @@ -41,7 +41,7 @@ enum SelvaFieldType { SELVA_FIELD_TYPE_WEAK_REFERENCE __attribute__((deprecated)) = 6, SELVA_FIELD_TYPE_WEAK_REFERENCES __attribute__((deprecated)) = 7, SELVA_FIELD_TYPE_ALIAS = 8, - SELVA_FIELD_TYPE_ALIASES = 9, + SELVA_FIELD_TYPE_ALIASES __attribute__((deprecated)) = 9, SELVA_FIELD_TYPE_COLVEC = 10, } __packed; @@ -58,28 +58,46 @@ struct EdgeFieldConstraint { struct SelvaFieldSchema { field_t field; enum SelvaFieldType type; + /** + * Offset to the default value in te->schema_buf. + * Only valid for field types that can have defaults. + */ + uint32_t default_off; union { + /** + * SELVA_FIELD_TYPE_STRING. + */ struct { size_t fixed_len; /*!< Greater than zero if the string has a fixed maximum length. */ - uint32_t default_off; /*!< Offset to the default value in te->schema_buf. */ uint32_t default_len; - } string; /*!< SELVA_FIELD_TYPE_STRING */ + } string; struct { uint32_t nr_defaults; /*!< Number of defaults for this text field. */ - uint32_t defaults_off; /*!< Offset to the default values in te->schema_buf. */ } text; /*!< SELVA_FIELD_TYPE_TEXT */ - struct EdgeFieldConstraint edge_constraint; /*!< SELVA_FIELD_TYPE_REFERENCE, SELVA_FIELD_TYPE_REFERENCES, SELVA_FIELD_TYPE_WEAK_REFERENCE, and SELVA_FIELD_TYPE_WEAK_REFERENCES. */ + /** + * SELVA_FIELD_TYPE_REFERENCE, SELVA_FIELD_TYPE_REFERENCES, SELVA_FIELD_TYPE_WEAK_REFERENCE, and SELVA_FIELD_TYPE_WEAK_REFERENCES. + */ + struct EdgeFieldConstraint edge_constraint; + /** + * SELVA_FIELD_TYPE_MICRO_BUFFER. + */ struct { - uint32_t default_off; /*!< Offset to the default in the raw schema buffer. */ uint16_t len; /*!< Size of the smb. */ - } smb; /*!< SELVA_FIELD_TYPE_MICRO_BUFFER */ - size_t alias_index; /*!< Index in aliases for SELVA_FIELD_TYPE_ALIAS and SELVA_FIELD_TYPE_ALIASES. */ + } smb; + /** + * SELVA_FIELD_TYPE_ALIAS. + */ + struct { + size_t index; /*!< Index in aliases. */ + } alias; + /** + * SELVA_FIELD_TYPE_COLVEC. + */ struct { uint16_t vec_len; /*!< Length of a single vector. */ uint16_t comp_size; /*!< Component size in the vector. */ - uint32_t default_off; /*!< Offset to the default value in te->schema_buf. */ field_t index; /*!< Index in te->col_fields.colvec.v. */ - } colvec; /*!< SELVA_FIELD_TYPE_COLVEC */ + } colvec; }; } __designated_init; @@ -100,8 +118,8 @@ struct SelvaFieldsSchema { }; struct SelvaNodeSchema { - size_t nr_aliases; /*!< Number of alias fields in this type. */ - size_t nr_colvecs; /*!< Number of columnar vector fields. */ + size_t nr_alias_fields; /*!< Number of alias fields in this type. */ + size_t nr_colvec_fields; /*!< Number of columnar vector fields. */ struct SelvaFieldsSchema fields_schema; /* Nothing must be put after this line. */ }; @@ -135,6 +153,8 @@ struct SelvaNodeRes { enum SelvaTypeBlockStatus block_status; } __designated_init; +typedef void (*selva_db_dirty_hook_t)(void *ctx, node_type_t type, node_id_t node_id); + SELVA_EXPORT bool selva_is_valid_field_type(enum SelvaFieldType ftype); diff --git a/clibs/lib/selva/alias.c b/clibs/lib/selva/alias.c index e6cb369613..1197ef1a4f 100644 --- a/clibs/lib/selva/alias.c +++ b/clibs/lib/selva/alias.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025 SAULX + * Copyright (c) 2024-2026 SAULX * SPDX-License-Identifier: MIT */ #include @@ -14,34 +14,36 @@ void selva_init_aliases(struct SelvaTypeEntry *type) const struct SelvaFieldsSchema *fields_schema = &type->ns.fields_schema; const size_t nr_fields = fields_schema->nr_fields; - type->aliases = selva_malloc(type->ns.nr_aliases * sizeof(struct SelvaAliases)); + type->aliases = selva_malloc(type->ns.nr_alias_fields * sizeof(struct SelvaAliases)); for (size_t i = 0; i < nr_fields; i++) { const struct SelvaFieldSchema *fs = &fields_schema->field_schemas[i]; - struct SelvaAliases *field_aliases = &type->aliases[fs->alias_index]; - switch (fs->type) { - case SELVA_FIELD_TYPE_ALIAS: - field_aliases->single = true; - __attribute__((__fallthrough__)); - case SELVA_FIELD_TYPE_ALIASES: -#if 0 - assert(fs->alias_index < type->ns.nr_aliases); -#endif + if (fs->type == SELVA_FIELD_TYPE_ALIAS) { + struct SelvaAliases *field_aliases = &type->aliases[fs->alias.index]; + + assert(fs->alias.index < type->ns.nr_alias_fields); field_aliases->field = fs->field; + field_aliases->nr_aliases = 0; RB_INIT(&field_aliases->alias_by_name); RB_INIT(&field_aliases->alias_by_dest); - __attribute__((__fallthrough__)); - default: } } } void selva_destroy_aliases(struct SelvaTypeEntry *type) { - /* We assume that all the aliases in the aliases structs have been freed already. */ +#if 0 + /* + * We assume that all the aliases in the aliases structs have been freed already. + */ + for (size_t i = 0; i < type->ns.nr_alias_fields; i++) { + assert(type->aliases->nr_aliases == 0); + } +#endif + selva_free(type->aliases); - type->ns.nr_aliases = 0; + type->ns.nr_alias_fields = 0; type->aliases = nullptr; } @@ -57,45 +59,28 @@ static struct SelvaAlias *insert_alias_by_name(struct SelvaAliases *aliases, str return old_alias; } -static void remove_alias_by_dest(struct SelvaAliases *aliases, struct SelvaAlias *alias) +static inline void remove_alias_by_name(struct SelvaAliases *aliases, struct SelvaAlias *alias) { - RB_REMOVE(SelvaAliasesByDest, &aliases->alias_by_dest, alias); + struct SelvaAlias *removed = RB_REMOVE(SelvaAliasesByName, &aliases->alias_by_name, alias); + assert(removed); + aliases->nr_aliases--; } -static void remove_alias_by_name(struct SelvaAliases *aliases, struct SelvaAlias *alias) +static inline struct SelvaAlias *insert_alias_by_dest(struct SelvaAliases *aliases, struct SelvaAlias *alias) { - struct SelvaAlias *removed = RB_REMOVE(SelvaAliasesByName, &aliases->alias_by_name, alias); - assert(removed); + return RB_INSERT(SelvaAliasesByDest, &aliases->alias_by_dest, alias); +} + +static inline void remove_alias_by_dest(struct SelvaAliases *aliases, struct SelvaAlias *alias) +{ + (void)RB_REMOVE(SelvaAliasesByDest, &aliases->alias_by_dest, alias); } static void del_alias(struct SelvaAliases *aliases, struct SelvaAlias *alias) { remove_alias_by_name(aliases, alias); - - if (alias->prev) { - /* - * `alias` is in the middle or the last in the chain for this dest. - */ - alias->prev->next = alias->next; - } else { - /* - * `alias` must be the first in alias_by_dest with this destination. - * We must make the `next` the first. - */ - remove_alias_by_dest(aliases, alias); - if (alias->next) { - (void)RB_INSERT(SelvaAliasesByDest, &aliases->alias_by_dest, alias->next); - } - } - if (alias->next) { - /* - * This either sets a new `prev` or nulls it if `alias` was the first. - */ - alias->next->prev = alias->prev; - } - + remove_alias_by_dest(aliases, alias); selva_free(alias); - aliases->nr_aliases--; } size_t selva_alias_count(const struct SelvaAliases *aliases) @@ -106,30 +91,16 @@ size_t selva_alias_count(const struct SelvaAliases *aliases) node_id_t selva_set_alias_p(struct SelvaAliases *aliases, struct SelvaAlias *new_alias) { struct SelvaAlias *old_alias; + struct SelvaAlias *old_by_dest; node_id_t old_dest = 0; - new_alias->prev = nullptr; - new_alias->next = nullptr; - -retry: - old_alias = insert_alias_by_name(aliases, new_alias); - if (old_alias) { + while ((old_alias = insert_alias_by_name(aliases, new_alias))) { old_dest = old_alias->dest; del_alias(aliases, old_alias); - goto retry; } - struct SelvaAlias *prev_by_dest = RB_INSERT(SelvaAliasesByDest, &aliases->alias_by_dest, new_alias); - if (prev_by_dest) { - new_alias->prev = prev_by_dest; - new_alias->next = prev_by_dest->next; - prev_by_dest->next = new_alias; - if (aliases->single) { - /* - * Restrict this field to a single alias, i.e. this is SELVA_FIELD_TYPE_ALIAS. - */ - del_alias(aliases, prev_by_dest); - } + while ((old_by_dest = insert_alias_by_dest(aliases, new_alias))) { + del_alias(aliases, old_by_dest); } return old_dest; @@ -172,33 +143,9 @@ void selva_del_alias_by_dest(struct SelvaAliases *aliases, node_id_t dest) }; struct SelvaAlias *alias = RB_FIND(SelvaAliasesByDest, &aliases->alias_by_dest, &find); - if (!alias) { - return; - } - - remove_alias_by_dest(aliases, alias); - assert(!alias->prev); /* This must be the first one on the list of by_dest aliases. */ - - /* - * Remove this alias from by_name. - */ - remove_alias_by_name(aliases, alias); - - /* - * Remove the rest of aliases by this dest from by_name. - */ - struct SelvaAlias *next = alias->next; - while (next) { - struct SelvaAlias *tmp = next->next; - - assert(next->dest == alias->dest); - remove_alias_by_name(aliases, next); - selva_free(next); - - next = tmp; + if (alias) { + del_alias(aliases, alias); } - - selva_free(alias); } struct SelvaNodeRes selva_get_alias(struct SelvaTypeEntry *type, struct SelvaAliases *aliases, const char *name_str, size_t name_len) @@ -236,11 +183,6 @@ const struct SelvaAlias *selva_get_alias_by_dest(struct SelvaAliases *aliases, n return RB_FIND(SelvaAliasesByDest, &aliases->alias_by_dest, &find); } -const struct SelvaAlias *selva_get_next_alias(const struct SelvaAlias *alias) -{ - return (alias) ? alias->next : nullptr; -} - const char *selva_get_alias_name(const struct SelvaAlias *alias, size_t *len) { *len = alias->name_len; @@ -249,7 +191,7 @@ const char *selva_get_alias_name(const struct SelvaAlias *alias, size_t *len) struct SelvaAliases *selva_get_aliases(struct SelvaTypeEntry *type, field_t field) { - size_t nr_aliases = type->ns.nr_aliases; + const size_t nr_aliases = type->ns.nr_alias_fields; for (size_t i = 0; i < nr_aliases; i++) { if (type->aliases[i].field == field) { @@ -262,7 +204,7 @@ struct SelvaAliases *selva_get_aliases(struct SelvaTypeEntry *type, field_t fiel void selva_remove_all_aliases(struct SelvaTypeEntry *type, node_id_t node_id) { - size_t nr_aliases = type->ns.nr_aliases; + const size_t nr_aliases = type->ns.nr_alias_fields; for (size_t i = 0; i < nr_aliases; i++) { selva_del_alias_by_dest(&type->aliases[i], node_id); diff --git a/clibs/lib/selva/colvec.c b/clibs/lib/selva/colvec.c index c9aef81d3e..e06e5889bb 100644 --- a/clibs/lib/selva/colvec.c +++ b/clibs/lib/selva/colvec.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025 SAULX + * Copyright (c) 2024-2026 SAULX * SPDX-License-Identifier: MIT * * A colvec is a columnar vector field in Selva. Specifically a colvec structure @@ -29,7 +29,7 @@ static inline size_t colvec_slab_off(size_t block_capacity, size_t vec_size, nod void colvec_init_te(struct SelvaTypeEntry *te) { - size_t nr_colvecs = te->ns.nr_colvecs; + size_t nr_colvecs = te->ns.nr_colvec_fields; struct SelvaNodeSchema *ns = &te->ns; size_t nr_blocks = te->blocks->len; size_t block_capacity = selva_get_block_capacity(te); @@ -48,7 +48,7 @@ void colvec_init_te(struct SelvaTypeEntry *te) size_t ci = fs->colvec.index; size_t slab_size = block_capacity * fs->colvec.vec_len * fs->colvec.comp_size; - assert(ci < ns->nr_colvecs); + assert(ci < ns->nr_colvec_fields); te->col_fields.colvec[ci] = (struct SelvaColvec){ .field = i, @@ -62,7 +62,7 @@ void colvec_init_te(struct SelvaTypeEntry *te) void colvec_deinit_te(struct SelvaTypeEntry *te) { - for (size_t i = 0; i < te->ns.nr_colvecs; i++) { + for (size_t i = 0; i < te->ns.nr_colvec_fields; i++) { struct SelvaColvec *colvec = &te->col_fields.colvec[i]; block_id_t blocks_len = te->blocks->len; @@ -100,7 +100,7 @@ void colvec_init_node(struct SelvaTypeEntry *te, struct SelvaNode *node) /* * Initialize each col field of this node. */ - for (size_t i = 0; i < te->ns.nr_colvecs; i++) { + for (size_t i = 0; i < te->ns.nr_colvec_fields; i++) { struct SelvaColvec *colvec = &te->col_fields.colvec[i]; const struct SelvaFieldSchema *fs = get_fs_by_fields_schema_field(&te->ns.fields_schema, colvec->field); uint8_t *slab = colvec_init_slab(colvec, block_i); @@ -108,9 +108,9 @@ void colvec_init_node(struct SelvaTypeEntry *te, struct SelvaNode *node) assert(fs->type == SELVA_FIELD_TYPE_COLVEC); void *vec = slab + colvec_slab_off(selva_get_block_capacity(te), colvec->vec_size, node->node_id); - if (fs->colvec.default_off > 0) { + if (fs->default_off > 0) { const uint8_t *schema_buf = te->schema_buf; - const void *default_vec = schema_buf + fs->colvec.default_off; + const void *default_vec = schema_buf + fs->default_off; memcpy(vec, default_vec, colvec->vec_size); } else { memset(vec, 0, colvec->vec_size); @@ -149,6 +149,17 @@ void colvec_set_vec(struct SelvaTypeEntry *te, node_id_t node_id, const struct S memcpy(dst, vec, colvec->vec_size); } +void colvec_clear_vec(struct SelvaTypeEntry *te, node_id_t node_id, const struct SelvaFieldSchema *fs) +{ + assert(fs->type == SELVA_FIELD_TYPE_COLVEC); + + struct SelvaColvec *colvec = &te->col_fields.colvec[fs->colvec.index]; + uint8_t *slab = (uint8_t *)colvec->v[selva_node_id2block_i2(te, node_id)]; + void *dst = slab + colvec_slab_off(selva_get_block_capacity(te), colvec->vec_size, node_id); + + memset(dst, 0, colvec->vec_size); +} + int colvec_foreach(struct SelvaTypeEntry *te, const struct SelvaFieldSchema *fs, node_id_t start, uint32_t len, void (*cb)(node_id_t node_id, void *vec, void *arg), void *arg) { struct SelvaColvec *colvec = colvec_get(te, fs); diff --git a/clibs/lib/selva/db.c b/clibs/lib/selva/db.c index a3385ce162..b5364e7e42 100644 --- a/clibs/lib/selva/db.c +++ b/clibs/lib/selva/db.c @@ -62,16 +62,9 @@ int SelvaAlias_cmp_dest(const struct SelvaAlias *a, const struct SelvaAlias *b) return node_id_cmp(a->dest, b->dest); } -__attribute__((nonnull)) -static int SelvaTypeEntry_cmp(const struct SelvaTypeEntry *a, const struct SelvaTypeEntry *b) -{ - return (int)((struct SelvaTypeEntryFind *)a)->type - (int)((struct SelvaTypeEntryFind *)b)->type; -} - -RB_GENERATE(SelvaTypeEntryIndex, SelvaTypeEntry, _entry, SelvaTypeEntry_cmp) RB_GENERATE(SelvaNodeIndex, SelvaNode, _index_entry, SelvaNode_cmp) -RB_GENERATE(SelvaAliasesByName, SelvaAlias, _entry1, SelvaAlias_cmp_name) -RB_GENERATE(SelvaAliasesByDest, SelvaAlias, _entry2, SelvaAlias_cmp_dest) +RB_GENERATE(SelvaAliasesByName, SelvaAlias, _entryByName, SelvaAlias_cmp_name) +RB_GENERATE(SelvaAliasesByDest, SelvaAlias, _entryByDest, SelvaAlias_cmp_dest) static bool node_expire_cmp(struct SelvaExpireToken *tok, selva_expire_cmp_arg_t arg) { @@ -122,10 +115,9 @@ void selva_expire_node_cancel(struct SelvaDb *db, node_type_t type, node_id_t no static void expire_cb(struct SelvaExpireToken *tok, void *) { struct SelvaDbExpireToken *token = containerof(tok, typeof(*token), token); - struct SelvaTypeEntry *te; struct SelvaNodeRes res; - te = selva_get_type_by_index(token->db, token->type); + auto te = selva_get_type_by_index(token->db, token->type); assert(te); res = selva_find_node(te, token->node_id); if (res.node) { @@ -148,8 +140,96 @@ void selva_db_expire_tick(struct SelvaDb *db, int64_t now) selva_expire_tick(&db->expiring, nullptr, now); } +static bool eq_type_exists(struct SelvaDb *db, node_type_t type, const uint8_t *schema_buf, size_t schema_len) +{ + auto te = selva_get_type_by_index(db, type); + return (te && te->schema_len == schema_len && !memcmp(te->schema_buf, schema_buf, schema_len)); +} + +static void clone_schema_buf(struct SelvaTypeEntry *te, const uint8_t *schema_buf, size_t schema_len) +{ + te->schema_buf = selva_malloc(schema_len); + memcpy(te->schema_buf, schema_buf, schema_len); + te->schema_len = schema_len; +} -static uint32_t te_slab_size(void) +static int selva_db_create_type(struct SelvaDb *db, node_type_t type, const uint8_t *schema_buf, size_t schema_len) +{ + struct schema_info nfo; + int err; + + if (eq_type_exists(db, type, schema_buf, schema_len)) { + return SELVA_EEXIST; + } + + err = schemabuf_get_info(&nfo, schema_buf, schema_len); + if (err) { + return err; + } + + if (nfo.block_capacity == 0) { + return SELVA_EINVAL; + } + + if (nfo.nr_fields > SELVA_FIELDS_MAX) { + /* schema too large. */ + return SELVA_ENOBUFS; + } + + struct SelvaTypeEntry *te = &db->types[type - 1]; + +#if 0 + fprintf(stderr, "schema_buf: [ "); + for (size_t i = 0; i < schema_len; i++) { + fprintf(stderr, "%x, ", schema_buf[i]); + } + fprintf(stderr, "]\n"); +#endif + + memset(te, 0, sizeof(*te)); + te->type = type; + err = schemabuf_parse_ns(&te->ns, schema_buf, schema_len, db->sdb_version ?: SELVA_SDB_VERSION); + if (err) { + return err; + } + + clone_schema_buf(te, schema_buf, schema_len); + te->blocks = alloc_blocks(nfo.block_capacity); + selva_init_aliases(te); + colvec_init_te(te); + + const size_t node_size = sizeof_wflex(struct SelvaNode, fields.fields_map, nfo.nr_fields); + mempool_init2(&te->nodepool, NODEPOOL_SLAB_SIZE, node_size, alignof(size_t), MEMPOOL_ADV_RANDOM | MEMPOOL_ADV_HP_SOFT); + + return 0; +} + +struct SelvaDbSchemaDesc { + node_type_t type; + uint32_t len; +} __packed; + +static size_t schema_count_types(size_t len, uint8_t schema[len]) +{ + size_t n = 0; + + for (size_t i = 0; i < len;) { + struct SelvaDbSchemaDesc desc; + + if (unlikely(len - i < sizeof(desc))) { + fprintf(stderr, "%s schema too short\n", __func__); + return 0; + } + + memcpy(&desc, schema + i, sizeof(desc)); + i += sizeof(desc) + desc.len; + n++; + } + + return n; +} + +static uint32_t te_size(void) { const size_t te_size = sizeof(struct SelvaTypeEntry); uint32_t slab_size = (1'048'576 / te_size) * te_size; @@ -165,17 +245,51 @@ static uint32_t te_slab_size(void) return slab_size; } -struct SelvaDb *selva_db_create(void) +static void noop_dirty_hook(void *, node_type_t, node_id_t) +{ +} + +struct SelvaDb *selva_db_create(size_t len, uint8_t schema[len]) { - struct SelvaDb *db = selva_calloc(1, sizeof(*db)); + const size_t nr_types = schema_count_types(len, schema); + struct SelvaDb *db; - mempool_init(&db->types.pool, te_slab_size(), sizeof(struct SelvaTypeEntry), alignof(struct SelvaTypeEntry)); + if (nr_types == 0) { + return nullptr; + } + + db = selva_calloc(1, offsetof(typeof(*db), types[0]) + nr_types * te_size()); + db->nr_types = nr_types; db->expiring.expire_cb = expire_cb; db->expiring.cancel_cb = cancel_cb; db->dirfd = AT_FDCWD; + db->dirty_hook_fun = noop_dirty_hook; selva_expire_init(&db->expiring); + for (size_t i = 0; i < len;) { + struct SelvaDbSchemaDesc desc; + int err; + + if (unlikely(len - i < sizeof(desc))) { + fprintf(stderr, "%s schema too short\n", __func__); + goto fail; + } + + memcpy(&desc, schema + i, sizeof(desc)); + + assert(desc.type <= db->nr_types); + err = selva_db_create_type(db, desc.type, schema + i + sizeof(desc), desc.len); + i += sizeof(desc) + desc.len; + if (err) { + fprintf(stderr, "%s failed to create type %u: %s\n", __func__, desc.type, selva_strerror(err)); + goto fail; + } + } + return db; +fail: + selva_db_destroy(db); + return nullptr; } int selva_db_chdir(struct SelvaDb *db, const char *pathname_str, size_t pathname_len) @@ -190,7 +304,11 @@ int selva_db_chdir(struct SelvaDb *db, const char *pathname_str, size_t pathname close(db->dirfd); } - fd = open(buf, O_SEARCH | O_DIRECTORY | O_CLOEXEC); + fd = open(buf, +#ifdef __MACH__ + O_SEARCH | +#endif + O_DIRECTORY | O_CLOEXEC); if (fd == -1) { return SELVA_EIO; } @@ -199,13 +317,19 @@ int selva_db_chdir(struct SelvaDb *db, const char *pathname_str, size_t pathname return 0; } +void selva_db_set_dirty_hook(struct SelvaDb *db, selva_db_dirty_hook_t dirty_hook, void *ctx) +{ + db->dirty_hook_fun = dirty_hook ?: noop_dirty_hook; + db->dirty_hook_ctx = ctx; +} + /** * Delete all nodes of a block. * Pretty safe as long as block_i is within the range. */ static inline void selva_del_block_unsafe(struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t block_i, bool unload) { - struct SelvaNodeIndex *nodes = &te->blocks->blocks[block_i].nodes; + auto nodes = &te->blocks->blocks[block_i].nodes; struct SelvaNode *node; struct SelvaNode *tmp; @@ -231,19 +355,18 @@ void selva_del_block(struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t b static void del_all_nodes(struct SelvaDb *db, struct SelvaTypeEntry *te) { struct SelvaTypeBlocks *blocks = te->blocks; - block_id_t blocks_len = blocks->len; - for (block_id_t block_i = 0; block_i < blocks_len; block_i++) { - selva_del_block_unsafe(db, te, block_i, false); - } -} + /* blocks could be uninitialized on early startup fail. */ + if (blocks) { + block_id_t blocks_len = blocks->len; -static inline void clear_type(struct SelvaDb *db, struct SelvaTypeEntry *te) -{ - del_all_nodes(db, te); + for (block_id_t block_i = 0; block_i < blocks_len; block_i++) { + selva_del_block_unsafe(db, te, block_i, false); + } + } } -static void destroy_type(struct SelvaDb *db, struct SelvaTypeEntry *te) +static void destroy_type(struct SelvaTypeEntry *te) { /* * We assume that as the nodes are deleted the aliases are also freed. @@ -253,33 +376,24 @@ static void destroy_type(struct SelvaDb *db, struct SelvaTypeEntry *te) colvec_deinit_te(te); - /* - * Remove this type from the type list. - */ - RB_REMOVE(SelvaTypeEntryIndex, &db->types.index, te); - mempool_destroy(&te->nodepool); selva_free(te->blocks); schemabuf_deinit_fields_schema(&te->ns.fields_schema); + selva_free(te->schema_buf); #if 0 memset(te, 0, sizeof(*te)); #endif - selva_free(te->schema_buf); - mempool_return(&db->types.pool, te); - db->types.count--; + te->type = 0; } static void del_all_types(struct SelvaDb *db) { - struct SelvaTypeEntry *te; - struct SelvaTypeEntry *tmp; - - RB_FOREACH_SAFE(te, SelvaTypeEntryIndex, &db->types.index, tmp) { - clear_type(db, te); + for (size_t ti = 0; ti < db->nr_types; ti++) { + del_all_nodes(db, &db->types[ti]); } - RB_FOREACH_SAFE(te, SelvaTypeEntryIndex, &db->types.index, tmp) { - destroy_type(db, te); + for (size_t ti = 0; ti < db->nr_types; ti++) { + destroy_type(&db->types[ti]); } } @@ -296,14 +410,6 @@ void selva_db_destroy(struct SelvaDb *db) selva_free(db); } -static bool eq_type_exists(struct SelvaDb *db, node_type_t type, const uint8_t *schema_buf, size_t schema_len) -{ - struct SelvaTypeEntry *te; - - te = selva_get_type_by_index(db, type); - return (te && te->schema_len == schema_len && !memcmp(te->schema_buf, schema_buf, schema_len)); -} - /** * Alloc .blocks in a type entry. */ @@ -337,110 +443,11 @@ struct SelvaTypeBlock *selva_get_block(struct SelvaTypeBlocks *blocks, node_id_t return &blocks->blocks[block_i]; } -void selva_foreach_block(struct SelvaDb *db, enum SelvaTypeBlockStatus or_mask, void (*cb)(void *ctx, struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t block, node_id_t start), void *ctx) -{ - struct SelvaTypeEntry *te; - - RB_FOREACH(te, SelvaTypeEntryIndex, &db->types.index) { - struct SelvaTypeBlocks *blocks = te->blocks; - - for (block_id_t block_i = 0; block_i < blocks->len; block_i++) { - struct SelvaTypeBlock *block = &blocks->blocks[block_i]; - - /* - * Note that we call it or_mask because the cb() is called if any - * bit of the mask is set in the status. - */ - if (atomic_load_explicit(&block->status.atomic, memory_order_consume) & or_mask) { - cb(ctx, db, te, block_i, selva_block_i2start(te, block_i)); - } - } - } -} - -static void clone_schema_buf(struct SelvaTypeEntry *te, const uint8_t *schema_buf, size_t schema_len) -{ - te->schema_buf = selva_malloc(schema_len); - memcpy(te->schema_buf, schema_buf, schema_len); - te->schema_len = schema_len; -} - -int selva_db_create_type(struct SelvaDb *db, node_type_t type, const uint8_t *schema_buf, size_t schema_len) -{ - struct schema_info nfo; - int err; - - if (eq_type_exists(db, type, schema_buf, schema_len)) { - return SELVA_EEXIST; - } - - err = schemabuf_get_info(&nfo, schema_buf, schema_len); - if (err) { - return err; - } - - if (nfo.block_capacity == 0) { - return SELVA_EINVAL; - } - - if (nfo.nr_fields > SELVA_FIELDS_MAX) { - /* schema too large. */ - return SELVA_ENOBUFS; - } +extern inline node_type_t selva_get_max_type(const struct SelvaDb *db); - struct SelvaTypeEntry *te = mempool_get(&db->types.pool); +extern inline struct SelvaTypeEntry *selva_get_type_by_index(struct SelvaDb *db, node_type_t type); -#if 0 - fprintf(stderr, "schema_buf: [ "); - for (size_t i = 0; i < schema_len; i++) { - fprintf(stderr, "%x, ", schema_buf[i]); - } - fprintf(stderr, "]\n"); -#endif - - memset(te, 0, sizeof(*te)); - te->type = type; - err = schemabuf_parse_ns(&te->ns, schema_buf, schema_len, db->sdb_version ?: SELVA_SDB_VERSION); - if (err) { - mempool_return(&db->types.pool, te); - return err; - } - - clone_schema_buf(te, schema_buf, schema_len); - te->blocks = alloc_blocks(nfo.block_capacity); - selva_init_aliases(te); - colvec_init_te(te); - - const size_t node_size = sizeof_wflex(struct SelvaNode, fields.fields_map, nfo.nr_fields); - mempool_init2(&te->nodepool, NODEPOOL_SLAB_SIZE, node_size, alignof(size_t), MEMPOOL_ADV_RANDOM | MEMPOOL_ADV_HP_SOFT); - - if (RB_INSERT(SelvaTypeEntryIndex, &db->types.index, te)) { - db_panic("Schema update not supported"); - } - db->types.count++; - return 0; -} - -struct SelvaTypeEntry *selva_get_type_by_index(const struct SelvaDb *db, node_type_t type) -{ - struct SelvaTypeEntryFind find = { type }; - - if (type == 0) { - return nullptr; - } - - return RB_FIND(SelvaTypeEntryIndex, (typeof_unqual(db->types.index) *)&db->types.index, (struct SelvaTypeEntry *)&find); -} - -struct SelvaTypeEntry *selva_get_type_by_node(const struct SelvaDb *db, struct SelvaNode *node) -{ - struct SelvaTypeEntryFind find = { node->type }; - struct SelvaTypeEntry *te; - - te = RB_FIND(SelvaTypeEntryIndex, (typeof_unqual(db->types.index) *)&db->types.index, (struct SelvaTypeEntry *)&find); - assert(te); - return te; -} +extern inline struct SelvaTypeEntry *selva_get_type_by_node(struct SelvaDb *db, struct SelvaNode *node); extern inline node_type_t selva_get_type(const struct SelvaTypeEntry *te); @@ -456,6 +463,8 @@ extern inline node_id_t selva_block_i2start(const struct SelvaTypeEntry *te, blo extern inline node_id_t selva_block_i2end(const struct SelvaTypeEntry *te, block_id_t block_i); +extern inline void selva_foreach_block(struct SelvaDb *db, enum SelvaTypeBlockStatus or_mask, void (*cb)(void *ctx, struct SelvaDb *db, struct SelvaTypeEntry *te, block_id_t block, node_id_t start), void *ctx); + extern inline enum SelvaTypeBlockStatus selva_block_status_get(const struct SelvaTypeEntry *te, block_id_t block_i); extern inline void selva_block_status_replace(const struct SelvaTypeEntry *te, block_id_t block_i, enum SelvaTypeBlockStatus status); @@ -500,8 +509,8 @@ extern inline const struct SelvaFieldsSchema *selva_get_edge_field_fields_schema static inline void del_node(struct SelvaDb *db, struct SelvaTypeEntry *type, struct SelvaNode *node, bool unload) { - struct SelvaTypeBlock *block = selva_get_block(type->blocks, node->node_id); - struct SelvaNodeIndex *nodes = &block->nodes; + auto block = selva_get_block(type->blocks, node->node_id); + auto nodes = &block->nodes; atomic_fetch_or_explicit(&block->status.atomic, (uint32_t)SELVA_TYPE_BLOCK_STATUS_DIRTY, memory_order_release); @@ -548,7 +557,6 @@ static void selva_unl_node(struct SelvaDb *db, struct SelvaTypeEntry *type, stru void selva_flush_node(struct SelvaDb *db, struct SelvaTypeEntry *type, struct SelvaNode *node) { selva_mark_dirty(type, node->node_id); - selva_remove_all_aliases(type, node->node_id); selva_fields_flush(db, node); } @@ -556,7 +564,10 @@ void selva_flush_node(struct SelvaDb *db, struct SelvaTypeEntry *type, struct Se void selva_mark_dirty(struct SelvaTypeEntry *te, node_id_t node_id) { if (node_id > 0) { + struct SelvaDb *db = containerof(te, typeof(*db), types[te->type - 1]); + selva_block_status_set(te, selva_node_id2block_i2(te, node_id), SELVA_TYPE_BLOCK_STATUS_DIRTY); + db->dirty_hook_fun(db->dirty_hook_ctx, te->type, node_id); } } @@ -566,7 +577,7 @@ struct SelvaNodeRes selva_find_node(struct SelvaTypeEntry *type, node_id_t node_ return (struct SelvaNodeRes){}; } - struct SelvaTypeBlocks *blocks = type->blocks; + auto blocks = type->blocks; struct SelvaNodeRes res = { .block = selva_node_id2block_i(blocks, node_id), }; @@ -577,7 +588,7 @@ struct SelvaNodeRes selva_find_node(struct SelvaTypeEntry *type, node_id_t node_ goto out; } - struct SelvaNodeIndex *nodes = &block->nodes; + auto nodes = &block->nodes; struct SelvaNode find = { .node_id = node_id, }; @@ -589,7 +600,7 @@ struct SelvaNodeRes selva_find_node(struct SelvaTypeEntry *type, node_id_t node_ struct SelvaNodeRes selva_nfind_node(struct SelvaTypeEntry *type, node_id_t node_id) { - struct SelvaTypeBlocks *blocks = type->blocks; + auto blocks = type->blocks; struct SelvaNodeRes res = { .block = selva_node_id2block_i(blocks, node_id), }; @@ -598,7 +609,7 @@ struct SelvaNodeRes selva_nfind_node(struct SelvaTypeEntry *type, node_id_t node goto out; } - struct SelvaTypeBlock *block = &blocks->blocks[res.block]; + auto block = &blocks->blocks[res.block]; res.block_status = atomic_load_explicit(&block->status.atomic, memory_order_acquire); if (!(res.block_status & SELVA_TYPE_BLOCK_STATUS_INMEM)) { goto out; @@ -651,9 +662,7 @@ struct SelvaNodeRes selva_upsert_node(struct SelvaTypeEntry *type, node_id_t nod */ RB_INSERT_NEXT(SelvaNodeIndex, &block->nodes, type->max_node, node); } else { - struct SelvaNode *prev; - - prev = RB_INSERT(SelvaNodeIndex, &block->nodes, node); + auto prev = RB_INSERT(SelvaNodeIndex, &block->nodes, node); if (prev) { mempool_return(&type->nodepool, node); res.node = prev; @@ -683,13 +692,12 @@ struct SelvaNodeRes selva_upsert_node(struct SelvaTypeEntry *type, node_id_t nod */ static struct SelvaNodeRes selva_min_node_from(struct SelvaTypeEntry *type, block_id_t start) { - struct SelvaTypeBlocks *blocks = type->blocks; + auto blocks = type->blocks; const size_t len = blocks->len; struct SelvaNodeRes res = {}; for (size_t i = start; i < len; i++) { - struct SelvaTypeBlock *block = &blocks->blocks[i]; - struct SelvaNode *node; + auto block = &blocks->blocks[i]; res.block = i; res.block_status = atomic_load_explicit(&block->status.atomic, memory_order_acquire); @@ -697,7 +705,7 @@ static struct SelvaNodeRes selva_min_node_from(struct SelvaTypeEntry *type, bloc break; } - node = RB_MIN(SelvaNodeIndex, &block->nodes); + auto node = RB_MIN(SelvaNodeIndex, &block->nodes); if (node) { res.node = node; break; @@ -717,12 +725,11 @@ struct SelvaNodeRes selva_min_node(struct SelvaTypeEntry *type) */ static struct SelvaNodeRes selva_max_node_from(struct SelvaTypeEntry *type, block_id_t start) { - struct SelvaTypeBlocks *blocks = type->blocks; + auto blocks = type->blocks; struct SelvaNodeRes res = {}; for (ssize_t i = start; i >= 0; i--) { - struct SelvaTypeBlock *block = &blocks->blocks[i]; - struct SelvaNode *node; + auto block = &blocks->blocks[i]; res.block = i; res.block_status = atomic_load_explicit(&block->status.atomic, memory_order_acquire); @@ -730,7 +737,7 @@ static struct SelvaNodeRes selva_max_node_from(struct SelvaTypeEntry *type, bloc break; } - node = RB_MAX(SelvaNodeIndex, &block->nodes); + auto node = RB_MAX(SelvaNodeIndex, &block->nodes); if (node) { res.node = node; break; @@ -776,7 +783,7 @@ struct SelvaNodeRes selva_prev_node(struct SelvaTypeEntry *type, struct SelvaNod struct SelvaNodeRes selva_next_node(struct SelvaTypeEntry *type, struct SelvaNode *node) { - const struct SelvaTypeBlocks *blocks = type->blocks; + const auto blocks = type->blocks; struct SelvaNodeRes res = { .block = selva_node_id2block_i(blocks, node->node_id), .block_status = SELVA_TYPE_BLOCK_STATUS_INMEM, @@ -806,34 +813,13 @@ extern inline node_id_t selva_get_node_id(const struct SelvaNode *node); extern inline node_type_t selva_get_node_type(const struct SelvaNode *node); -/** - * Hash the aliases pointing to the given node. - */ -static void hash_aliases(selva_hash_state_t *hash_state, struct SelvaTypeEntry *type, node_id_t dest) -{ - for (size_t i = 0; i < type->ns.nr_aliases; i++) { - struct SelvaAliases *aliases = &type->aliases[i]; - const struct SelvaAlias *alias; - struct SelvaAlias find = { - .dest = dest, - }; - - alias = RB_FIND(SelvaAliasesByDest, &aliases->alias_by_dest, &find); - while (alias) { - - selva_hash_update(hash_state, alias->name, alias->name_len); - alias = alias->next; - } - } -} - static void hash_col_fields(struct SelvaTypeEntry *type, node_id_t node_id, selva_hash_state_t *tmp_hash_state) { /* * colvec fields. */ - for (size_t i = 0; i < type->ns.nr_colvecs; i++) { - struct SelvaColvec *colvec = &type->col_fields.colvec[i]; + for (size_t i = 0; i < type->ns.nr_colvec_fields; i++) { + auto colvec = &type->col_fields.colvec[i]; colvec_hash_update(type, node_id, colvec, tmp_hash_state); } @@ -846,7 +832,6 @@ selva_hash128_t selva_node_hash_update(struct SelvaDb *db, struct SelvaTypeEntry selva_hash_reset(tmp_hash_state); selva_hash_update(tmp_hash_state, &node->node_id, sizeof(node->node_id)); selva_fields_hash_update(tmp_hash_state, db, &type->ns.fields_schema, node); - hash_aliases(tmp_hash_state, type, node->node_id); hash_col_fields(type, node->node_id, tmp_hash_state); res = selva_hash_digest(tmp_hash_state); @@ -884,7 +869,7 @@ void selva_node_block_hash2(struct SelvaDb *db, struct SelvaTypeEntry *type, str int selva_node_block_hash(struct SelvaDb *db, struct SelvaTypeEntry *type, node_id_t start, selva_hash128_t *hash_out) { - struct SelvaTypeBlock *block = selva_get_block(type->blocks, start); + auto block = selva_get_block(type->blocks, start); if (!block) { return SELVA_ENOENT; diff --git a/clibs/lib/selva/fields.c b/clibs/lib/selva/fields.c index 6110691fe7..65657d9842 100644 --- a/clibs/lib/selva/fields.c +++ b/clibs/lib/selva/fields.c @@ -24,6 +24,7 @@ #define selva_sallocx(p, v) 0 #endif +static struct SelvaNode *next_ref_edge_node(struct SelvaTypeEntry *edge_type); static void reference_edge_destroy( struct SelvaDb *db, const struct EdgeFieldConstraint *efc, @@ -41,7 +42,6 @@ static const size_t selva_field_data_size[] = { [SELVA_FIELD_TYPE_REFERENCES] = sizeof(struct SelvaNodeReferences), [SELVA_FIELD_TYPE_MICRO_BUFFER] = 0, /* check fs. */ [SELVA_FIELD_TYPE_ALIAS] = 0, /* Aliases are stored separately under the type struct. */ - [SELVA_FIELD_TYPE_ALIASES] = 0, [SELVA_FIELD_TYPE_COLVEC] = sizeof(void *), }; @@ -66,7 +66,7 @@ size_t selva_fields_get_data_size(const struct SelvaFieldSchema *fs) static struct SelvaFieldInfo alloc_block(struct SelvaFields *fields, const struct SelvaFieldSchema *fs) { - char *data = (char *)fields->data; + auto data = (char *)fields->data; const size_t off = fields->data_len; const size_t field_data_size = selva_fields_get_data_size(fs); const size_t new_size = ALIGNED_SIZE(off + field_data_size, SELVA_FIELDS_DATA_ALIGN); @@ -90,12 +90,9 @@ static struct SelvaFieldInfo alloc_block(struct SelvaFields *fields, const struc }; } -#if __has_c_attribute(reproducible) -[[reproducible]] -#endif -static inline void *nfo2p(const struct SelvaFields *fields, const struct SelvaFieldInfo *nfo) +static inline void *nfo2p(const struct SelvaFields *fields, const struct SelvaFieldInfo *nfo) [[reproducible]] { - char *data = (char *)fields->data; + auto data = (char *)fields->data; void *p = data + (nfo->off << SELVA_FIELDS_OFF); if (unlikely((char *)p > data + fields->data_len)) { @@ -141,8 +138,8 @@ struct SelvaNodeLargeReference *selva_fields_ensure_reference( struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; - struct SelvaFieldInfo *nfo = ensure_field(fields, fs); + auto fields = &node->fields; + auto nfo = ensure_field(fields, fs); return nfo2p(fields, nfo); } @@ -153,7 +150,7 @@ struct SelvaNodeLargeReference *selva_fields_ensure_reference( */ static struct SelvaFieldInfo *ensure_field_references(struct SelvaFields *fields, const struct SelvaFieldSchema *fs, enum SelvaNodeReferenceType type) { - struct SelvaFieldInfo *nfo = ensure_field(fields, fs); + auto nfo = ensure_field(fields, fs); if (fs->type == SELVA_FIELD_TYPE_REFERENCES) { struct SelvaNodeReferences *refs = nfo2p(fields, nfo); @@ -167,49 +164,36 @@ static struct SelvaFieldInfo *ensure_field_references(struct SelvaFields *fields /** * Get a mutable string in fields at fs/nfo. + * @param unsafe Get a mutable string in fields at fs/nfo without initializing the buffer. */ -static struct selva_string *get_mutable_string(struct SelvaFields *fields, const struct SelvaFieldSchema *fs, struct SelvaFieldInfo *nfo, size_t len) -{ - struct selva_string *s = nfo2p(fields, nfo); - - assert(nfo->in_use); - assert(s && ((uintptr_t)s & 7) == 0); - - if (!(s->flags & SELVA_STRING_STATIC)) { /* Previously initialized. */ - int err; - - if (fs->string.fixed_len == 0) { - err = selva_string_init(s, nullptr, len, SELVA_STRING_MUTABLE | SELVA_STRING_CRC); - } else { - assert(len <= fs->string.fixed_len); - err = selva_string_init(s, nullptr, fs->string.fixed_len, SELVA_STRING_MUTABLE_FIXED | SELVA_STRING_CRC); - } - if (err) { - s = nullptr; - } - } - - return s; -} - -/** - * Get a mutable string in fields at fs/nfo without initializing the buffer. - */ -static struct selva_string *get_mutable_string_unsafe(struct SelvaFields *fields, const struct SelvaFieldSchema *fs, struct SelvaFieldInfo *nfo, size_t len) +static inline struct selva_string *get_mutable_string( + struct SelvaFields *fields, + const struct SelvaFieldSchema *fs, + struct SelvaFieldInfo *nfo, + size_t initial_len, + bool unsafe) { struct selva_string *s = nfo2p(fields, nfo); assert(nfo->in_use); +#if 0 assert(s && ((uintptr_t)s & 7) == 0); +#endif - if (!(s->flags & SELVA_STRING_STATIC)) { /* Previously initialized. */ + if (!(s->flags & SELVA_STRING_STATIC)) { int err; if (fs->string.fixed_len == 0) { - err = selva_string_init(s, nullptr, len, SELVA_STRING_MUTABLE | SELVA_STRING_CRC | SELVA_STRING_NOZERO); + const enum selva_string_flags flags = + SELVA_STRING_MUTABLE | SELVA_STRING_CRC | + (unsafe ? SELVA_STRING_NOZERO : 0); + err = selva_string_init(s, nullptr, initial_len, flags); } else { - assert(len <= fs->string.fixed_len); - err = selva_string_init(s, nullptr, fs->string.fixed_len, SELVA_STRING_MUTABLE_FIXED | SELVA_STRING_CRC | SELVA_STRING_NOZERO); + const enum selva_string_flags flags = + SELVA_STRING_MUTABLE_FIXED | SELVA_STRING_CRC | + (unsafe ? SELVA_STRING_NOZERO : 0); + assert(initial_len <= fs->string.fixed_len); + err = selva_string_init(s, nullptr, fs->string.fixed_len, flags); } if (err) { s = nullptr; @@ -226,13 +210,13 @@ static int set_field_string(struct SelvaFields *fields, const struct SelvaFieldS assert(len >= 2 + sizeof(uint32_t)); assume(len >= 2 + sizeof(uint32_t)); - if (fs->string.fixed_len && len > fs->string.fixed_len) { + if (fs->string.fixed_len > 0 && len > fs->string.fixed_len) { return SELVA_ENOBUFS; } uint32_t crc; memcpy(&crc, str + len - sizeof(crc), sizeof(crc)); - s = get_mutable_string_unsafe(fields, fs, nfo, len - sizeof(crc)); + s = get_mutable_string(fields, fs, nfo, len - sizeof(crc), true); (void)selva_string_replace_crc(s, str, len - sizeof(crc), crc); if (str[1] == 1) selva_string_set_compress(s); @@ -274,7 +258,7 @@ static void print_refs(struct SelvaNode *node, const struct SelvaFieldSchema *fs static field_t refs_get_nr_fields(struct SelvaDb *db, const struct EdgeFieldConstraint *efc) { - const struct SelvaFieldsSchema *efc_fields_schema = selva_get_edge_field_fields_schema(db, efc); + auto efc_fields_schema = selva_get_edge_field_fields_schema(db, efc); const field_t nr_fields = efc_fields_schema ? efc_fields_schema->nr_fields - efc_fields_schema->nr_virtual_fields : 0; return nr_fields; @@ -302,15 +286,15 @@ static const struct SelvaFieldSchema *get_edge_dst_fs( const struct SelvaDb *db, const struct SelvaFieldSchema *fs_src) { - const struct EdgeFieldConstraint *efc = &fs_src->edge_constraint; - struct SelvaTypeEntry *type_dst; + const auto efc = &fs_src->edge_constraint; if (fs_src->type != SELVA_FIELD_TYPE_REFERENCE && fs_src->type != SELVA_FIELD_TYPE_REFERENCES) { return nullptr; } - type_dst = selva_get_type_by_index(db, efc->dst_node_type); + /* TODO This could be also handled with a Generic */ + auto type_dst = selva_get_type_by_index((typeof_unqual(*db) *)db, efc->dst_node_type); assert(type_dst->type == efc->dst_node_type); return selva_get_fs_by_te_field(type_dst, efc->inverse_field); @@ -338,13 +322,20 @@ static void remove_refs_offset(struct SelvaNodeReferences *refs) /** * Write a ref to the fields data. * Note that this function doesn't touch the destination node. + * @param edge can be null. */ -static void write_ref(struct SelvaNode * restrict node, const struct SelvaFieldSchema *fs, struct SelvaNode * restrict dst, struct SelvaNodeLargeReference **ref_out) +static void write_ref( + struct SelvaNode * restrict node, + const struct SelvaFieldSchema *fs, + struct SelvaNode * restrict dst, + struct SelvaNode * restrict edge, + struct SelvaNodeLargeReference **ref_out) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; struct SelvaFieldInfo *nfo; struct SelvaNodeLargeReference ref = { .dst = dst->node_id, + .edge = edge ? edge->node_id : 0, }; nfo = ensure_field(fields, fs); @@ -362,10 +353,17 @@ static void write_ref(struct SelvaNode * restrict node, const struct SelvaFieldS /** * Write a ref to the fields data. * Note that this function doesn't touch the destination node. + * @param edge can be null. */ -static void write_refs(struct SelvaNode * restrict node, const struct SelvaFieldSchema *fs, ssize_t index, struct SelvaNode * restrict dst, struct SelvaNodeReferenceAny *ref_out) +static void write_refs( + struct SelvaNode * restrict node, + const struct SelvaFieldSchema *fs, + ssize_t index, + struct SelvaNode * restrict dst, + struct SelvaNode * restrict edge, + struct SelvaNodeReferenceAny *ref_out) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; void *vp = nfo2p(fields, &fields->fields_map[fs->field]); struct SelvaNodeReferences refs; @@ -394,6 +392,7 @@ static void write_refs(struct SelvaNode * restrict node, const struct SelvaField refs.large--; refs.large[0] = (struct SelvaNodeLargeReference){ .dst = dst->node_id, + .edge = edge ? edge->node_id : 0, }; break; default: @@ -473,6 +472,7 @@ static void write_refs(struct SelvaNode * restrict node, const struct SelvaField case SELVA_NODE_REFERENCE_LARGE: refs.large[index] = (struct SelvaNodeLargeReference){ .dst = dst->node_id, + .edge = edge ? edge->node_id : 0, }; break; default: @@ -507,7 +507,13 @@ static void write_refs(struct SelvaNode * restrict node, const struct SelvaField * A helper for remove_reference(). * @returns the original value. */ -static node_id_t del_single_ref(struct SelvaDb *db, struct SelvaNode *src_node, const struct EdgeFieldConstraint *efc, struct SelvaFields *fields, struct SelvaFieldInfo *nfo, bool ignore_dependent) +static node_id_t del_single_ref( + struct SelvaDb *db, + struct SelvaNode *src_node, + const struct EdgeFieldConstraint *efc, + struct SelvaFields *fields, + struct SelvaFieldInfo *nfo, + bool ignore_dependent) { void *vp = nfo2p(fields, nfo); struct SelvaNodeLargeReference ref; @@ -530,7 +536,13 @@ static node_id_t del_single_ref(struct SelvaDb *db, struct SelvaNode *src_node, /** * This is only a helper for remove_reference(). */ -static node_id_t del_multi_ref(struct SelvaDb *db, struct SelvaNode *src_node, const struct EdgeFieldConstraint *efc, struct SelvaNodeReferences *refs, size_t i) +static node_id_t del_multi_ref( + struct SelvaDb *db, + struct SelvaNode *src_node, + const struct EdgeFieldConstraint *efc, + struct SelvaNodeReferences *refs, + size_t i, + bool ignore_src_dependent) { node_id_t dst_id; size_t id_set_len = refs->nr_refs; @@ -626,7 +638,7 @@ static node_id_t del_multi_ref(struct SelvaDb *db, struct SelvaNode *src_node, c assert(id_set_len == refs->nr_refs); assume(id_set_len == refs->nr_refs); - if ((efc->flags & EDGE_FIELD_CONSTRAINT_FLAG_DEPENDENT) && refs->nr_refs == 0) { + if (!ignore_src_dependent && (efc->flags & EDGE_FIELD_CONSTRAINT_FLAG_DEPENDENT) && refs->nr_refs == 0) { selva_expire_node(db, src_node->type, src_node->node_id, 0, SELVA_EXPIRE_NODE_STRATEGY_CANCEL_OLD); } @@ -642,29 +654,27 @@ static void clear_ref_dst(struct SelvaDb *db, const struct SelvaFieldSchema *fs_ return; } - struct SelvaTypeEntry *dst_type = selva_get_type_by_index(db, fs_src->edge_constraint.dst_node_type); + auto dst_type = selva_get_type_by_index(db, fs_src->edge_constraint.dst_node_type); assert(dst_type); /* TODO Partials */ - struct SelvaNode *dst = selva_find_node(dst_type, dst_node_id).node; + auto dst = selva_find_node(dst_type, dst_node_id).node; if (!dst) { return; } - const struct SelvaFieldSchema *fs_dst; - fs_dst = get_edge_dst_fs(db, fs_src); + auto fs_dst = get_edge_dst_fs(db, fs_src); if (!fs_dst) { db_panic("field schema not found"); } - struct SelvaFields *fields_dst = &dst->fields; - struct SelvaFieldInfo *nfo_dst; + auto fields_dst = &dst->fields; assert(fs_dst->field < fields_dst->nr_fields); assume(fs_src->type == SELVA_FIELD_TYPE_REFERENCE || fs_src->type == SELVA_FIELD_TYPE_REFERENCES); assume(fs_dst->type == SELVA_FIELD_TYPE_REFERENCE || fs_dst->type == SELVA_FIELD_TYPE_REFERENCES); - nfo_dst = &fields_dst->fields_map[fs_dst->field]; + auto nfo_dst = &fields_dst->fields_map[fs_dst->field]; if (!nfo_dst->in_use) { return; } @@ -683,9 +693,10 @@ static void clear_ref_dst(struct SelvaDb *db, const struct SelvaFieldSchema *fs_ ssize_t i = refs_find_node_i(refs, src_node_id); assert(i >= 0); - (void)del_multi_ref(db, dst, &fs_dst->edge_constraint, refs, i); + (void)del_multi_ref(db, dst, &fs_dst->edge_constraint, refs, i, false); } + selva_mark_dirty(selva_get_type_by_index(db, fs_dst->edge_constraint.dst_node_type), src_node_id); selva_mark_dirty(dst_type, dst_node_id); } @@ -693,34 +704,42 @@ static void clear_ref_dst(struct SelvaDb *db, const struct SelvaFieldSchema *fs_ * add_to_refs_index() must be called before this function. */ static inline void write_ref_2way( + struct SelvaDb *db, struct SelvaNode * restrict src, const struct SelvaFieldSchema *fs_src, ssize_t index, struct SelvaNode * restrict dst, const struct SelvaFieldSchema *fs_dst, struct SelvaNodeReferenceAny *ref_out) { + struct SelvaNode *edge = nullptr; + + if (refs_get_type(db, &fs_src->edge_constraint) == SELVA_NODE_REFERENCE_LARGE) { + auto edge_type = selva_get_type_by_index(db, fs_src->edge_constraint.edge_node_type); + edge = next_ref_edge_node(edge_type); + } + #if 0 assert(fs_src->edge_constraint.dst_node_type == dst->type); assert(fs_dst->edge_constraint.dst_node_type == src->type); #endif if (fs_src->type == SELVA_FIELD_TYPE_REFERENCE) { - write_ref(src, fs_src, dst, ref_out ? &ref_out->large : nullptr); + write_ref(src, fs_src, dst, edge, ref_out ? &ref_out->large : nullptr); ref_out->type = SELVA_NODE_REFERENCE_LARGE; } else { #if 0 assert(fs_src->type == SELVA_FIELD_TYPE_REFERENCES); #endif assume(fs_src->type == SELVA_FIELD_TYPE_REFERENCES); - write_refs(src, fs_src, index, dst, ref_out); + write_refs(src, fs_src, index, dst, edge, ref_out); } if (fs_dst->type == SELVA_FIELD_TYPE_REFERENCE) { - write_ref(dst, fs_dst, src, nullptr); + write_ref(dst, fs_dst, src, edge, nullptr); } else { #if 0 assert(fs_dst->type == SELVA_FIELD_TYPE_REFERENCES); #endif assume(fs_dst->type == SELVA_FIELD_TYPE_REFERENCES); - write_refs(dst, fs_dst, -1, src, nullptr); + write_refs(dst, fs_dst, -1, src, edge, nullptr); } } @@ -733,9 +752,9 @@ static inline void write_ref_2way( */ static node_id_t remove_reference(struct SelvaDb *db, struct SelvaNode *src, const struct SelvaFieldSchema *fs_src, node_id_t orig_dst, ssize_t idx, bool ignore_src_dependent) { - struct SelvaFields *fields_src = &src->fields; - struct SelvaFieldInfo *nfo_src = &fields_src->fields_map[fs_src->field]; - struct SelvaTypeEntry *dst_type = selva_get_type_by_index(db, fs_src->edge_constraint.dst_node_type); + auto fields_src = &src->fields; + auto nfo_src = &fields_src->fields_map[fs_src->field]; + auto dst_type = selva_get_type_by_index(db, fs_src->edge_constraint.dst_node_type); node_id_t dst_node_id = 0; assert(dst_type); @@ -756,7 +775,7 @@ static node_id_t remove_reference(struct SelvaDb *db, struct SelvaNode *src, con ssize_t i = (idx >= 0) ? idx : refs_find_node_i(refs, orig_dst); if (i >= 0 && i < refs->nr_refs) { - dst_node_id = del_multi_ref(db, src, &fs_src->edge_constraint, refs, i); + dst_node_id = del_multi_ref(db, src, &fs_src->edge_constraint, refs, i, ignore_src_dependent); } } } @@ -777,8 +796,8 @@ static size_t remove_references_tail( const struct SelvaFieldSchema *fs, size_t limit) { - struct SelvaFields *fields = &node->fields; - struct SelvaFieldInfo *nfo = &fields->fields_map[fs->field]; + auto fields = &node->fields; + auto nfo = &fields->fields_map[fs->field]; struct SelvaNodeReferences *refs; size_t removed = 0; @@ -798,12 +817,12 @@ static size_t remove_references_tail( return removed; } -static struct SelvaNodeReferences *clear_references(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs) +static struct SelvaNodeReferences *clear_references(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs, bool ignore_src_dependent) { - struct SelvaTypeEntry *te = selva_get_type_by_index(db, node->type); - struct SelvaFields *fields = &node->fields; + auto te = selva_get_type_by_index(db, node->type); + auto fields = &node->fields; assert(fs->field < fields->nr_fields); - struct SelvaFieldInfo *nfo = &fields->fields_map[fs->field]; + auto nfo = &fields->fields_map[fs->field]; struct SelvaNodeReferences *refs; if (!nfo->in_use) { @@ -817,7 +836,7 @@ static struct SelvaNodeReferences *clear_references(struct SelvaDb *db, struct S selva_mark_dirty(te, node->node_id); - struct SelvaTypeEntry *dst_type = selva_get_type_by_index(db, fs->edge_constraint.dst_node_type); + auto dst_type = selva_get_type_by_index(db, fs->edge_constraint.dst_node_type); assert(dst_type); while (refs->nr_refs > 0) { @@ -838,7 +857,7 @@ static struct SelvaNodeReferences *clear_references(struct SelvaDb *db, struct S goto out; } - removed_dst = remove_reference(db, node, fs, dst_node_id, i, false); + removed_dst = remove_reference(db, node, fs, dst_node_id, i, ignore_src_dependent); if (removed_dst != 0) { assert(removed_dst == dst_node_id); } @@ -854,7 +873,7 @@ static struct SelvaNodeReferences *clear_references(struct SelvaDb *db, struct S __attribute__((nonnull(1, 2, 3))) static void remove_references(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaNodeReferences *refs = clear_references(db, node, fs); + auto refs = clear_references(db, node, fs, false); if (refs) { switch (refs->size) { case SELVA_NODE_REFERENCE_SMALL: @@ -878,8 +897,8 @@ static void remove_references(struct SelvaDb *db, struct SelvaNode *node, const __attribute__((nonnull(1, 2))) static void unload_references(struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; - struct SelvaFieldInfo *nfo = &fields->fields_map[fs->field]; + auto fields = &node->fields; + auto nfo = &fields->fields_map[fs->field]; struct SelvaNodeReferences *refs; if (!nfo->in_use) { @@ -905,23 +924,19 @@ static void unload_references(struct SelvaNode *node, const struct SelvaFieldSch static inline int _selva_fields_get_mutable_string(struct SelvaNode *node, const struct SelvaFieldSchema *fs, bool unsafe, size_t len, struct selva_string **s) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; struct SelvaFieldInfo *nfo; if (fs->type != SELVA_FIELD_TYPE_STRING) { return SELVA_EINTYPE; } - if (fs->string.fixed_len && len > fs->string.fixed_len) { + if (fs->string.fixed_len > 0 && len > fs->string.fixed_len) { return SELVA_ENOBUFS; } nfo = ensure_field(fields, fs); - if (unsafe) { - *s = get_mutable_string_unsafe(fields, fs, nfo, len); - } else { - *s = get_mutable_string(fields, fs, nfo, len); - } + *s = get_mutable_string(fields, fs, nfo, len, unsafe); return !*s ? SELVA_EINVAL : 0; } @@ -942,10 +957,10 @@ struct selva_string *selva_fields_ensure_string(struct SelvaNode *node, const st return nullptr; } - struct SelvaFields *fields = &node->fields; - struct SelvaFieldInfo *nfo = ensure_field(fields, fs); + auto fields = &node->fields; + auto nfo = ensure_field(fields, fs); - return get_mutable_string(fields, fs, nfo, initial_len); + return get_mutable_string(fields, fs, nfo, initial_len, false); } static void del_field_text(struct SelvaFields *fields, struct SelvaFieldInfo *nfo) @@ -1107,7 +1122,7 @@ int selva_fields_get_text( const char **str, size_t *len) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; const struct SelvaFieldInfo *nfo; const struct SelvaTextField *text; struct selva_string *s; @@ -1143,7 +1158,7 @@ int selva_fields_get_text( void *selva_fields_ensure_micro_buffer(struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; struct SelvaFieldInfo *nfo; if (fs->type != SELVA_FIELD_TYPE_MICRO_BUFFER) { @@ -1240,9 +1255,9 @@ static bool add_to_refs_index( const struct SelvaFieldSchema * restrict fs_src, const struct SelvaFieldSchema * restrict fs_dst) { - const enum SelvaNodeReferenceType type = refs_get_type(db, &fs_src->edge_constraint); - struct SelvaFieldInfo *nfo_src = ensure_field_references(&src->fields, fs_src, type); - struct SelvaFieldInfo *nfo_dst = ensure_field_references(&dst->fields, fs_dst, type); + const auto type = refs_get_type(db, &fs_src->edge_constraint); + auto nfo_src = ensure_field_references(&src->fields, fs_src, type); + auto nfo_dst = ensure_field_references(&dst->fields, fs_dst, type); const bool added_src = add_to_refs_index_(src, fs_src, nfo_src, dst->node_id); const bool added_dst = add_to_refs_index_(dst, fs_dst, nfo_dst, src->node_id); @@ -1288,16 +1303,16 @@ int selva_fields_references_insert( } assume(fs->type == SELVA_FIELD_TYPE_REFERENCES); - write_ref_2way(node, fs, index, dst, fs_dst, ref_out); + write_ref_2way(db, node, fs, index, dst, fs_dst, ref_out); if (fs->edge_constraint.limit > 0) { (void)remove_references_tail(db, node, fs, fs->edge_constraint.limit); } return 0; } else if (reorder) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; assert(fs->field < fields->nr_fields); - struct SelvaFieldInfo *nfo = &fields->fields_map[fs->field]; + auto nfo = &fields->fields_map[fs->field]; struct SelvaNodeReferences *refs = nfo2p(fields, nfo); ssize_t index_old; int err = 0; @@ -1328,7 +1343,7 @@ int selva_fields_references_insert( return err; } else { if (ref_out) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; assert(fs->field < fields->nr_fields); struct SelvaFieldInfo *nfo = &fields->fields_map[fs->field]; struct SelvaNodeReferences *refs = nfo2p(fields, nfo); @@ -1357,15 +1372,13 @@ int selva_fields_reference_set( struct SelvaNode * restrict dst, struct SelvaNodeReferenceAny *ref_out) { - const struct SelvaFieldSchema *fs_dst; - if (fs_src->type != SELVA_FIELD_TYPE_REFERENCE || fs_src->edge_constraint.dst_node_type != dst->type || !dst || src == dst) { return SELVA_EINVAL; } - fs_dst = get_edge_dst_fs(db, fs_src); + auto fs_dst = get_edge_dst_fs(db, fs_src); if (!fs_dst) { return SELVA_EINTYPE; } @@ -1381,7 +1394,7 @@ int selva_fields_reference_set( /* * Remove previous refs. */ - struct SelvaTypeEntry *te_dst = selva_get_type_by_index(db, fs_src->edge_constraint.dst_node_type); + auto te_dst = selva_get_type_by_index(db, fs_src->edge_constraint.dst_node_type); (void)remove_reference(db, src, fs_src, 0, -1, true); selva_mark_dirty(te_dst, dst->node_id); @@ -1392,7 +1405,7 @@ int selva_fields_reference_set( } assume(fs_src->type == SELVA_FIELD_TYPE_REFERENCE); - write_ref_2way(src, fs_src, -1, dst, fs_dst, ref_out); + write_ref_2way(db, src, fs_src, -1, dst, fs_dst, ref_out); if (fs_dst->type == SELVA_FIELD_TYPE_REFERENCES && fs_dst->edge_constraint.limit > 0) { (void)remove_references_tail(db, dst, fs_dst, fs_dst->edge_constraint.limit); } @@ -1402,14 +1415,14 @@ int selva_fields_reference_set( size_t selva_fields_prealloc_refs(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs, size_t nr_refs_min) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; if (unlikely(fs->type != SELVA_FIELD_TYPE_REFERENCES)) { db_panic("Invalid type: %s", selva_str_field_type(fs->type)); } const enum SelvaNodeReferenceType type = refs_get_type(db, selva_get_edge_field_constraint(fs)); - struct SelvaFieldInfo *nfo = ensure_field_references(fields, fs, type); + auto nfo = ensure_field_references(fields, fs, type); struct SelvaNodeReferences *refs = nfo2p(fields, nfo); if (refs->nr_refs >= nr_refs_min) { @@ -1480,9 +1493,9 @@ static void selva_fields_references_insert_tail_nonempty_src_field( size_t nr_ids, selva_fields_references_insert_tail_cb_t fn) { - const struct SelvaFields *fields = &src->fields; + auto fields = &src->fields; assert(fs_src->field < fields->nr_fields); - const struct SelvaFieldInfo *nfo = &fields->fields_map[fs_src->field]; + const auto nfo = &fields->fields_map[fs_src->field]; typeof_field(struct SelvaNodeReferences, nr_refs) *index_len = (typeof(index_len))((char *)nfo2p(fields, nfo) + offsetof(struct SelvaNodeReferences, nr_refs)); typeof_field(struct SelvaNodeReferences, index) *index = (typeof(index))((char *)nfo2p(fields, nfo) + offsetof(struct SelvaNodeReferences, index)); ssize_t index_lower_bound = node_id_set_bsearch(*index, *index_len, ids[0]); @@ -1526,7 +1539,7 @@ static void selva_fields_references_insert_tail_insert_refs( assert(fs_dst->type == SELVA_FIELD_TYPE_REFERENCES); #endif assume(fs_dst->type == SELVA_FIELD_TYPE_REFERENCES); - write_ref_2way(src, fs_src, -1, dst, fs_dst, nullptr); + write_ref_2way(db, src, fs_src, -1, dst, fs_dst, nullptr); if (fs_dst->edge_constraint.limit > 0) { (void)remove_references_tail(db, dst, fs_dst, fs_dst->edge_constraint.limit); } @@ -1545,7 +1558,7 @@ static void selva_fields_references_insert_tail_insert_ref( #endif assume(fs_dst->type == SELVA_FIELD_TYPE_REFERENCE); (void)remove_reference(db, dst, fs_dst, 0, -1, false); - write_ref_2way(src, fs_src, -1, dst, fs_dst, nullptr); + write_ref_2way(db, src, fs_src, -1, dst, fs_dst, nullptr); } int selva_fields_references_insert_tail( @@ -1556,7 +1569,6 @@ int selva_fields_references_insert_tail( const node_id_t ids[], size_t nr_ids) { - const struct SelvaFieldSchema *fs_dst; node_type_t type_dst = te_dst->type; if (fs->type != SELVA_FIELD_TYPE_REFERENCES || @@ -1577,7 +1589,7 @@ int selva_fields_references_insert_tail( } } - fs_dst = selva_get_fs_by_te_field(te_dst, fs->edge_constraint.inverse_field); + auto fs_dst = selva_get_fs_by_te_field(te_dst, fs->edge_constraint.inverse_field); if (!fs_dst) { return SELVA_EINTYPE; } @@ -1802,107 +1814,21 @@ static struct SelvaNode *next_ref_edge_node(struct SelvaTypeEntry *edge_type) edge = selva_upsert_node(edge_type, next_id); /* TODO Support partial edges */ - assert(edge.node && edge.block_status & SELVA_TYPE_BLOCK_STATUS_INMEM); + + constexpr enum SelvaTypeBlockStatus mask = SELVA_TYPE_BLOCK_STATUS_INMEM | SELVA_TYPE_BLOCK_STATUS_DIRTY; + assert(edge.node && (edge.block_status & mask) == mask); +#if 0 selva_mark_dirty(edge_type, next_id); +#endif return edge.node; } -/** - * Create edgeNode if it's not initialized yet. - * Most importantly this function makes sure that the object is shared between - * both ends of the edge. - * @param edge_id Node id of the edge edgeNode. 0 if a new one should be assigned. - */ -struct SelvaNode *selva_fields_ensure_ref_edge( - struct SelvaDb *db, - struct SelvaNode *node, - const struct EdgeFieldConstraint *efc, - struct SelvaNodeLargeReference *ref, - node_id_t edge_id) -{ - struct SelvaTypeEntry *edge_type = selva_get_type_by_index(db, efc->edge_node_type); - struct SelvaNode *edge = nullptr; - - if (!edge_type) { - return nullptr; - } - - /* RFE what to do if there was an existing edge? */ - if (ref->edge != 0 && edge_id == 0) { - /* TODO Partials will require upsert here! */ - edge = selva_find_node(edge_type, ref->edge).node; - assert(edge); - } else if (ref->edge == 0 || edge_id != 0) { - edge = (edge_id != 0) - ? selva_upsert_node(edge_type, edge_id).node /* TODO Partials */ - : next_ref_edge_node(edge_type); - if (!edge) { - return nullptr; - } - - edge_id = edge->node_id; - ref->edge = edge_id; - /* FIXME Do a little refcount +2 */ - - struct SelvaTypeEntry *type_dst = selva_get_type_by_index(db, efc->dst_node_type); - const struct SelvaFieldSchema *fs_dst = selva_get_fs_by_te_field(type_dst, efc->inverse_field); - const struct SelvaNodeRes dst_res = selva_find_node(type_dst, ref->dst); - constexpr enum SelvaTypeBlockStatus mask = SELVA_TYPE_BLOCK_STATUS_FS | SELVA_TYPE_BLOCK_STATUS_INMEM; - if ((dst_res.block_status & mask) == SELVA_TYPE_BLOCK_STATUS_FS) { - /* TODO load the block instead of crashing. partials */ - db_panic("Block %u:%u needs to be loaded", - (unsigned)type_dst->type, (unsigned)dst_res.block); - } else if (!dst_res.node) { - db_panic("FIXME dangling reference"); - } - - struct SelvaNode *dst = dst_res.node; - struct SelvaFields *dst_fields = &dst->fields; - assert(efc->inverse_field < dst_fields->nr_fields); - const struct SelvaFieldInfo *dst_nfo = &dst_fields->fields_map[efc->inverse_field]; - - if (unlikely(!dst_nfo->in_use)) { - db_panic("dst field missing"); - } - - /* - * Share the edge fields with the destination node - * i.e. set it at the other end of the edge. - */ - if (fs_dst->type == SELVA_FIELD_TYPE_REFERENCE) { - struct SelvaNodeLargeReference *dst_ref = nfo2p(dst_fields, dst_nfo); - - dst_ref->edge = ref->edge; - } else if (fs_dst->type == SELVA_FIELD_TYPE_REFERENCES) { - struct SelvaNodeReferences refs; - node_id_t src_node_id = node->node_id; - ssize_t i; - - memcpy(&refs, nfo2p(dst_fields, dst_nfo), sizeof(refs)); - assert(refs.size == SELVA_NODE_REFERENCE_LARGE); - - i = fast_linear_search_references_large(refs.large, refs.nr_refs, src_node_id); - if (unlikely(i < 0)) { - db_panic("src not found in dst"); - } - - refs.large[i].edge = ref->edge; - } else { - db_panic("Invalid inverse field type: %d", fs_dst->type); - } - - selva_mark_dirty(selva_get_type_by_index(db, efc->dst_node_type), ref->dst); - } - - return edge; -} - struct SelvaNodeLargeReference *selva_fields_get_reference(struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; assert(fs->field < fields->nr_fields); - const struct SelvaFieldInfo *nfo = &fields->fields_map[fs->field]; + const auto nfo = &fields->fields_map[fs->field]; return (fs->type != SELVA_FIELD_TYPE_REFERENCE || !nfo->in_use) ? nullptr @@ -1911,7 +1837,7 @@ struct SelvaNodeLargeReference *selva_fields_get_reference(struct SelvaNode *nod struct SelvaNodeReferences *selva_fields_get_references(struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; assert(fs->field < fields->nr_fields); const struct SelvaFieldInfo *nfo = &fields->fields_map[fs->field]; @@ -1922,7 +1848,7 @@ struct SelvaNodeReferences *selva_fields_get_references(struct SelvaNode *node, struct selva_string *selva_fields_get_selva_string(struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; const struct SelvaFieldInfo *nfo; assert(fs->type == SELVA_FIELD_TYPE_STRING); @@ -1930,12 +1856,18 @@ struct selva_string *selva_fields_get_selva_string(struct SelvaNode *node, const assert(fs->field < fields->nr_fields); nfo = &fields->fields_map[fs->field]; - return !nfo->in_use ? nullptr : nfo2p(fields, nfo); + if (!nfo->in_use) { + return nullptr; + } + + struct selva_string *s = nfo2p(fields, nfo); + + return (s->flags & SELVA_STRING_STATIC) ? s : nullptr; } struct SelvaFieldsPointer selva_fields_get_raw(struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; const struct SelvaFieldInfo *nfo; enum SelvaFieldType type; @@ -1960,7 +1892,7 @@ struct SelvaFieldsPointer selva_fields_get_raw(struct SelvaNode *node, const str }; case SELVA_FIELD_TYPE_STRING: do { - const struct selva_string *s = (const struct selva_string *)((uint8_t *)fields->data + (nfo->off << 3)); + auto s = (const struct selva_string *)((uint8_t *)fields->data + (nfo->off << 3)); size_t len; const uint8_t *str = selva_string_to_buf(s, &len); return (struct SelvaFieldsPointer){ @@ -1976,7 +1908,6 @@ struct SelvaFieldsPointer selva_fields_get_raw(struct SelvaNode *node, const str .len = selva_fields_get_data_size(fs), }; case SELVA_FIELD_TYPE_ALIAS: - case SELVA_FIELD_TYPE_ALIASES: case SELVA_FIELD_TYPE_COLVEC: return (struct SelvaFieldsPointer){ .ptr = nullptr, @@ -2001,8 +1932,9 @@ static void del_field_string(struct SelvaFields *fields, struct SelvaFieldInfo * } } -static int fields_del(struct SelvaDb *db, struct SelvaNode *node, struct SelvaFields *fields, const struct SelvaFieldSchema *fs, bool unload) +static int fields_del(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs, bool unload) { + auto fields = &node->fields; struct SelvaFieldInfo *nfo; enum SelvaFieldType type; @@ -2018,7 +1950,8 @@ static int fields_del(struct SelvaDb *db, struct SelvaNode *node, struct SelvaFi break; case SELVA_FIELD_TYPE_STRING: del_field_string(fields, nfo); - return 0; /* Don't clear. */ + selva_mark_dirty(selva_get_type_by_index(db, node->type), node->node_id); + return 0; /* Don't clear it. */ case SELVA_FIELD_TYPE_TEXT: del_field_text(fields, nfo); break; @@ -2035,7 +1968,6 @@ static int fields_del(struct SelvaDb *db, struct SelvaNode *node, struct SelvaFi } break; case SELVA_FIELD_TYPE_ALIAS: - case SELVA_FIELD_TYPE_ALIASES: case SELVA_FIELD_TYPE_COLVEC: return SELVA_ENOTSUP; } @@ -2048,9 +1980,7 @@ static int fields_del(struct SelvaDb *db, struct SelvaNode *node, struct SelvaFi int selva_fields_del(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs) { - struct SelvaFields *fields = &node->fields; - - return fields_del(db, node, fields, fs, false); + return fields_del(db, node, fs, false); } int selva_fields_del_ref(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs, node_id_t dst_node_id) @@ -2059,7 +1989,7 @@ int selva_fields_del_ref(struct SelvaDb *db, struct SelvaNode *node, const struc return SELVA_EINTYPE; } - struct SelvaNodeReferences *refs = selva_fields_get_references(node, fs); + auto refs = selva_fields_get_references(node, fs); if (!refs) { return SELVA_ENOENT; } @@ -2071,7 +2001,7 @@ int selva_fields_del_ref(struct SelvaDb *db, struct SelvaNode *node, const struc void selva_fields_clear_references(struct SelvaDb *db, struct SelvaNode *node, const struct SelvaFieldSchema *fs) { assert(fs->type == SELVA_FIELD_TYPE_REFERENCES); - (void)clear_references(db, node, fs); + (void)clear_references(db, node, fs, true); } static void selva_fields_init_defaults(struct SelvaTypeEntry *te, struct SelvaFields *fields, const struct SelvaFieldsSchema *schema) @@ -2085,11 +2015,10 @@ static void selva_fields_init_defaults(struct SelvaTypeEntry *te, struct SelvaFi * Handle defaults that needs to allocate memory per each node. */ for (size_t i = 0; i < schema->nr_fixed_fields; i++) { - const struct SelvaFieldSchema *fs = get_fs_by_fields_schema_field(schema, i); - + auto fs = get_fs_by_fields_schema_field(schema, i); if (fs->type == SELVA_FIELD_TYPE_STRING) { - if (fs->string.default_off > 0) { - const void *default_str = schema_buf + fs->string.default_off; + if (fs->default_off > 0) { + const void *default_str = schema_buf + fs->default_off; size_t default_len = fs->string.default_len; struct SelvaFieldInfo *nfo; int err; @@ -2103,7 +2032,7 @@ static void selva_fields_init_defaults(struct SelvaTypeEntry *te, struct SelvaFi } } else if (fs->type == SELVA_FIELD_TYPE_TEXT) { const size_t nr_defaults = fs->text.nr_defaults; - size_t off = fs->text.defaults_off; + size_t off = fs->default_off; if (nr_defaults > 0 && off > 0) { struct ensure_text_field tf; @@ -2129,7 +2058,7 @@ static void selva_fields_init_defaults(struct SelvaTypeEntry *te, struct SelvaFi static void selva_fields_init(struct SelvaTypeEntry *te, struct SelvaFields *fields, bool set_defaults) { - const struct SelvaFieldsSchema *schema = &te->ns.fields_schema; + const auto schema = &te->ns.fields_schema; fields->nr_fields = schema->nr_fields - schema->nr_virtual_fields; memcpy(fields->fields_map, schema->template.field_map_buf, schema->template.field_map_len); @@ -2153,28 +2082,27 @@ static void selva_fields_init(struct SelvaTypeEntry *te, struct SelvaFields *fie void selva_fields_init_node(struct SelvaTypeEntry *te, struct SelvaNode *node, bool set_defaults) { selva_fields_init(te, &node->fields, set_defaults); - if (te->ns.nr_colvecs > 0) { + if (te->ns.nr_colvec_fields > 0) { colvec_init_node(te, node); } } void selva_fields_flush(struct SelvaDb *db, struct SelvaNode *node) { - const struct SelvaNodeSchema *ns = selva_get_ns_by_te(selva_get_type_by_node(db, node)); - const field_t nr_fields = node->fields.nr_fields; - struct SelvaFields *fields = &node->fields; + auto ns = selva_get_ns_by_te(selva_get_type_by_node(db, node)); + auto fields = &node->fields; + const field_t nr_fields = fields->nr_fields; for (field_t field = 0; field < nr_fields; field++) { if (fields->fields_map[field].in_use) { - const struct SelvaFieldSchema *fs; int err; - fs = selva_get_fs_by_ns_field(ns, field); + auto fs = selva_get_fs_by_ns_field(ns, field); if (unlikely(!fs)) { db_panic("No field schema found"); } - err = fields_del(db, node, fields, fs, false); + err = fields_del(db, node, fs, false); if (unlikely(err)) { db_panic("Failed to remove a field: %s", selva_strerror(err)); } @@ -2194,21 +2122,20 @@ void selva_fields_flush(struct SelvaDb *db, struct SelvaNode *node) static inline void fields_destroy(struct SelvaDb *db, struct SelvaNode *node, bool unload) { - const struct SelvaNodeSchema *ns = selva_get_ns_by_te(selva_get_type_by_node(db, node)); - const field_t nr_fields = node->fields.nr_fields; - struct SelvaFields *fields = &node->fields; + auto ns = selva_get_ns_by_te(selva_get_type_by_node(db, node)); + auto fields = &node->fields; + const field_t nr_fields = fields->nr_fields; for (field_t field = 0; field < nr_fields; field++) { if (fields->fields_map[field].in_use) { - const struct SelvaFieldSchema *fs; int err; - fs = selva_get_fs_by_ns_field(ns, field); + auto fs = selva_get_fs_by_ns_field(ns, field); if (unlikely(!fs)) { db_panic("No field schema found"); } - err = fields_del(db, node, fields, fs, unload); + err = fields_del(db, node, fs, unload); if (unlikely(err)) { db_panic("Failed to remove a field: %s", selva_strerror(err)); } @@ -2246,12 +2173,9 @@ static void reference_edge_destroy( bool keep_edge_node) { if (ref->edge != 0) { - struct SelvaTypeEntry *edge_type; - struct SelvaNode *edge_node; - - edge_type = selva_get_type_by_index(db, efc->edge_node_type); + auto edge_type = selva_get_type_by_index(db, efc->edge_node_type); assert(edge_type); - edge_node = selva_find_node(edge_type, ref->edge).node; /* TODO Partials */ + auto edge_node = selva_find_node(edge_type, ref->edge).node; /* TODO Partials */ ref->edge = 0; if (edge_node && !keep_edge_node) { @@ -2271,12 +2195,12 @@ static inline void hash_ref(selva_hash_state_t *hash_state, const struct SelvaNo void selva_fields_hash_update(selva_hash_state_t *hash_state, struct SelvaDb *, const struct SelvaFieldsSchema *schema, const struct SelvaNode *node) { - const struct SelvaFields *fields = &node->fields; + auto fields = &node->fields; const field_t nr_fields = fields->nr_fields; for (field_t field = 0; field < nr_fields; field++) { - const struct SelvaFieldInfo *nfo = &fields->fields_map[field]; - const struct SelvaFieldSchema *fs = &schema->field_schemas[field]; + auto nfo = &fields->fields_map[field]; + const auto fs = &schema->field_schemas[field]; const void *p = nfo2p(fields, nfo); switch (fs->type) { @@ -2343,7 +2267,6 @@ void selva_fields_hash_update(selva_hash_state_t *hash_state, struct SelvaDb *, } while (0); break; case SELVA_FIELD_TYPE_ALIAS: - case SELVA_FIELD_TYPE_ALIASES: case SELVA_FIELD_TYPE_COLVEC: /* * NOP These are hashed in the node hash in db.c. diff --git a/clibs/lib/selva/hll/hll.c b/clibs/lib/selva/hll/hll.c index 9063a1b9f6..bfb02b8ae8 100644 --- a/clibs/lib/selva/hll/hll.c +++ b/clibs/lib/selva/hll/hll.c @@ -56,6 +56,7 @@ void hll_init(struct selva_string *hllss, uint8_t precision, bool is_sparse) if (is_sparse) { hll = (HyperLogLogPlusPlus *)selva_string_to_mstr(hllss, &len); + assert(len >= HLL_INIT_SIZE); hll->is_sparse = true; hll->precision = precision; hll->num_registers = 0; @@ -65,6 +66,7 @@ void hll_init(struct selva_string *hllss, uint8_t precision, bool is_sparse) (void)selva_string_append(hllss, nullptr, num_registers * sizeof(hll->registers[0])); hll = (HyperLogLogPlusPlus *)selva_string_to_mstr(hllss, &len); + assert(len >= HLL_INIT_SIZE); hll->is_sparse = false; hll->precision = precision; hll->num_registers = num_registers; diff --git a/clibs/lib/selva/include/db.h b/clibs/lib/selva/include/db.h index 87daf77f75..a40c627f69 100644 --- a/clibs/lib/selva/include/db.h +++ b/clibs/lib/selva/include/db.h @@ -15,7 +15,6 @@ struct selva_string; -RB_HEAD(SelvaTypeEntryIndex, SelvaTypeEntry); RB_HEAD(SelvaNodeIndex, SelvaNode); RB_HEAD(SelvaAliasesByName, SelvaAlias); RB_HEAD(SelvaAliasesByDest, SelvaAlias); @@ -50,10 +49,8 @@ struct SelvaNode { static_assert(offsetof(struct SelvaNode, node_id) == 0); struct SelvaAlias { - RB_ENTRY(SelvaAlias) _entry1; - RB_ENTRY(SelvaAlias) _entry2; - struct SelvaAlias *prev; - struct SelvaAlias *next; /*!< Next alias for the same destination. */ + RB_ENTRY(SelvaAlias) _entryByName; + RB_ENTRY(SelvaAlias) _entryByDest; node_id_t dest; uint32_t name_len; char name[] __counted_by(name_len); @@ -78,8 +75,6 @@ struct SelvaTypeBlock { struct SelvaTypeEntry { node_type_t type; - RB_ENTRY(SelvaTypeEntry) _entry; - /** * Node blocks in this type. */ @@ -90,11 +85,10 @@ struct SelvaTypeEntry { } *blocks; struct SelvaAliases { field_t field; /*!< Alias field. */ - bool single; /*!< Only allow a single alias per node + field. */ struct SelvaAliasesByName alias_by_name; struct SelvaAliasesByDest alias_by_dest; size_t nr_aliases; /*!< Number of aliases by name. */ - } *aliases __pcounted_by(ns.nr_aliases); + } *aliases __pcounted_by(ns.nr_alias_fields); size_t nr_nodes; /*!< Number of nodes of this type. */ struct mempool nodepool; /*!< Pool for struct SelvaNode of this type. */ @@ -144,15 +138,6 @@ struct SelvaDbExpireToken { * Database instance. */ struct SelvaDb { - /** - * SelvaTypeEntries. - */ - struct { - struct SelvaTypeEntryIndex index; - struct mempool pool; /*!< types area allocated from here. */ - size_t count; /*!< Total count of types. */ - } types; - /** * Expiring nodes. */ @@ -163,13 +148,17 @@ struct SelvaDb { */ int dirfd; + selva_db_dirty_hook_t dirty_hook_fun; + void *dirty_hook_ctx; + uint32_t sdb_version; /*!< Current SDB version. Set on common load and save. 0 if not saved/loaded. */ + size_t nr_types; + struct SelvaTypeEntry types[] __counted_by(nr_types); }; -RB_PROTOTYPE(SelvaTypeEntryIndex, SelvaTypeEntry, _entry, SelvaTypeEntry_cmp) RB_PROTOTYPE(SelvaNodeIndex, SelvaNode, _index_entry, SelvaNode_cmp) -RB_PROTOTYPE(SelvaAliasesByName, SelvaAlias, _entry1, SelvaAlias_cmp_name) -RB_PROTOTYPE(SelvaAliasesByDest, SelvaAlias, _entry2, SelvaAlias_cmp_dest) +RB_PROTOTYPE(SelvaAliasesByName, SelvaAlias, _entryByName, SelvaAlias_cmp_name) +RB_PROTOTYPE(SelvaAliasesByDest, SelvaAlias, _entryByDest, SelvaAlias_cmp_dest) int SelvaNode_cmp(const struct SelvaNode *a, const struct SelvaNode *b); int SelvaAlias_cmp_name(const struct SelvaAlias *a, const struct SelvaAlias *b); int SelvaAlias_cmp_dest(const struct SelvaAlias *a, const struct SelvaAlias *b); diff --git a/clibs/lib/selva/include/io.h b/clibs/lib/selva/include/io.h index 19804d1695..04570631e9 100644 --- a/clibs/lib/selva/include/io.h +++ b/clibs/lib/selva/include/io.h @@ -39,6 +39,8 @@ * - Save block hash at the end of each block dump * - Save block writelog in common.sdb * - Remove support for earlier SDB versions + * - Moved aliases saving from each node to saving all aliases at once + * - Removed schema from common.sdb */ #define SELVA_SDB_VERSION 8 diff --git a/clibs/lib/selva/include/mempool.h b/clibs/lib/selva/include/mempool.h index 822feef256..48e9ff44bc 100644 --- a/clibs/lib/selva/include/mempool.h +++ b/clibs/lib/selva/include/mempool.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025 SAULX + * Copyright (c) 2020-2026 SAULX * SPDX-License-Identifier: MIT */ #pragma once @@ -105,6 +105,8 @@ void mempool_gc(struct mempool *mempool) */ void mempool_defrag(struct mempool *mempool, int (*obj_compar)(const void *, const void*)); +void mempool_prealloc(struct mempool *mempool, size_t nr_objects); + /** * Get a new object from the pool. */ @@ -140,7 +142,6 @@ static inline struct mempool_chunk *get_first_chunk(struct mempool_slab * restri return (struct mempool_chunk *)p; } - /** * For each slab in the mempool. * The current slab will be available as the pointer variable `slab`. diff --git a/clibs/lib/selva/io/dump.c b/clibs/lib/selva/io/dump.c index 18cde7f51e..14e0d90c5b 100644 --- a/clibs/lib/selva/io/dump.c +++ b/clibs/lib/selva/io/dump.c @@ -37,8 +37,8 @@ /* * Pick 32-bit primes for these. */ -#define DUMP_MAGIC_SCHEMA 3360690301 /* common.sdb */ #define DUMP_MAGIC_EXPIRE 2147483647 /* common.sdb */ +#define DUMP_MAGIC_ALIASES 4019181209 /* common.sdb */ #define DUMP_MAGIC_COMMON_IDS 2974848157 /* common.sdb */ #define DUMP_MAGIC_COMMON_BLOCKS 2734165127 /* common.sdb */ #define DUMP_MAGIC_TYPES 3550908863 /* [block].sdb */ @@ -48,7 +48,6 @@ #define DUMP_MAGIC_FIELD_BEGIN 3734376047 #endif #define DUMP_MAGIC_FIELD_END 2944546091 -#define DUMP_MAGIC_ALIASES 4019181209 #define DUMP_MAGIC_COLVEC 1901731729 #define DUMP_MAGIC_BLOCK_HASH 2898966349 @@ -215,8 +214,8 @@ static void save_node_fields(struct selva_io *io, const struct SelvaFieldsSchema io->sdb_write(&((sdb_nr_fields_t){ fields->nr_fields }), sizeof(sdb_nr_fields_t), 1, io); for (field_t field = 0; field < nr_fields; field++) { - const struct SelvaFieldSchema *fs = get_fs_by_fields_schema_field(schema, field); - struct SelvaFieldInfo *nfo = &fields->fields_map[field]; + auto fs = get_fs_by_fields_schema_field(schema, field); + auto nfo = &fields->fields_map[field]; enum SelvaFieldType type = nfo->in_use ? fs->type : SELVA_FIELD_TYPE_NULL; #if USE_DUMP_MAGIC_FIELD_BEGIN @@ -244,7 +243,6 @@ static void save_node_fields(struct selva_io *io, const struct SelvaFieldsSchema io->sdb_write(selva_fields_nfo2p(fields, nfo), sizeof(uint8_t), fs->smb.len, io); break; case SELVA_FIELD_TYPE_ALIAS: - case SELVA_FIELD_TYPE_ALIASES: case SELVA_FIELD_TYPE_COLVEC: /* NOP */ break; @@ -264,63 +262,10 @@ static void save_node(struct selva_io *io, struct SelvaDb *db, struct SelvaNode save_node_fields(io, schema, node); } -static void save_aliases_node(struct selva_io *io, struct SelvaTypeEntry *te, node_id_t node_id) -{ - const sdb_nr_aliases_t nr_aliases = te->ns.nr_aliases; - - write_dump_magic(io, DUMP_MAGIC_ALIASES); - io->sdb_write(&nr_aliases, sizeof(nr_aliases), 1, io); - - for (size_t i = 0; i < nr_aliases; i++) { - struct SelvaAliases *aliases = &te->aliases[i]; - const struct SelvaAlias *alias_first; - const struct SelvaAlias *alias; - sdb_nr_aliases_t nr_aliases_by_dest = 0; - - alias_first = alias = selva_get_alias_by_dest(aliases, node_id); - while (alias) { - nr_aliases_by_dest++; - alias = alias->next; - } - - io->sdb_write(&nr_aliases_by_dest, sizeof(nr_aliases_by_dest), 1, io); - - alias = alias_first; - while (alias) { - const char *name_str = alias->name; - const sdb_arr_len_t name_len = alias->name_len; - - io->sdb_write(&name_len, sizeof(name_len), 1, io); - io->sdb_write(name_str, sizeof(*name_str), name_len, io); - - alias = alias->next; - } - } -} - -static void save_schema(struct selva_io *io, struct SelvaDb *db) -{ - const sdb_nr_types_t nr_types = db->types.count; - struct SelvaTypeEntry *te; - - write_dump_magic(io, DUMP_MAGIC_SCHEMA); - io->sdb_write(&nr_types, sizeof(nr_types), 1, io); - - RB_FOREACH(te, SelvaTypeEntryIndex, &db->types.index) { - node_type_t type = te->type; - const sdb_arr_len_t schema_len = te->schema_len; - - io->sdb_write(&type, sizeof(type), 1, io); - io->sdb_write(&schema_len, sizeof(schema_len), 1, io); - io->sdb_write(te->schema_buf, sizeof(te->schema_buf[0]), te->schema_len, io); - } -} - static void save_expire(struct selva_io *io, struct SelvaDb *db) { struct SVectorIterator it; - struct SelvaExpireToken *token; const sdb_arr_len_t count = selva_expire_count(&db->expiring); write_dump_magic(io, DUMP_MAGIC_EXPIRE); @@ -328,6 +273,8 @@ static void save_expire(struct selva_io *io, struct SelvaDb *db) SVector_ForeachBegin(&it, &db->expiring.list); while (!SVector_Done(&it)) { + struct SelvaExpireToken *token; + token = SVector_Foreach(&it); do { struct SelvaDbExpireToken *dbToken = containerof(token, typeof(*dbToken), token); @@ -343,6 +290,32 @@ static void save_expire(struct selva_io *io, struct SelvaDb *db) } +static void save_aliases(struct selva_io *io, struct SelvaDb *db) +{ + write_dump_magic(io, DUMP_MAGIC_ALIASES); + + for (size_t ti = 0; ti < db->nr_types; ti++) { + auto te = &db->types[ti]; + const size_t nr_fields = te->ns.nr_alias_fields; + + for (size_t i = 0; i < nr_fields; i++) { + auto aliases = &te->aliases[i]; + sdb_nr_aliases_t nr_aliases_by_name = aliases->nr_aliases; + struct SelvaAlias *alias; + + io->sdb_write(&nr_aliases_by_name, sizeof(nr_aliases_by_name), 1, io); + RB_FOREACH(alias, SelvaAliasesByName, &aliases->alias_by_name) { + const char *name_str = alias->name; + const sdb_arr_len_t name_len = alias->name_len; + + io->sdb_write(&name_len, sizeof(name_len), 1, io); + io->sdb_write(name_str, sizeof(*name_str), name_len, io); + io->sdb_write(&alias->dest, sizeof(alias->dest), 1, io); + } + } + } +} + static void save_common_ids(struct selva_io *io, const node_id_t *ids_data, size_t meta_len) { const sdb_arr_len_t len = meta_len; @@ -394,8 +367,8 @@ int selva_dump_save_common(struct SelvaDb *db, struct selva_dump_common_data *co /* * Save all the common data here that can't be split up. */ - save_schema(&io, db); save_expire(&io, db); + save_aliases(&io, db); save_common_ids(&io, com->ids_data, com->ids_len); save_common_blocks(&io, db, com); @@ -418,8 +391,8 @@ static void selva_dump_save_colvec(struct selva_io *io, struct SelvaDb *, struct io->sdb_write(&block_i, sizeof(block_i), 1, io); static_assert(sizeof(block_i) == sizeof(uint32_t)); - for (size_t i = 0; i < te->ns.nr_colvecs; i++) { - struct SelvaColvec *colvec = &te->col_fields.colvec[i]; + for (size_t i = 0; i < te->ns.nr_colvec_fields; i++) { + auto colvec = &te->col_fields.colvec[i]; uint8_t *slab = (uint8_t *)colvec->v[block_i]; uint8_t slab_present = !!slab; @@ -496,7 +469,6 @@ int selva_dump_save_block(struct SelvaDb *db, struct SelvaTypeEntry *te, block_i selva_hash128_t node_hash = selva_node_hash_update(db, te, node, tmp_hash_state); selva_hash_update(hash_state, &node_hash, sizeof(node_hash)); save_node(&io, db, node); - save_aliases_node(&io, te, node->node_id); } /* @@ -530,69 +502,6 @@ int selva_dump_save_block(struct SelvaDb *db, struct SelvaTypeEntry *te, block_i return err; } -__attribute__((warn_unused_result)) -static int load_schema(struct selva_io *io, struct SelvaDb *db) -{ - sdb_nr_types_t nr_types; - - if (!read_dump_magic(io, DUMP_MAGIC_SCHEMA)) { - selva_io_errlog(io, "Invalid schema magic"); - return SELVA_EINVAL; - } - - if (io->sdb_read(&nr_types, sizeof(nr_types), 1, io) != 1) { - selva_io_errlog(io, "nr_types schema"); - return SELVA_EINVAL; - } - - for (size_t i = 0; i < nr_types; i++) { - node_type_t type; - __selva_autofree uint8_t *schema_buf; - sdb_arr_len_t schema_len; - int err; - - io->sdb_read(&type, sizeof(type), 1, io); - io->sdb_read(&schema_len, sizeof(schema_len), 1, io); - schema_buf = selva_malloc(schema_len); - io->sdb_read(schema_buf, sizeof(schema_buf[0]), schema_len, io); - - err = selva_db_create_type(db, type, schema_buf, schema_len); - if (err) { - selva_io_errlog(io, "Failed to create a node type entry: %s", selva_strerror(err)); - return SELVA_EINVAL; - } - } - - return 0; -} - -__attribute__((warn_unused_result)) -static int load_expire(struct selva_io *io, struct SelvaDb *db) -{ - sdb_arr_len_t count; - - if (!read_dump_magic(io, DUMP_MAGIC_EXPIRE)) { - selva_io_errlog(io, "Ivalid types magic"); - return SELVA_EINVAL; - } - - io->sdb_read(&count, sizeof(count), 1, io); - - for (sdb_arr_len_t i = 0; i < count; i++) { - node_type_t type; - node_id_t node_id; - int64_t expire; - - io->sdb_read(&type, sizeof(type), 1, io); - io->sdb_read(&node_id, sizeof(node_id), 1, io); - io->sdb_read(&expire, sizeof(expire), 1, io); - - selva_expire_node(db, type, node_id, expire, SELVA_EXPIRE_NODE_STRATEGY_IGNORE); - } - - return 0; -} - __attribute__((warn_unused_result)) static int load_string(struct selva_io *io, struct selva_string *s, const struct sdb_string_meta *meta) { @@ -675,7 +584,7 @@ static int load_field_reference(struct selva_io *io, struct SelvaNode *node, con io->sdb_read(&nr_refs, sizeof(nr_refs), 1, io); if (nr_refs) { - struct SelvaNodeLargeReference *ref = selva_fields_ensure_reference(node, fs); + auto ref = selva_fields_ensure_reference(node, fs); io->sdb_read(&ref->dst, sizeof(ref->dst), 1, io); io->sdb_read(&ref->edge, sizeof(ref->edge), 1, io); @@ -696,7 +605,7 @@ static int load_field_references(struct selva_io *io, struct SelvaDb *db, struct } (void)selva_fields_prealloc_refs(db, node, fs, nr_refs); - struct SelvaNodeReferences *refs = selva_fields_get_references(node, fs); + auto refs = selva_fields_get_references(node, fs); if (!refs) { return SELVA_ENOENT; } @@ -723,7 +632,7 @@ static int load_field_references(struct selva_io *io, struct SelvaDb *db, struct __attribute__((warn_unused_result)) static int load_node_fields(struct selva_io *io, struct SelvaDb *db, struct SelvaTypeEntry *te, struct SelvaNode *node) { - struct SelvaNodeSchema *ns = &te->ns; + auto ns = &te->ns; sdb_nr_fields_t nr_fields; int err = 0; @@ -784,7 +693,6 @@ static int load_node_fields(struct selva_io *io, struct SelvaDb *db, struct Selv err = load_field_micro_buffer(io, node, fs); break; case SELVA_FIELD_TYPE_ALIAS: - case SELVA_FIELD_TYPE_ALIASES: /* NOP */ break; case SELVA_FIELD_TYPE_COLVEC: @@ -825,7 +733,7 @@ static node_id_t load_node(struct selva_io *io, struct SelvaDb *db, struct Selva return SELVA_ENOENT; } - struct SelvaNode *node = res.node; + auto node = res.node; assert(node && node->type == te->type); err = load_node_fields(io, db, te, node); if (err) { @@ -835,45 +743,16 @@ static node_id_t load_node(struct selva_io *io, struct SelvaDb *db, struct Selva return node_id; } -__attribute__((warn_unused_result)) -static int load_aliases_node(struct selva_io *io, struct SelvaTypeEntry *te, node_id_t node_id) -{ - sdb_nr_aliases_t nr_aliases; - - if (!read_dump_magic(io, DUMP_MAGIC_ALIASES)) { - selva_io_errlog(io, "Invalid aliases magic for type %d", te->type); - return SELVA_EINVAL; - } - - io->sdb_read(&nr_aliases, sizeof(nr_aliases), 1, io); - for (sdb_nr_aliases_t i = 0; i < nr_aliases; i++) { - sdb_nr_aliases_t nr_aliases_by_dest; - - io->sdb_read(&nr_aliases_by_dest, sizeof(nr_aliases_by_dest), 1, io); - for (size_t j = 0; j < nr_aliases_by_dest; j++) { - sdb_arr_len_t name_len; - struct SelvaAlias *alias; - - io->sdb_read(&name_len, sizeof(name_len), 1, io); - alias = selva_malloc(sizeof_wflex(struct SelvaAlias, name, name_len)); - alias->name_len = name_len; - io->sdb_read(alias->name, sizeof(char), name_len, io); - alias->dest = node_id; - - selva_set_alias_p(&te->aliases[i], alias); - } - } - - return 0; -} - __attribute__((warn_unused_result)) static int load_nodes(struct selva_io *io, struct SelvaDb *db, struct SelvaTypeEntry *te) { - int err; sdb_nr_nodes_t nr_nodes; io->sdb_read(&nr_nodes, sizeof(nr_nodes), 1, io); + if (nr_nodes == 0) { + return 0; + } + for (sdb_nr_nodes_t i = 0; i < nr_nodes; i++) { node_id_t node_id; @@ -881,11 +760,6 @@ static int load_nodes(struct selva_io *io, struct SelvaDb *db, struct SelvaTypeE if (unlikely(node_id == 0)) { return SELVA_EINVAL; } - - err = load_aliases_node(io, te, node_id); - if (err) { - return err; - } } return 0; @@ -903,7 +777,7 @@ static int load_colvec(struct selva_io *io, struct SelvaTypeEntry *te) io->sdb_read(&block_i, sizeof(block_i), 1, io); static_assert(sizeof(block_i) == sizeof(uint32_t)); - for (size_t i = 0; i < te->ns.nr_colvecs; i++) { + for (size_t i = 0; i < te->ns.nr_colvec_fields; i++) { uint8_t slab_present; io->sdb_read(&slab_present, sizeof(slab_present), 1, io); @@ -911,7 +785,7 @@ static int load_colvec(struct selva_io *io, struct SelvaTypeEntry *te) /* * Load the whole slab at once. */ - struct SelvaColvec *colvec = &te->col_fields.colvec[i]; + auto colvec = &te->col_fields.colvec[i]; void *slab = colvec_init_slab(colvec, block_i); if (io->sdb_read(slab, colvec->slab_size, 1, io) != 1) { selva_io_errlog(io, "colvec slab"); @@ -947,6 +821,67 @@ static int load_type(struct selva_io *io, struct SelvaDb *db, struct SelvaTypeEn return err; } +__attribute__((warn_unused_result)) +static int load_expire(struct selva_io *io, struct SelvaDb *db) +{ + sdb_arr_len_t count; + + if (!read_dump_magic(io, DUMP_MAGIC_EXPIRE)) { + selva_io_errlog(io, "Ivalid types magic"); + return SELVA_EINVAL; + } + + io->sdb_read(&count, sizeof(count), 1, io); + + for (sdb_arr_len_t i = 0; i < count; i++) { + node_type_t type; + node_id_t node_id; + int64_t expire; + + io->sdb_read(&type, sizeof(type), 1, io); + io->sdb_read(&node_id, sizeof(node_id), 1, io); + io->sdb_read(&expire, sizeof(expire), 1, io); + + selva_expire_node(db, type, node_id, expire, SELVA_EXPIRE_NODE_STRATEGY_IGNORE); + } + + return 0; +} + +__attribute__((warn_unused_result)) +static int load_aliases(struct selva_io *io, struct SelvaDb *db) +{ + if (!read_dump_magic(io, DUMP_MAGIC_ALIASES)) { + selva_io_errlog(io, "Invalid aliases magic"); + return SELVA_EINVAL; + } + + for (size_t ti = 0; ti < db->nr_types; ti++) { + auto te = &db->types[ti]; + const size_t nr_fields = te->ns.nr_alias_fields; + + for (size_t i = 0; i < nr_fields; i++) { + sdb_nr_aliases_t nr_aliases_by_name; + + io->sdb_read(&nr_aliases_by_name, sizeof(nr_aliases_by_name), 1, io); + for (size_t j = 0; j < nr_aliases_by_name; j++) { + sdb_arr_len_t name_len; + struct SelvaAlias *alias; + + io->sdb_read(&name_len, sizeof(name_len), 1, io); + alias = selva_malloc(sizeof_wflex(struct SelvaAlias, name, name_len)); + io->sdb_read(alias->name, sizeof(char), name_len, io); + alias->name_len = name_len; + io->sdb_read(&alias->dest, sizeof(alias->dest), 1, io); + + selva_set_alias_p(&te->aliases[i], alias); + } + } + } + + return 0; +} + __attribute__((warn_unused_result)) static int load_common_ids(struct selva_io *io, struct selva_dump_common_data *com) { @@ -1028,8 +963,8 @@ int selva_dump_load_common(struct SelvaDb *db, struct selva_dump_common_data *co db->sdb_version = io.sdb_version; - err = load_schema(&io, db); err = err ?: load_expire(&io, db); + err = err ?: load_aliases(&io, db); err = err ?: load_common_ids(&io, com); err = err ?: load_common_blocks(&io, db, com); selva_io_end(&io, nullptr); @@ -1063,7 +998,8 @@ int selva_dump_load_block(struct SelvaDb *db, struct SelvaTypeEntry *te, block_i } if (io.sdb_version > db->sdb_version) { - selva_io_errlog(&io, "SDB version mismatch! common: %"PRIu32" block: %"PRIu32, db->sdb_version, io.sdb_version); + selva_io_errlog(&io, "%s: SDB version mismatch! common_version: %"PRIu32" block_version: %"PRIu32, + __func__ , db->sdb_version, io.sdb_version); err = SELVA_ENOTSUP; goto fail; } @@ -1079,13 +1015,15 @@ int selva_dump_load_block(struct SelvaDb *db, struct SelvaTypeEntry *te, block_i } if (!read_dump_magic(&io, DUMP_MAGIC_BLOCK_HASH)) { - selva_io_errlog(&io, "%s: Invalid block hash magic", __func__); + selva_io_errlog(&io, "%s: Invalid block hash magic (block: %u:%u)", + __func__, (unsigned)te->type, (unsigned)block_i); err = SELVA_EINVAL; goto fail; } if (io.sdb_read(&old_hash, sizeof(old_hash), 1, &io) != 1) { - selva_io_errlog(&io, "%s: Failed to read the hash", __func__); + selva_io_errlog(&io, "%s: Failed to read the hash (block: %u:%u)", + __func__, (unsigned)te->type, (unsigned)block_i); err = SELVA_EINVAL; goto fail; } @@ -1107,8 +1045,9 @@ int selva_dump_load_block(struct SelvaDb *db, struct SelvaTypeEntry *te, block_i selva_block_status_replace(te, block_i, prev_block_status); } if (old_hash != new_hash) { - selva_io_errlog(&io, "%s: Block hash mismatch! %.*s != %.*s", + selva_io_errlog(&io, "%s: Block hash mismatch (block: %u:%u)! %.*s != %.*s", __func__, + (unsigned)te->type, (unsigned)block_i, SELVA_HASH_HEX_LEN, selva_hash_to_hex((char [SELVA_HASH_HEX_LEN]){ 0 }, old_hash), SELVA_HASH_HEX_LEN, selva_hash_to_hex((char [SELVA_HASH_HEX_LEN]){ 0 }, new_hash)); err = SELVA_EIO; diff --git a/clibs/lib/selva/io/io.c b/clibs/lib/selva/io/io.c index 9cee36fdbe..9441106093 100644 --- a/clibs/lib/selva/io/io.c +++ b/clibs/lib/selva/io/io.c @@ -92,9 +92,18 @@ int selva_io_init_file(struct selva_io *io, int dirfd, const char *filename, enu } if (flags & SELVA_IO_FLAGS_WRITE) { - fd = openat(dirfd, filename, O_WRONLY | O_CREAT | O_TRUNC | O_RESOLVE_BENEATH | O_CLOEXEC, 0640); + /* TODO Use Linux openat2 and RESOLVE_BENEATH if supported */ + fd = openat(dirfd, filename, O_WRONLY | O_CREAT | O_TRUNC | +#if __MACH__ + O_RESOLVE_BENEATH | +#endif + O_CLOEXEC, 0640); } else { - fd = openat(dirfd, filename, O_RDONLY | O_RESOLVE_BENEATH | O_CLOEXEC); + fd = openat(dirfd, filename, O_RDONLY | +#if __MACH__ + O_RESOLVE_BENEATH | +#endif + O_CLOEXEC); } if (fd == -1) { goto fopen_err; diff --git a/clibs/lib/selva/mem/mempool.c b/clibs/lib/selva/mem/mempool.c index 95a5b46ad4..153bb76b8b 100644 --- a/clibs/lib/selva/mem/mempool.c +++ b/clibs/lib/selva/mem/mempool.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025 SAULX + * Copyright (c) 2020-2026 SAULX * SPDX-License-Identifier: MIT */ #if defined(__STDC_LIB_EXT1__) @@ -46,11 +46,13 @@ #define MEMPOOL_GROWING_FREE_LIST 1 #endif -char *mempool_get_obj(const struct mempool *mempool, struct mempool_chunk *chunk) { +char *mempool_get_obj(const struct mempool *mempool, struct mempool_chunk *chunk) +{ return ((char *)chunk) + sizeof(struct mempool_chunk) + PAD(sizeof(struct mempool_chunk), mempool->obj_align); } -static struct mempool_chunk *get_chunk(const struct mempool *mempool, void *obj) { +static struct mempool_chunk *get_chunk(const struct mempool *mempool, void *obj) +{ char *p = ((char *)obj) - PAD(sizeof(struct mempool_chunk), mempool->obj_align) - sizeof(struct mempool_chunk); return (struct mempool_chunk *)p; @@ -61,11 +63,13 @@ static struct mempool_slab *get_slab(const struct mempool_chunk *chunk) return (struct mempool_slab *)(chunk->slab & ~(uintptr_t)1); } -struct mempool_slab *mempool_get_slab(const struct mempool *mempool, void *obj) { +struct mempool_slab *mempool_get_slab(const struct mempool *mempool, void *obj) +{ return get_slab(get_chunk(mempool, obj)); } -struct mempool_slab_info mempool_slab_info(const struct mempool *mempool) { +struct mempool_slab_info mempool_slab_info(const struct mempool *mempool) +{ const size_t slab_size = (size_t)mempool->slab_size_kb * 1024; const size_t chunk_size = ALIGNED_SIZE( sizeof(struct mempool_chunk) + @@ -84,7 +88,8 @@ struct mempool_slab_info mempool_slab_info(const struct mempool *mempool) { }; } -void mempool_init(struct mempool *mempool, size_t slab_size, size_t obj_size, size_t obj_align) { +void mempool_init(struct mempool *mempool, size_t slab_size, size_t obj_size, size_t obj_align) +{ assert(slab_size - sizeof(struct mempool_slab) > obj_size && slab_size / 1024 > 0 && slab_size / 1024 < UINT16_MAX && @@ -100,7 +105,8 @@ void mempool_init(struct mempool *mempool, size_t slab_size, size_t obj_size, si mempool->advice = MEMPOOL_ADV_NORMAL | MEMPOOL_ADV_HP_NO; } -void mempool_init2(struct mempool *mempool, size_t slab_size, size_t obj_size, size_t obj_align, enum mempool_advice advice) { +void mempool_init2(struct mempool *mempool, size_t slab_size, size_t obj_size, size_t obj_align, enum mempool_advice advice) +{ mempool_init(mempool, slab_size, obj_size, obj_align); mempool->advice = advice; } @@ -108,11 +114,13 @@ void mempool_init2(struct mempool *mempool, size_t slab_size, size_t obj_size, s /** * Free slab that was allocated in mempool */ -static void mempool_free_slab(const struct mempool *mempool, struct mempool_slab *slab) { +static void mempool_free_slab(const struct mempool *mempool, struct mempool_slab *slab) +{ (void)munmap(slab, mempool->slab_size_kb * 1024); } -void mempool_destroy(struct mempool *mempool) { +void mempool_destroy(struct mempool *mempool) +{ /* * We don't keep track of the slab pointers because we assume the user to * know the slabs and return every single one of them before destroying the @@ -129,7 +137,8 @@ void mempool_destroy(struct mempool *mempool) { memset(mempool, 0, sizeof(*mempool)); } -void mempool_gc(struct mempool *mempool) { +void mempool_gc(struct mempool *mempool) +{ struct mempool_slab_info info = mempool_slab_info(mempool); /* @@ -185,7 +194,8 @@ static int defrag_cmp(void *ctx_, const void *a, const void *b) return chunk_a < chunk_b; } -void mempool_defrag(struct mempool *mempool, int (*obj_compar)(const void *, const void*)) { +void mempool_defrag(struct mempool *mempool, int (*obj_compar)(const void *, const void*)) +{ struct mempool_slab_info slab_nfo = mempool_slab_info(mempool); struct mempool_defrag_ctx ctx = { .mempool = mempool, @@ -240,11 +250,45 @@ void mempool_defrag(struct mempool *mempool, int (*obj_compar)(const void *, con } MEMPOOL_FOREACH_SLAB_END(); } +static void add_new_slab2freelist(struct mempool *mempool, struct mempool_slab *slab) +{ + const struct mempool_slab_info info = mempool_slab_info(mempool); + typeof(mempool->free_chunks) *free_chunks = &mempool->free_chunks; +#ifdef MEMPOOL_GROWING_FREE_LIST + struct mempool_chunk *prev = nullptr; +#endif + + slab->nr_free = info.nr_objects; + + /* + * Add all new objects to the list of free objects in the pool. + */ + MEMPOOL_FOREACH_CHUNK_BEGIN(info, slab) { + chunk->slab = (uintptr_t)slab; /* also marked as free. */ +#ifdef MEMPOOL_GROWING_FREE_LIST + if (prev) { + LIST_INSERT_AFTER(prev, chunk, next_free); + } else { +#endif + LIST_INSERT_HEAD(free_chunks, chunk, next_free); +#ifdef MEMPOOL_GROWING_FREE_LIST + } + prev = chunk; +#endif + } MEMPOOL_FOREACH_CHUNK_END(); + + SLIST_INSERT_HEAD(&mempool->slabs, slab, next_slab); +} + /** * Allocate a new slab using mmap(). */ -static int mempool_new_slab(struct mempool *mempool) { +static void mempool_new_slab(struct mempool *mempool) +{ const size_t bsize = mempool->slab_size_kb * 1024; +#if !defined(__linux__) + constexpr +#endif int mmap_flags = MAP_PRIVATE | MAP_ANONYMOUS; struct mempool_slab *slab; @@ -268,7 +312,7 @@ static int mempool_new_slab(struct mempool *mempool) { } #endif perror("Failed to allocate a slab"); - return 1; + abort(); } #if defined(__linux__) || defined(__MACH__) @@ -292,45 +336,79 @@ static int mempool_new_slab(struct mempool *mempool) { } #endif - const struct mempool_slab_info info = mempool_slab_info(mempool); + add_new_slab2freelist(mempool, slab); +} - slab->nr_free = info.nr_objects; +void mempool_prealloc(struct mempool *mempool, size_t nr_objects) +{ + struct mempool_slab_info nfo = mempool_slab_info(mempool); + const size_t nr_slabs = (nr_objects + nfo.nr_objects - 1) / nfo.nr_objects; + const size_t slab_size = nr_slabs * mempool->slab_size_kb * 1024; + const size_t bsize = nr_slabs * slab_size; +#if !defined(__linux__) + constexpr +#endif + int mmap_flags = MAP_PRIVATE | MAP_ANONYMOUS; + struct mempool_slab *slabs; - /* - * Add all new objects to the list of free objects in the pool. - */ -#ifdef MEMPOOL_GROWING_FREE_LIST - struct mempool_chunk *prev = nullptr; + assert(nr_objects > 0); + + /* TODO Verify that huge pages can be partially unmapped later if necessary. */ +#if 0 && defined(__linux__) + if (bsize >= 2048 * 1024 && + (mempool->advice & (MEMPOOL_ADV_HP_SOFT | MEMPOOL_ADV_HP_HARD))) { + mmap_flags |= MAP_HUGETLB /* | MAP_HUGE_2MB */; + } #endif - MEMPOOL_FOREACH_CHUNK_BEGIN(info, slab) { - chunk->slab = (uintptr_t)slab; /* also marked as free. */ -#ifdef MEMPOOL_GROWING_FREE_LIST - if (prev) { - LIST_INSERT_AFTER(prev, chunk, next_free); - } else { + +#if defined(__linux__) +retry: #endif - LIST_INSERT_HEAD(&mempool->free_chunks, chunk, next_free); -#ifdef MEMPOOL_GROWING_FREE_LIST + slabs = mmap(0, bsize, PROT_READ | PROT_WRITE, mmap_flags, -1, 0); + if (slabs == MAP_FAILED) { +#if defined(__linux__) + if ((mmap_flags & MAP_HUGETLB) && + (mempool->advice & MEMPOOL_ADV_HP_SOFT)) { + mmap_flags &= ~MAP_HUGETLB; + goto retry; } - prev = chunk; #endif - } MEMPOOL_FOREACH_CHUNK_END(); + perror("Failed to allocate a slabs"); + abort(); + } - SLIST_INSERT_HEAD(&mempool->slabs, slab, next_slab); +#if defined(__linux__) || defined(__MACH__) + if (mempool->advice & (MEMPOOL_ADV_RANDOM | MEMPOOL_ADV_SEQUENTIAL)) { + switch (mempool->advice & (MEMPOOL_ADV_RANDOM | MEMPOOL_ADV_SEQUENTIAL)) { + case MEMPOOL_ADV_RANDOM: + madvise(slabs, bsize, MADV_RANDOM); + break; + case MEMPOOL_ADV_SEQUENTIAL: + madvise(slabs, bsize, MADV_SEQUENTIAL); + break; + default: /* NOP */ + break; + } + } +#endif + +#if defined(__linux__) + if (bsize >= 2048 * 1024 && (mempool->advice & MEMPOOL_ADV_HP_THP)) { + (void)madvise(slabs, bsize, MADV_HUGEPAGE); + } +#endif - return 0; + for (size_t i = 0; i < nr_slabs; i++) { + add_new_slab2freelist(mempool, (typeof(slabs))((uint8_t *)slabs + i * slab_size)); + } } -void *mempool_get(struct mempool *mempool) { +void *mempool_get(struct mempool *mempool) +{ struct mempool_chunk *next; if (LIST_EMPTY(&mempool->free_chunks)) { - int err; - - err = mempool_new_slab(mempool); - if (err) { - abort(); - } + mempool_new_slab(mempool); } next = LIST_FIRST(&mempool->free_chunks); @@ -341,7 +419,8 @@ void *mempool_get(struct mempool *mempool) { return mempool_get_obj(mempool, next); } -void mempool_return(struct mempool *mempool, void *p) { +void mempool_return(struct mempool *mempool, void *p) +{ struct mempool_chunk *chunk = get_chunk(mempool, p); LIST_INSERT_HEAD(&mempool->free_chunks, chunk, next_free); diff --git a/clibs/lib/selva/schema.c b/clibs/lib/selva/schema.c index 9a84ea3237..773552fdf6 100644 --- a/clibs/lib/selva/schema.c +++ b/clibs/lib/selva/schema.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025 SAULX + * Copyright (c) 2024-2026 SAULX * SPDX-License-Identifier: MIT */ #include @@ -21,12 +21,13 @@ #include "io.h" #include "schema.h" -#define SCHEMA_MIN_SIZE 8 -#define SCHEMA_OFF_BLOCK_CAPACITY 0 /*!< u32 */ -#define SCHEMA_OFF_NR_FIELDS 4 /*!< u8 */ -#define SCHEMA_OFF_NR_FIXED_FIELDS 5 /*!< u8 */ -#define SCHEMA_OFF_NR_VIRTUAL_FIELDS 6 /*!< u8 */ -#define SCHEMA_OFF_VERSION 7 /*!< u8 */ +struct SelvaSchemaHeader { + uint32_t block_capacity; + uint8_t nr_fields; + uint8_t nr_fixed_fields; + uint8_t nr_virtual_fields; + uint8_t sdb_version; +} __packed; struct schemabuf_parser_ctx { struct SelvaTypeEntry *te; @@ -38,6 +39,8 @@ struct schemabuf_parser_ctx { unsigned version; }; +typedef uint8_t __attribute__((__hardbool__(0, 1))) schema_bool_t; +static_assert(sizeof(schema_bool_t) == 1); static inline uint32_t calc_default_off(struct schemabuf_parser_ctx *ctx, size_t off) { @@ -51,37 +54,38 @@ static int type2fs_reserved(struct schemabuf_parser_ctx *, struct SelvaFieldsSch static int type2fs_micro_buffer(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSchema *schema, field_t field) { - uint16_t len; - size_t off = 1; - const size_t min_buf_len = 1 + sizeof(len) + (ctx->version >= 6); + struct { + enum SelvaFieldType type; + uint16_t len; + schema_bool_t has_default; + } __packed head; + size_t off = 0; struct SelvaFieldSchema *fs = &schema->field_schemas[field]; - if (ctx->len < min_buf_len) { + if (ctx->len < sizeof(head)) { return SELVA_EINVAL; } - memcpy(&len, ctx->buf + off, sizeof(len)); - off += sizeof(len); + memcpy(&head, ctx->buf + off, sizeof(head)); + off += sizeof(head); *fs = (struct SelvaFieldSchema){ .field = field, .type = SELVA_FIELD_TYPE_MICRO_BUFFER, + .default_off = 0, .smb = { - .len = len, - .default_off = 0, + .len = head.len, }, }; - if (ctx->version >= 6) { - if (ctx->buf[off++]) { /* has default */ - if (ctx->len < off + len) { - return SELVA_EINVAL; - } - - /* * Default is copied straight from the schema buffer. */ - fs->smb.default_off = calc_default_off(ctx, off); - off += len; + if (head.has_default) { + if (ctx->len < off + head.len) { + return SELVA_EINVAL; } + + /* Default is copied straight from the schema buffer. */ + fs->default_off = calc_default_off(ctx, off); + off += head.len; } return off; @@ -89,17 +93,20 @@ static int type2fs_micro_buffer(struct schemabuf_parser_ctx *ctx, struct SelvaFi static int type2fs_string(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSchema *schema, field_t field) { - size_t off = 1; - uint8_t fixed_len; - const size_t min_buf_len = 1 + sizeof(fixed_len) + (ctx->version >= 7); + struct { + enum SelvaFieldType type; + uint8_t fixed_len_hint; + uint32_t default_len; + } __packed head; + size_t off = 0; struct SelvaFieldSchema *fs = &schema->field_schemas[field]; - if (ctx->len < min_buf_len) { + if (ctx->len < sizeof(head)) { return SELVA_EINVAL; } - memcpy(&fixed_len, ctx->buf + off, sizeof(fixed_len)); - off += sizeof(fixed_len); + memcpy(&head, ctx->buf + off, sizeof(head)); + off += sizeof(head); *fs = (struct SelvaFieldSchema){ .field = field, @@ -109,26 +116,19 @@ static int type2fs_string(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSc * We only allow very short strings to be stored as fixed embedded * strings. This is best to be aligned to 64-bit boundaries */ - .fixed_len = fixed_len <= 48 ? fixed_len : 0, + .fixed_len = head.fixed_len_hint <= 48 ? head.fixed_len_hint : 0, + .default_len = head.default_len, }, }; - if (ctx->version >= 8) { - uint32_t default_len; - - memcpy(&default_len, ctx->buf + off, sizeof(default_len)); - off += sizeof(default_len); - fs->string.default_len = default_len; - - if (default_len > 0) { /* has default */ - if (ctx->len < off + default_len) { - return SELVA_EINVAL; - } - - /* default is copied straight from the schema buffer. */ - fs->string.default_off = calc_default_off(ctx, off); - off += default_len; + if (head.default_len > 0) { /* has default */ + if (ctx->len < off + head.default_len) { + return SELVA_EINVAL; } + + /* default is copied straight from the schema buffer. */ + fs->default_off = calc_default_off(ctx, off); + off += head.default_len; } return off; @@ -136,37 +136,41 @@ static int type2fs_string(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSc static int type2fs_text(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSchema *schema, field_t field) { + struct { + enum SelvaFieldType type; + uint8_t nr_defaults; + } __packed head; + size_t off = 0; struct SelvaFieldSchema *fs = &schema->field_schemas[field]; - size_t off = 1; + + if (ctx->len < sizeof(head)) { + return SELVA_EINVAL; + } *fs = (struct SelvaFieldSchema){ .field = field, .type = SELVA_FIELD_TYPE_TEXT, }; - if (ctx->version >= 8) { - uint8_t nr_defaults; - - memcpy(&nr_defaults, ctx->buf + off, sizeof(nr_defaults)); - off += sizeof(nr_defaults); - fs->text.nr_defaults = nr_defaults; + memcpy(&head, ctx->buf + off, sizeof(head)); + off += sizeof(head); + fs->text.nr_defaults = head.nr_defaults; - if (nr_defaults > 0) { /* has defaults */ - fs->text.defaults_off = (uint32_t)((ptrdiff_t)(ctx->buf - ctx->schema_buf) + off); + if (head.nr_defaults > 0) { /* has defaults */ + fs->default_off = (uint32_t)((ptrdiff_t)(ctx->buf - ctx->schema_buf) + off); - /* - * Iterate over the defaults and skip them. - */ - for (size_t i = 0; i < nr_defaults; i++) { - uint32_t len; - - if (ctx->len < off + sizeof(len)) { - return SELVA_EINVAL; - } + /* + * Iterate over the defaults and skip them. + */ + for (size_t i = 0; i < head.nr_defaults; i++) { + uint32_t len; - memcpy(&len, ctx->buf + off, sizeof(len)); - off += sizeof(len) + len; + if (ctx->len < off + sizeof(len)) { + return SELVA_EINVAL; } + + memcpy(&len, ctx->buf + off, sizeof(len)); + off += sizeof(len) + len; } } @@ -176,8 +180,6 @@ static int type2fs_text(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSche static int type2fs_refs(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSchema *schema, field_t field, enum SelvaFieldType type) { const uint8_t *buf = ctx->buf; - size_t len = ctx->len; - size_t orig_len = ctx->len; struct SelvaFieldSchema *fs = &schema->field_schemas[field]; struct { enum SelvaFieldType type; @@ -186,35 +188,31 @@ static int type2fs_refs(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSche field_t inverse_field; node_type_t edge_node_type; uint32_t capped; - } __packed constraints; + } __packed head; - static_assert(sizeof(constraints) == 11); + static_assert(sizeof(head) == 11); - size_t copy_len = sizeof(constraints) - (ctx->version < 7) * sizeof_field(typeof(constraints), capped); - if (len < copy_len) { + if (ctx->len < sizeof(head)) { return SELVA_EINVAL; } - constraints.capped = 0; - memcpy(&constraints, buf, copy_len); - buf += copy_len; - len -= copy_len; + memcpy(&head, buf, sizeof(head)); - enum EdgeFieldConstraintFlag flags = constraints.flags & (EDGE_FIELD_CONSTRAINT_FLAG_DEPENDENT); + enum EdgeFieldConstraintFlag flags = head.flags & (EDGE_FIELD_CONSTRAINT_FLAG_DEPENDENT); *fs = (struct SelvaFieldSchema){ .field = field, .type = type, .edge_constraint = { .flags = flags, - .inverse_field = constraints.inverse_field, - .dst_node_type = constraints.dst_node_type, - .edge_node_type = constraints.edge_node_type, - .limit = constraints.capped, + .inverse_field = head.inverse_field, + .dst_node_type = head.dst_node_type, + .edge_node_type = head.edge_node_type, + .limit = head.capped, }, }; - return orig_len - len; + return sizeof(head); } static int type2fs_reference(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSchema *schema, field_t field) @@ -234,20 +232,9 @@ static int type2fs_alias(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSch *fs = (struct SelvaFieldSchema){ .field = field, .type = SELVA_FIELD_TYPE_ALIAS, - .alias_index = ctx->alias_index++, - }; - - return 1; -} - -static int type2fs_aliases(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSchema *schema, field_t field) -{ - struct SelvaFieldSchema *fs = &schema->field_schemas[field]; - - *fs = (struct SelvaFieldSchema){ - .field = field, - .type = SELVA_FIELD_TYPE_ALIASES, - .alias_index = ctx->alias_index++, + .alias = { + .index = ctx->alias_index++, + }, }; return 1; @@ -260,28 +247,41 @@ static int type2fs_colvec(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSc enum SelvaFieldType type; uint16_t vec_len; /*!< Length of a single vector. */ uint16_t comp_size; /*!< Component size in the vector. */ - uint8_t has_default; - } __packed spec = {}; - size_t copy_len = sizeof(spec) + (ctx->version < 8) * -sizeof_field(typeof(spec), has_default); + schema_bool_t has_default; + } __packed head; + size_t off = 0; - if (ctx->len < sizeof(spec)) { + if (ctx->len < sizeof(head)) { return SELVA_EINVAL; } - memcpy(&spec, ctx->buf, copy_len); + memcpy(&head, ctx->buf + off, sizeof(head)); + off += sizeof(head); *fs = (struct SelvaFieldSchema){ .field = field, .type = SELVA_FIELD_TYPE_COLVEC, + .default_off = 0, .colvec = { - .vec_len = spec.vec_len, - .comp_size = spec.comp_size, + .vec_len = head.vec_len, + .comp_size = head.comp_size, .index = ctx->colvec_index++, - .default_off = (spec.has_default) ? calc_default_off(ctx, sizeof(spec)) : 0, }, }; - return copy_len; + if (head.has_default) { + size_t vec_size = head.vec_len * head.comp_size; + + if (ctx->len < off + vec_size) { + return SELVA_EINVAL; + } + + /* Default is copied straight from the schema buffer. */ + fs->default_off = calc_default_off(ctx, off); + off += vec_size; + } + + return off; } static struct schemabuf_parser { @@ -316,10 +316,6 @@ static struct schemabuf_parser { .type = SELVA_FIELD_TYPE_ALIAS, .type2fs = type2fs_alias, }, - [SELVA_FIELD_TYPE_ALIASES] = { - .type = SELVA_FIELD_TYPE_ALIASES, - .type2fs = type2fs_aliases, - }, [SELVA_FIELD_TYPE_COLVEC] = { .type = SELVA_FIELD_TYPE_COLVEC, .type2fs = type2fs_colvec, @@ -328,19 +324,18 @@ static struct schemabuf_parser { int schemabuf_get_info(struct schema_info *nfo, const uint8_t *buf, size_t len) { - uint32_t block_capacity; + struct SelvaSchemaHeader head; - if (len < SCHEMA_MIN_SIZE) { + if (len < sizeof(struct SelvaSchemaHeader)) { return SELVA_EINVAL; } - memcpy(&block_capacity, buf + SCHEMA_OFF_BLOCK_CAPACITY, sizeof(block_capacity)); - + memcpy(&head, buf, sizeof(head)); *nfo = (struct schema_info){ - .block_capacity = block_capacity, - .nr_fields = buf[SCHEMA_OFF_NR_FIELDS], - .nr_fixed_fields = buf[SCHEMA_OFF_NR_FIXED_FIELDS], - .nr_virtual_fields = buf[SCHEMA_OFF_NR_VIRTUAL_FIELDS], + .block_capacity = head.block_capacity, + .nr_fields = head.nr_fields, + .nr_fixed_fields = head.nr_fixed_fields, + .nr_virtual_fields = head.nr_virtual_fields, }; if (nfo->nr_fixed_fields > nfo->nr_fields || @@ -399,9 +394,10 @@ static bool has_defaults(struct SelvaFieldsSchema *schema) for (size_t i = 0; i < nr_fixed_fields; i++) { const struct SelvaFieldSchema *fs = get_fs_by_fields_schema_field(schema, i); - if ((fs->type == SELVA_FIELD_TYPE_MICRO_BUFFER && fs->smb.default_off > 0) || - (fs->type == SELVA_FIELD_TYPE_STRING && fs->string.default_off > 0) || - (fs->type == SELVA_FIELD_TYPE_TEXT && fs->string.default_off > 0)) { + if ((fs->type == SELVA_FIELD_TYPE_MICRO_BUFFER || + fs->type == SELVA_FIELD_TYPE_STRING || + fs->type == SELVA_FIELD_TYPE_TEXT) && + fs->default_off > 0) { return true; } } @@ -425,12 +421,12 @@ static void make_fixed_fields_template(struct SelvaFieldsSchema *schema, const u const struct SelvaFieldSchema *fs = get_fs_by_fields_schema_field(schema, i); void *field_data = fixed_data_buf + (nfo[i].off << SELVA_FIELDS_OFF); - if (fs->type == SELVA_FIELD_TYPE_MICRO_BUFFER && fs->smb.default_off > 0) { - memcpy(field_data, schema_buf + fs->smb.default_off, fs->smb.len); - } else if (fs->type == SELVA_FIELD_TYPE_STRING && fs->string.default_off > 0) { + if (fs->type == SELVA_FIELD_TYPE_MICRO_BUFFER && fs->default_off > 0) { + memcpy(field_data, schema_buf + fs->default_off, fs->smb.len); + } else if (fs->type == SELVA_FIELD_TYPE_STRING && fs->default_off > 0) { if (fs->string.fixed_len > 0) { /* Fixed string needs to be copied here. */ struct selva_string *s = (struct selva_string *)field_data; - const void *default_str = schema_buf + fs->string.default_off; + const void *default_str = schema_buf + fs->default_off; size_t default_len = fs->string.default_len; int err; @@ -486,7 +482,7 @@ static int parse2(struct schemabuf_parser_ctx *ctx, struct SelvaFieldsSchema *fi } make_field_map_template(fields_schema); - make_fixed_fields_template(fields_schema, buf - SCHEMA_MIN_SIZE); + make_fixed_fields_template(fields_schema, buf - sizeof(struct SelvaSchemaHeader)); return 0; } @@ -505,23 +501,23 @@ int schemabuf_parse_ns(struct SelvaNodeSchema *ns, const uint8_t *buf, size_t le .alias_index = 0, }; - if (len < SCHEMA_MIN_SIZE) { + if (len < sizeof(struct SelvaSchemaHeader)) { return SELVA_EINVAL; } /* We just assume that fields_schema is allocated properly. */ - ctx.version = buf[SCHEMA_OFF_VERSION]; - fields_schema->nr_fields = buf[SCHEMA_OFF_NR_FIELDS]; - fields_schema->nr_fixed_fields = buf[SCHEMA_OFF_NR_FIXED_FIELDS]; + ctx.version = buf[offsetof(struct SelvaSchemaHeader, sdb_version)]; + fields_schema->nr_fields = buf[offsetof(struct SelvaSchemaHeader, nr_fields)]; + fields_schema->nr_fixed_fields = buf[offsetof(struct SelvaSchemaHeader, nr_fixed_fields)]; if (ctx.version > max_version) { /* Can't load a schema created with a newer version. */ return SELVA_ENOTSUP; } - int err = parse2(&ctx, fields_schema, buf + SCHEMA_MIN_SIZE, len - SCHEMA_MIN_SIZE); - ns->nr_aliases = ctx.alias_index; - ns->nr_colvecs = ctx.colvec_index; + int err = parse2(&ctx, fields_schema, buf + sizeof(struct SelvaSchemaHeader), len - sizeof(struct SelvaSchemaHeader)); + ns->nr_alias_fields = ctx.alias_index; + ns->nr_colvec_fields = ctx.colvec_index; return err; } diff --git a/clibs/lib/selva/types.c b/clibs/lib/selva/types.c index 100876caba..c5fcaf85dc 100644 --- a/clibs/lib/selva/types.c +++ b/clibs/lib/selva/types.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025 SAULX + * Copyright (c) 2024-2026 SAULX * SPDX-License-Identifier: MIT */ @@ -15,7 +15,6 @@ bool selva_is_valid_field_type(enum SelvaFieldType ftype) case SELVA_FIELD_TYPE_REFERENCES: case SELVA_FIELD_TYPE_MICRO_BUFFER: case SELVA_FIELD_TYPE_ALIAS: - case SELVA_FIELD_TYPE_ALIASES: case SELVA_FIELD_TYPE_COLVEC: return true; } @@ -40,8 +39,6 @@ const char *selva_str_field_type(enum SelvaFieldType ftype) return (const char *)"micro buffer"; case SELVA_FIELD_TYPE_ALIAS: return (const char *)"alias"; - case SELVA_FIELD_TYPE_ALIASES: - return (const char *)"aliases"; case SELVA_FIELD_TYPE_COLVEC: return (const char *)"columnar vector"; } diff --git a/compile_errors.txt b/compile_errors.txt new file mode 100644 index 0000000000..7fee670c10 --- /dev/null +++ b/compile_errors.txt @@ -0,0 +1,7914 @@ +scripts/repl.ts:6:10 - error TS2305: Module '"../src/protocol/index.js"' has no exported member 'AggregateType'. + +6 import { AggregateType } from '../src/protocol/index.js' +   ~~~~~~~~~~~~~ + +scripts/test_push_exports.ts:13:37 - error TS2307: Cannot find module '../src/modify/AutoSizedUint8Array.js' or its corresponding type declarations. + +13 import { AutoSizedUint8Array } from '../src/modify/AutoSizedUint8Array.js' +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +scripts/test_push_exports.ts:17:9 - error TS2739: Type '{ id: number; start: number; size: number; }' is missing the following properties from type 'ModifyMainHeader': type, increment, incrementPositive, expire + +17 const header: ModifyMainHeader = { +   ~~~~~~ + +src/db-client/query2/index.ts:73:5 - error TS2304: Cannot find name 'L'. + +73 L, +   ~ + +src/db-client/query2/index.ts:142:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +142 ): NextBranch< +   ~~~~~~~~~~~ +143 S, +  ~~~~~~ +... +151 GroupedKey +  ~~~~~~~~~~~~~~ +152 > +  ~~~ + +src/db-client/query2/index.ts:155:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +155 ): NextBranch< +   ~~~~~~~~~~~ +156 S, +  ~~~~~~ +... +164 GroupedKey +  ~~~~~~~~~~~~~~ +165 > +  ~~~ + +src/db-client/query2/index.ts:182:12 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +182 count(): NextBranch< +   ~~~~~~~~~~~ +183 S, +  ~~~~~~ +... +191 GroupedKey +  ~~~~~~~~~~~~~~ +192 > { +  ~~~ + +src/db-client/query2/index.ts:199:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +199 ): NextBranch< +   ~~~~~~~~~~~ +200 S, +  ~~~~~~ +... +208 GroupedKey +  ~~~~~~~~~~~~~~ +209 > +  ~~~ + +src/db-client/query2/index.ts:212:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +212 ): NextBranch< +   ~~~~~~~~~~~ +213 S, +  ~~~~~~ +... +221 GroupedKey +  ~~~~~~~~~~~~~~ +222 > +  ~~~ + +src/db-client/query2/index.ts:241:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +241 ): NextBranch< +   ~~~~~~~~~~~ +242 S, +  ~~~~~~ +... +250 GroupedKey +  ~~~~~~~~~~~~~~ +251 > +  ~~~ + +src/db-client/query2/index.ts:254:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +254 ): NextBranch< +   ~~~~~~~~~~~ +255 S, +  ~~~~~~ +... +263 GroupedKey +  ~~~~~~~~~~~~~~ +264 > +  ~~~ + +src/db-client/query2/index.ts:283:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +283 ): NextBranch< +   ~~~~~~~~~~~ +284 S, +  ~~~~~~ +... +292 GroupedKey +  ~~~~~~~~~~~~~~ +293 > +  ~~~ + +src/db-client/query2/index.ts:296:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +296 ): NextBranch< +   ~~~~~~~~~~~ +297 S, +  ~~~~~~ +... +305 GroupedKey +  ~~~~~~~~~~~~~~ +306 > +  ~~~ + +src/db-client/query2/index.ts:325:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +325 ): NextBranch< +   ~~~~~~~~~~~ +326 S, +  ~~~~~~ +... +334 GroupedKey +  ~~~~~~~~~~~~~~ +335 > +  ~~~ + +src/db-client/query2/index.ts:338:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +338 ): NextBranch< +   ~~~~~~~~~~~ +339 S, +  ~~~~~~ +... +348 GroupedKey +  ~~~~~~~~~~~~~~ +349 > +  ~~~ + +src/db-client/query2/index.ts:368:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +368 ): NextBranch< +   ~~~~~~~~~~~ +369 S, +  ~~~~~~ +... +377 GroupedKey +  ~~~~~~~~~~~~~~ +378 > +  ~~~ + +src/db-client/query2/index.ts:381:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +381 ): NextBranch< +   ~~~~~~~~~~~ +382 S, +  ~~~~~~ +... +391 GroupedKey +  ~~~~~~~~~~~~~~ +392 > +  ~~~ + +src/db-client/query2/index.ts:412:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +412 ): NextBranch< +   ~~~~~~~~~~~ +413 S, +  ~~~~~~ +... +421 GroupedKey +  ~~~~~~~~~~~~~~ +422 > +  ~~~ + +src/db-client/query2/index.ts:425:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +425 ): NextBranch< +   ~~~~~~~~~~~ +426 S, +  ~~~~~~ +... +434 GroupedKey +  ~~~~~~~~~~~~~~ +435 > +  ~~~ + +src/db-client/query2/index.ts:438:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +438 ): NextBranch { +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:472:6 - error TS2314: Generic type 'NextBranch' requires 10 type argument(s). + +472 ): NextBranch< +   ~~~~~~~~~~~ +473 S, +  ~~~~~~ +... +481 GroupedKey +  ~~~~~~~~~~~~~~ +482 > +  ~~~ + +src/db-client/query2/index.ts:493:5 - error TS2304: Cannot find name 'L'. + +493 L, +   ~ + +src/db-client/query2/index.ts:541:5 - error TS2304: Cannot find name 'L'. + +541 L, +   ~ + +src/db-client/query2/index.ts:550:36 - error TS2536: Type '"locales"' cannot be used to index type 'S'. + +550 locale>( +   ~~~~~~~~~~~~ + +src/db-client/query2/index.ts:579:5 - error TS2304: Cannot find name 'L'. + +579 L, +   ~ + +src/db-client/query2/index.ts:598:5 - error TS2304: Cannot find name 'L'. + +598 L, +   ~ + +src/db-client/query2/index.ts:617:5 - error TS2304: Cannot find name 'L'. + +617 L, +   ~ + +src/db-client/query2/index.ts:675:13 - error TS2707: Generic type 'Query' requires between 0 and 9 type arguments. + +675 >(type: T): Query +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:684:24 - error TS2314: Generic type 'InferSchemaOutput' requires 2 type argument(s). + +684 id: number | Partial>, +   ~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:685:4 - error TS2707: Generic type 'Query' requires between 0 and 9 type arguments. + +685 ): Query +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:693:40 - error TS2314: Generic type 'InferSchemaOutput' requires 2 type argument(s). + +693 target?: number | number[] | Partial>, +   ~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:694:4 - error TS2707: Generic type 'Query' requires between 0 and 9 type arguments. + +694 ): Query { +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:697:20 - error TS2558: Expected 0-9 type arguments, but got 10. + +697 return new Query( +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:715:11 - error TS2707: Generic type 'Query' requires between 0 and 9 type arguments. + +715 > extends Query< +   ~~~~~~ +716 S, +  ~~~~ +... +725 GroupedKey +  ~~~~~~~~~~~~ +726 > { +  ~ + +src/db-client/query2/index.ts:730:42 - error TS2314: Generic type 'InferSchemaOutput' requires 2 type argument(s). + +730 target?: number | number[] | Partial>, +   ~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/index.ts:733:10 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +733 this.ast.type = type as string +   ~~~ + +src/db-client/query2/index.ts:734:22 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +734 if (target) this.ast.target = target +   ~~~ + +src/db-client/query2/index.ts:758:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +758 !this.ast.props && +   ~~~ + +src/db-client/query2/index.ts:759:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +759 !this.ast.sum && +   ~~~ + +src/db-client/query2/index.ts:760:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +760 !this.ast.count && +   ~~~ + +src/db-client/query2/index.ts:761:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +761 !this.ast.avg && +   ~~~ + +src/db-client/query2/index.ts:762:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +762 !this.ast.hmean && +   ~~~ + +src/db-client/query2/index.ts:763:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +763 !this.ast.max && +   ~~~ + +src/db-client/query2/index.ts:764:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +764 !this.ast.min && +   ~~~ + +src/db-client/query2/index.ts:765:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +765 !this.ast.stddev && +   ~~~ + +src/db-client/query2/index.ts:766:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +766 !this.ast.variance && +   ~~~ + +src/db-client/query2/index.ts:767:13 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +767 !this.ast.cardinality +   ~~~ + +src/db-client/query2/index.ts:769:12 - error TS2339: Property 'include' does not exist on type 'BasedQuery2'. + +769 this.include('*') +   ~~~~~~~ + +src/db-client/query2/index.ts:777:12 - error TS2339: Property 'ast' does not exist on type 'BasedQuery2'. + +777 this.ast, +   ~~~ + +src/db-client/query2/index.ts:875:43 - error TS2707: Generic type 'Query' requires between 0 and 9 type arguments. + +875 type AnyQuery = Query< +   ~~~~~~ +876 S, +  ~~~~ +... +885 any +  ~~~~~ +886 > +  ~ + +src/db-client/query2/index.ts:907:5 - error TS2707: Generic type 'Query' requires between 0 and 9 type arguments. + +907 : Query< +   ~~~~~~ +908 S, +  ~~~~~~~~ +... +917 GroupedKey +  ~~~~~~~~~~~~~~~~ +918 > +  ~~~~~ + +src/db-client/query2/types.ts:347:15 - error TS2314: Generic type 'InferPropsPathType' requires 3 type argument(s). + +347 ? InferPropsPathType +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +src/db-client/query2/types.ts:347:56 - error TS2304: Cannot find name 'L'. + +347 ? InferPropsPathType +   ~ + +src/db-client/query2/types.ts:376:5 - error TS2314: Generic type 'InferPropsPathType' requires 3 type argument(s). + +376 > = InferPropsPathType & EdgeProps, P, L> +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/aggregate/basic.ts:55:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +55 await db.query2('vote').sum('NL').get(), +   ~~~ + +test/aggregate/basic.ts:61:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +61 await db.query2('vote').sum('NL', 'AU').get(), +   ~~~ + +test/aggregate/basic.ts:72:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +72 await db.query2('vote').sum('flap.hello').get(), +   ~~~ + +test/aggregate/basic.ts:126:29 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +126 await db.query2('vote').count().get(), +   ~~~~~ + +test/aggregate/basic.ts:142:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +142 await db.query2('vote').include('IT').count().get(), +   ~~~~~~~ + +test/aggregate/basic.ts:158:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +158 await db.query2('vote').filter('NL', '=', 20).count().get(), +   ~~~~~~ + +test/aggregate/basic.ts:164:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +164 await db.query2('vote').filter('NL', '>', 255).count().get(), +   ~~~~~~ + +test/aggregate/basic.ts:225:29 - error TS2339: Property 'stddev' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +225 await db.query2('vote').stddev('NL', { mode: 'sample' }).get(), +   ~~~~~~ + +test/aggregate/basic.ts:233:29 - error TS2339: Property 'stddev' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +233 await db.query2('vote').stddev('NL', { mode: 'sample' }).get(), +   ~~~~~~ + +test/aggregate/basic.ts:241:29 - error TS2339: Property 'stddev' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +241 await db.query2('vote').stddev('NL', { mode: 'population' }).get(), +   ~~~~~~ + +test/aggregate/basic.ts:249:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +249 await db.query2('vote').sum('NL').get(), +   ~~~ + +test/aggregate/basic.ts:259:8 - error TS2339: Property 'stddev' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +259 .stddev('NL', { mode: 'population' }) +   ~~~~~~ + +test/aggregate/basic.ts:392:29 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +392 await db.query2('vote').groupBy('region').get(), +   ~~~~~~~ + +test/aggregate/basic.ts:402:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +402 await db.query2('vote').sum('NL', 'FI').groupBy('region').get(), +   ~~~ + +test/aggregate/basic.ts:421:29 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +421 await db.query2('vote').count().groupBy('region').get(), +   ~~~~~ + +test/aggregate/basic.ts:437:29 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +437 await db.query2('vote').avg('NL', 'PT', 'FI').groupBy('region').get(), +   ~~~ + +test/aggregate/basic.ts:459:29 - error TS2339: Property 'hmean' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +459 await db.query2('vote').hmean('NL', 'PT', 'FI').groupBy('region').get(), +   ~~~~~ + +test/aggregate/basic.ts:483:8 - error TS2339: Property 'stddev' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +483 .stddev('NL', 'PL', { mode: 'population' }) +   ~~~~~~ + +test/aggregate/basic.ts:504:29 - error TS2339: Property 'stddev' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +504 await db.query2('vote').stddev('NL', 'PL').groupBy('region').get(), +   ~~~~~~ + +test/aggregate/basic.ts:525:8 - error TS2339: Property 'var' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +525 .var('NL', 'PL', { mode: 'population' }) +   ~~~ + +test/aggregate/basic.ts:548:8 - error TS2339: Property 'var' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +548 .var('NL', 'PL', { mode: 'sample' }) +   ~~~ + +test/aggregate/basic.ts:560:29 - error TS2339: Property 'var' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +560 await db.query2('vote').var('NL', 'PL').groupBy('region').get(), +   ~~~ + +test/aggregate/basic.ts:570:29 - error TS2339: Property 'max' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +570 await db.query2('vote').max('NL', 'NO', 'PT', 'FI').groupBy('region').get(), +   ~~~ + +test/aggregate/basic.ts:595:29 - error TS2339: Property 'min' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +595 await db.query2('vote').min('NL', 'NO', 'PT', 'FI').groupBy('region').get(), +   ~~~ + +test/aggregate/basic.ts:915:12 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly product: { props: { readonly name: { readonly type: "string"; readonly maxBytes: 10; }; readonly flap: { ...; }; readonly product: { ...; }; }; }; readonly shel...'. + +915 .include('*') +   ~~~~~~~ + +test/aggregate/basic.ts:1002:10 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly job: { props: { readonly day: { type: "timestamp"; }; readonly tip: { ...; }; readonly employee: { ...; } & { ...; }; }; }; readonly employee: { ...; }; readonl...'. + +1002 .groupBy('day', { step: 'hour', timeZone: 'America/Sao_Paulo' }) +   ~~~~~~~ + +test/aggregate/deep.ts:57:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +57 .include((select) => select('votes').sum('NL', 'AU')) +   ~~~~~~~ + +test/aggregate/deep.ts:66:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +66 .include((select) => select('votes').groupBy('country').sum('NL', 'AU')) +   ~~~~~~~ + +test/aggregate/deep.ts:142:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +142 .include((select) => select('votes').count()) +   ~~~~~~~ + +test/aggregate/deep.ts:151:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +151 .include((select) => select('votes').groupBy('country').sum('NL', 'AU')) +   ~~~~~~~ + +test/aggregate/deep.ts:271:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly team: { props: { readonly teamName: { readonly type: "string"; }; readonly city: { ...; }; readonly players: Omit<...> & { ...; }; }; }; readonly player: { ...;...'. + +271 .include('teamName', 'city', (select) => +   ~~~~~~~ + +test/aggregate/deep.ts:357:29 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly beer: { props: { readonly name: { type: "string"; }; ... 4 more ...; readonly year: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +357 await db.query2('beer').avg('price').groupBy('type').get(), +   ~~~ + +test/aggregate/deep.ts:370:29 - error TS2339: Property 'hmean' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly beer: { props: { readonly name: { type: "string"; }; ... 4 more ...; readonly year: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +370 await db.query2('beer').hmean('price').groupBy('type').get(), +   ~~~~~ + +test/aggregate/deep.ts:422:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly movie: { props: { readonly name: { type: "string"; }; readonly genre: { ...; }; readonly actors: Omit<...> & { ...; }; readonly movie: { ...; }; }; }; readonly ...'. + +422 .include((q) => q('movies').groupBy('genre').count()) +   ~~~~~~~ + +test/aggregate/deep.ts:508:30 - error TS2339: Property 'cardinality' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly lunch: { props: { readonly week: { type: "string"; }; ... 6 more ...; readonly Fri: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +508 await db.query2('lunch').cardinality('Mon').get(), +   ~~~~~~~~~~~ + +test/aggregate/deep.ts:514:30 - error TS2339: Property 'cardinality' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly lunch: { props: { readonly week: { type: "string"; }; ... 6 more ...; readonly Fri: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +514 await db.query2('lunch').cardinality('Mon').groupBy('week').get(), +   ~~~~~~~~~~~ + +test/aggregate/deep.ts:571:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly booth: { props: { readonly company: { type: "string"; }; readonly badgesScanned: { ...; }; readonly booth: { ...; }; }; }; readonly fair: { ...; }; }; locales: ...'. + +571 .include((s) => s('booths').cardinality('badgesScanned')) +   ~~~~~~~ + +test/aggregate/deep.ts:655:31 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vehicle: { ...; } & { ...; }; readon...'. + +655 await db.query2('driver').sum('rank').groupBy('vehicle').get(), +   ~~~ + +test/aggregate/deep.ts:667:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vehicle: { ...; } & { ...; }; readon...'. + +667 .include((q) => q('trips').groupBy('vehicle').max('distance')) +   ~~~~~~~ + +test/aggregate/deep.ts:723:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly strong: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5...'. + +723 await db.query2('user').sum('friends.strong').get(), +   ~~~ + +test/aggregate/experimental.ts:90:30 - error TS2339: Property 'cardinality' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly lunch: { props: { readonly week: { type: "string"; }; ... 6 more ...; readonly Fri: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +90 await db.query2('lunch').cardinality('Mon').get(), +   ~~~~~~~~~~~ + +test/aggregate/experimental.ts:98:30 - error TS2339: Property 'cardinality' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly lunch: { props: { readonly week: { type: "string"; }; ... 6 more ...; readonly Fri: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +98 await db.query2('lunch').cardinality('Mon').groupBy('week').get(), +   ~~~~~~~~~~~ + +test/aggregate/groupBY.ts:52:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +52 await db.query2('vote').sum('NL', 'AU').groupBy('country').get(), +   ~~~ + +test/aggregate/groupBY.ts:61:29 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +61 await db.query2('vote').groupBy('country').get(), +   ~~~~~~~ + +test/aggregate/groupBY.ts:126:29 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +126 await db.query2('vote').count().groupBy('country').get(), +   ~~~~~ + +test/aggregate/groupBY.ts:241:29 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly country: { ...; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }...'. + +241 await db.query2('user').groupBy('name').sum('flap').get(), +   ~~~~~~~ + +test/aggregate/groupBY.ts:253:29 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly country: { ...; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }...'. + +253 await db.query2('user').groupBy('country').sum('flap').get(), +   ~~~~~~~ + +test/aggregate/groupBY.ts:321:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; ... 8 more ...; readonly vendorIdnumber: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +321 await db.query2('trip').sum('distance').groupBy('vendorIduint8').get(), +   ~~~ + +test/aggregate/groupBY.ts:330:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; ... 8 more ...; readonly vendorIdnumber: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +330 await db.query2('trip').sum('distance').groupBy('vendorIdint8').get(), +   ~~~ + +test/aggregate/groupBY.ts:339:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; ... 8 more ...; readonly vendorIdnumber: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +339 await db.query2('trip').sum('distance').groupBy('vendorIduint16').get(), +   ~~~ + +test/aggregate/groupBY.ts:348:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; ... 8 more ...; readonly vendorIdnumber: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +348 await db.query2('trip').sum('distance').groupBy('vendorIdint16').get(), +   ~~~ + +test/aggregate/groupBY.ts:357:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; ... 8 more ...; readonly vendorIdnumber: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +357 await db.query2('trip').sum('distance').groupBy('vendorIduint32').get(), +   ~~~ + +test/aggregate/groupBY.ts:366:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; ... 8 more ...; readonly vendorIdnumber: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +366 await db.query2('trip').sum('distance').groupBy('vendorIdint32').get(), +   ~~~ + +test/aggregate/groupBY.ts:375:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; ... 8 more ...; readonly vendorIdnumber: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +375 await db.query2('trip').sum('distance').groupBy('vendorIdnumber').get(), +   ~~~ + +test/aggregate/groupBY.ts:386:20 - error TS2448: Block-scoped variable 'db' used before its declaration. + +386 const db = await db.setSchema({ +   ~~ + + test/aggregate/groupBY.ts:386:9 + 386 const db = await db.setSchema({ +    ~~ + 'db' is declared here. + +test/aggregate/multiple.ts:77:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +77 await db.query2('vote').sum('NL').sum('NO').max('NL').min('NL').get(), +   ~~~ + +test/aggregate/multiple.ts:85:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +85 .sum('NL') +   ~~~ + +test/aggregate/multiple.ts:106:6 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +106 .sum('NL') +   ~~~ + +test/aggregate/multiple.ts:139:6 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +139 .sum('NL') +   ~~~ + +test/aggregate/multiple.ts:206:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +206 await db.query2('vote').sum('NL').count().sum('PT').stddev('NO').get(), +   ~~~ + +test/aggregate/multiple.ts:224:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly votes: Omit<...> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +224 .sum('NL') +   ~~~ + +test/aggregate/overall.perf.ts:41:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly beer: { props: { readonly name: { type: "string"; }; ... 4 more ...; readonly year: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +41 await db.query2('beer').sum('price').get() +   ~~~ + +test/aggregate/overall.perf.ts:45:29 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly beer: { props: { readonly name: { type: "string"; }; ... 4 more ...; readonly year: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +45 await db.query2('beer').groupBy('year').get() +   ~~~~~~~ + +test/aggregate/overall.perf.ts:49:29 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly beer: { props: { readonly name: { type: "string"; }; ... 4 more ...; readonly year: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +49 await db.query2('beer').groupBy('type').get() +   ~~~~~~~ + +test/aggregate/overall.perf.ts:53:29 - error TS2339: Property 'max' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly beer: { props: { readonly name: { type: "string"; }; ... 4 more ...; readonly year: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +53 await db.query2('beer').max('price').groupBy('type').get() +   ~~~ + +test/aggregate/overall.perf.ts:72:41 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly sequence: { props: { readonly bla: { type: "uint32"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +72 const q = await db.query2('sequence').count().get() +   ~~~~~ + +test/aggregate/overall.perf.ts:316:8 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly audience: { partial?: boolean | undefined; ... 4 more ...; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +316 .avg(...countries) +   ~~~ + +test/aggregate/overall.perf.ts:337:8 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly audience: { partial?: boolean | undefined; ... 4 more ...; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +337 .avg(...countries) +   ~~~ + +test/aggregate/overall.perf.ts:349:8 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly audience: { partial?: boolean | undefined; ... 4 more ...; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +349 .avg(...countries) +   ~~~ + +test/aggregate/overall.perf.ts:361:8 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly audience: { partial?: boolean | undefined; ... 4 more ...; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +361 .avg(...countries) +   ~~~ + +test/aggregate/overall.perf.ts:373:8 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly audience: { partial?: boolean | undefined; ... 4 more ...; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +373 .avg(...countries) +   ~~~ + +test/aggregate/temporal.ts:32:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +32 await db.query2('trip').sum('distance').groupBy('pickup', 'day').get(), +   ~~~ + +test/aggregate/temporal.ts:43:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +43 .sum('distance') +   ~~~ + +test/aggregate/temporal.ts:54:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +54 await db.query2('trip').sum('distance').groupBy('pickup', 'hour').get(), +   ~~~ + +test/aggregate/temporal.ts:63:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +63 await db.query2('trip').sum('distance').groupBy('pickup', 'dow').get(), +   ~~~ + +test/aggregate/temporal.ts:72:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +72 await db.query2('trip').sum('distance').groupBy('pickup', 'isoDOW').get(), +   ~~~ + +test/aggregate/temporal.ts:81:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +81 await db.query2('trip').sum('distance').groupBy('pickup', 'doy').get(), +   ~~~ + +test/aggregate/temporal.ts:90:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +90 await db.query2('trip').sum('distance').groupBy('pickup', 'month').get(), +   ~~~ + +test/aggregate/temporal.ts:99:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +99 await db.query2('trip').sum('distance').groupBy('pickup', 'year').get(), +   ~~~ + +test/aggregate/temporal.ts:144:6 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +144 .sum('distance') +   ~~~ + +test/aggregate/temporal.ts:179:6 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +179 .sum('distance') +   ~~~ + +test/aggregate/temporal.ts:260:42 - error TS2339: Property 'cardinality' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly lunch: { props: { readonly day: { type: "timestamp"; }; readonly eaters: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +260 const total = await db.query2('lunch').cardinality('eaters').get() +   ~~~~~~~~~~~ + +test/aggregate/temporal.ts:267:6 - error TS2339: Property 'cardinality' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly lunch: { props: { readonly day: { type: "timestamp"; }; readonly eaters: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +267 .cardinality('eaters') +   ~~~~~~~~~~~ + +test/aggregate/temporal.ts:295:6 - error TS2339: Property 'cardinality' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly lunch: { props: { readonly day: { type: "timestamp"; }; readonly eaters: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +295 .cardinality('eaters') +   ~~~~~~~~~~~ + +test/aggregate/temporal.ts:345:29 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +345 await db.query2('trip').sum('distance').groupBy('pickup').get(), +   ~~~ + +test/aggregate/temporal.ts:360:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +360 .sum('distance') +   ~~~ + +test/aggregate/temporal.ts:374:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +374 .sum('distance') +   ~~~ + +test/aggregate/temporal.ts:420:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +420 .sum('distance') +   ~~~ + +test/aggregate/temporal.ts:437:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +437 .sum('distance') +   ~~~ + +test/aggregate/temporal.ts:453:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly trip: { props: { readonly pickup: { type: "timestamp"; }; readonly dropoff: { ...; }; readonly distance: { ...; }; readonly vendorId: { ...; }; }; }; }; locales...'. + +453 .sum('distance') +   ~~~ + +test/aggregate/validation.ts:29:29 - error TS2339: Property 'max' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly vote: { props: { readonly region: { readonly type: "string"; }; readonly AU: { ...; }; readonly FI: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +29 await db.query2('vote').max('AU', 'FI').groupBy('region').get(), +   ~~~ + +test/aggregate/validation.ts:39:29 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly vote: { props: { readonly region: { readonly type: "string"; }; readonly AU: { ...; }; readonly FI: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +39 await db.query2('vote').avg('AU', 'FI').groupBy('region').get(), +   ~~~ + +test/aggregate/validation.ts:50:29 - error TS2339: Property 'hmean' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly vote: { props: { readonly region: { readonly type: "string"; }; readonly AU: { ...; }; readonly FI: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +50 await db.query2('vote').hmean('AU', 'FI').groupBy('region').get(), +   ~~~~~ + +test/aggregate/validation.ts:121:30 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly movie: { props: { readonly name: { type: "string"; }; readonly year: { ...; }; readonly genre: { ...; }; readonly actors: Omit<...> & { ...; }; readonly movies:...'. + +121 await db.query2('movie').groupBy('year').count().get(), +   ~~~~~~~ + +test/aggregate/validation.ts:134:30 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly movie: { props: { readonly name: { type: "string"; }; readonly year: { ...; }; readonly genre: { ...; }; readonly actors: Omit<...> & { ...; }; readonly movies:...'. + +134 await db.query2('movie').groupBy('genre').min('year').get(), +   ~~~~~~~ + +test/alias/alias.ts:43:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly externalId: { ...; }; readonly potato: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +43 deepEqual(await db.query2('user').filter('externalId', '=', 'cool').get(), [ +   ~~~~~~ + +test/alias/alias.ts:52:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly externalId: { ...; }; readonly potato: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +52 await db.query2('user').filter('externalId', 'includes', 'cool').get(), +   ~~~~~~ + +test/alias/alias.ts:156:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly email: { ...; }; readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; };...'. + +156 await db.query2('user').include('email', 'friends').get(), +   ~~~~~~~ + +test/alias/alias.ts:191:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly email: { ...; }; readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; };...'. + +191 await db.query2('user').include('friends', 'email').get(), +   ~~~~~~~ + +test/alias/alias.ts:210:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly email: { ...; }; readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; };...'. + +210 .filter('email', 'includes', '2', { lowerCase: true }) +   ~~~~~~ + +test/alias/alias.ts:545:48 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "alias"; }; readonly role: { ...; } & { ...; }; }; }; readonly role: { ...; }; }; locales: Partial<...>; }, ... 5 more ....'. + +545 deepEqual(await db.query2('role', adminRole).include('name', 'users').get(), { +   ~~~~~~~ + +test/alias/alias.ts:605:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "alias"; }; readonly role: { ...; } & { ...; }; readonly projects: { ...; }; }; }; readonly project: { ...; }; readonly ...'. + +605 .include('id') +   ~~~~~~~ + +test/alias/alias.ts:615:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "alias"; }; readonly role: { ...; } & { ...; }; readonly projects: { ...; }; }; }; readonly project: { ...; }; readonly ...'. + +615 .include('name', 'users', 'users.$role') +   ~~~~~~~ + +test/alias/filter.ts:33:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly plot: { props: { readonly uuid: { type: "alias"; }; readonly slug: { ...; }; readonly name: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; },...'. + +33 .filter('age', '>', 10) +   ~~~~~~ + +test/alignModify.ts:45:24 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +45 const res = await db.query2('user').include('friends', 'str').get() +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/based-client/addSpecs.ts:57:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +57 client.query2('cookie').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/authorize.ts:139:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +139 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/authorize.ts:156:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +156 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/authorize.ts:197:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +197 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/authorize.ts:242:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +242 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/authorizeOnSpec.ts:94:16 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +94 await client.query2('slax').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/browser/index.ts:78:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +78 client.query2('counter').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/browser/index.ts:89:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +89 client.query2('text').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/dbQuery.ts:32:23 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +32 return db.query2('user').subscribe(update) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/based-client/dbQuery.ts:38:23 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +38 return db.query2('user').get() +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/based-client/dbQuery.ts:73:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +73 client.query2('users').subscribe((res) => nextResolve?.(res)) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/dbQuery.ts:74:13 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +74 clientOld.query2('users').subscribe((res) => nextResolveOld?.(res)) +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/error.ts:140:16 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +140 coreClient.query2('counter', {}).subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/error.ts:177:18 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +177 coreClient.query2('errorTimer', {}).subscribe(() => {}, reject), +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/functions.ts:3:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +3 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/functionsPerf.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/get.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/get.ts:3:44 - error TS2307: Cannot find module '@based/errors' or its corresponding type declarations. + +3 import { BasedError, BasedErrorCode } from '@based/errors' +   ~~~~~~~~~~~~~~~ + +test/based-client/get.ts:36:37 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +36 const bla = await based.query2('any', payload).get() +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:83:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +83 return based.query2('checkPayload', payload).get() +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:108:16 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +108 coreClient.query2('any', 'xxx').subscribe((res) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:113:33 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +113 const res1 = await coreClient.query2('any', 'xxx').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:115:33 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +115 const res2 = await coreClient.query2('any', 'xxx').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:133:32 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +133 const str = await coreClient.query2('any', 'xxx').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:137:32 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +137 const num = await coreClient.query2('any', 19).get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:141:37 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +141 const boolTrue = await coreClient.query2('any', true).get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:145:38 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +145 const boolFalse = await coreClient.query2('any', false).get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:151:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +151 .query2('checkPayload', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:172:25 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +172 t.is(await coreClient.query2('counter').get(), 0) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:176:25 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +176 t.is(await coreClient.query2('counter').get(), 0) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:181:25 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +181 t.is(await coreClient.query2('counter').get(), 0) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:188:25 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +188 t.is(await coreClient.query2('counter-cached').get(), 0) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:189:25 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +189 t.is(await coreClient.query2('counter-cached').get(), 0) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:222:16 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +222 coreClient.query2('counter').get(), +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:229:37 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +229 await t.notThrowsAsync(coreClient.query2('counter').get()) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/get.ts:270:26 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +270 const g = await client.query2('flap').getWhen((d) => d.status) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/hooks.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/hooks.ts:112:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +112 return based.query2('myobs').get() +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/hooks.ts:119:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +119 return based.query2('myobs').subscribe(update) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/hooks.ts:138:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +138 const close = client.query2('myobs', { bla: true }).subscribe(() => {}) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/hooks.ts:149:16 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +149 await client.query2('myobs').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/hooks.ts:157:25 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +157 const close2 = client.query2('myobs2', { bla: true }).subscribe(() => {}) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/http.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/httpGet.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/installFunctions.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/lazyConnect.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/lazyConnect.ts:73:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +73 const close = client.query2('cookie').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/memLeaks.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/messages.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/messages.ts:62:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +62 const close = client.query2('counter').subscribe(() => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedChannelSimple.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/nestedFunctions.ts:3:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +3 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/nestedFunctions.ts:33:29 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +33 const closeX = coreClient.query2('counter').subscribe(() => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:45:28 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +45 const close = coreClient.query2('obsWithNested').subscribe(() => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:50:29 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +50 const close2 = coreClient.query2('obsWithNested', 'json').subscribe(() => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:56:32 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +56 const bla = await coreClient.query2('obsWithNested', 'json').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:67:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +67 .query2('obsWithNestedLvl2', 'glurk') +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:72:33 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +72 const bla2 = await coreClient.query2('obsWithNestedLvl2', 'glakkel').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:109:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +109 return based.query2('obsWithNested', 'json').subscribe(update) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:118:16 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +118 .query2(payload === 'json' ? 'objectCounter' : 'counter', payload) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:167:25 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +167 await based.query2('obsWithNested', 'json', context).get() +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:202:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +202 return based.query2('obsWithNested', 'json').subscribe(update) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:211:16 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +211 .query2(payload === 'json' ? 'objectCounter' : 'counter', payload) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctions.ts:260:25 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +260 await based.query2('obsWithNested', 'json').get() +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctionsError.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/nestedFunctionsError.ts:93:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +93 return based.query2('blabla').subscribe(update) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedFunctionsError.ts:108:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +108 client.query2('hello').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedQuerySimple.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/nestedQuerySimple.ts:47:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +47 return based.query2('nested').subscribe((r) => { +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedQuerySimple.ts:58:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +58 client.query2('bla').subscribe(() => {}) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/nestedQuerySimple.ts:60:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +60 client.query2('bla', { x: 1 }).subscribe(() => {}) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/null.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/null.ts:46:22 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +46 return b.query2('null').subscribe(update) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/null.ts:60:28 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +60 const val = await client.query2('null').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/null.ts:66:29 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +66 const val2 = await client.query2('nestedNull').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/null.ts:71:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +71 const close = client.query2('null').subscribe((v) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/payloadPerf.ts:85:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +85 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/payloadPerf.ts:110:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +110 .query2('counterUint8', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/persist.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/persist.ts:73:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +73 .query2( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/persist.ts:85:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +85 .query2( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/persist.ts:111:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +111 .query2( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/persist.ts:128:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +128 .query2( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:3:34 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +3 import { wait, readStream } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/protocolContentType.ts:245:12 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +245 client.query2('errorQuery').subscribe((d, err) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:248:15 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +248 clientOld.query2('errorQuery').subscribe((d, err) => { +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:251:12 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +251 client.query2('nullQuery').subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:254:15 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +254 clientOld.query2('nullQuery').subscribe((d) => { +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:257:12 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +257 client.query2('undefinedQuery').subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:260:15 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +260 clientOld.query2('undefinedQuery').subscribe((d) => { +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:263:12 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +263 client.query2('numberQuery').subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:266:15 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +266 clientOld.query2('numberQuery').subscribe((d) => { +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:270:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +270 .query2('stringQuery', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:277:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +277 .query2('stringQuery', { +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:284:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +284 .query2('bigStringQuery', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:291:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +291 .query2('bigStringQuery', { +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:298:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +298 .query2('flap', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:305:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +305 .query2('flap', { +   ~~~~~~ + + node_modules/@based/client-old/dist/src/index.d.ts:141:5 + 141 query(name: string, payload?: any, opts?: QueryOptions): BasedQuery; +    ~~~~~ + 'query' is declared here. + +test/based-client/protocolContentType.ts:312:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +312 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/query.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/query.ts:51:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +51 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/query.ts:59:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +59 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCache.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/queryCache.ts:54:8 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +54 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCache.ts:68:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +68 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/queryCtxBound.ts:70:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +70 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:108:18 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +108 close = client.query2('counter', 'error').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:121:28 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +121 t.deepEqual(await client.query2('counter').get(), { userId: 1, cnt: 0 }) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:122:28 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +122 t.deepEqual(await client.query2('counter', 'bla').get(), { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:127:30 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +127 t.throwsAsync(() => client.query2('counter', 'error').get()) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:187:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +187 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:280:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +280 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:318:18 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +318 close = client.query2('counter', 'error').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:331:28 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +331 t.deepEqual(await client.query2('counter').get(), { userId: 1, cnt: 0 }) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:332:28 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +332 t.deepEqual(await client.query2('counter', 'bla').get(), { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:337:30 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +337 t.throwsAsync(() => client.query2('counter', 'error').get()) +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:417:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +417 const close = client.query2('counter').subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:421:26 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +421 const close2 = client2.query2('counter').subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:477:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +477 return based.query2('nest', payload, ctx).subscribe(update) +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:492:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +492 const close = client.query2('counter').subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:545:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +545 return based.query2('nest', payload, ctx).get() +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/queryCtxBound.ts:613:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +613 return based.query2('nest', payload, ctx).get() +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/queryDiff.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/queryDiff.ts:62:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +62 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/queryErrorHandling.ts:46:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +46 return based.query2('nested').subscribe( +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:58:26 - error TS2551: Property 'query2' does not exist on type 'BasedFunctionClient'. Did you mean 'query'? + +58 return based.query2('nested').subscribe(async (r) => { +   ~~~~~~ + + src/functions/client.ts:17:12 + 17 abstract query( +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:72:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +72 client.query2('bla').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:78:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +78 client.query2('bla', { x: 1 }).subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:84:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +84 client.query2('bla', new Uint8Array(1000)).subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:89:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +89 client.query2('asyncBla', new Uint8Array(1000)).subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:94:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +94 client.query2('asyncBla', new Uint8Array(1000)).subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:145:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +145 const close = client.query2('bla', { x: 1 }).subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryErrorHandling.ts:221:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +221 const close = client.query2('bla', { x: 1 }).subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryInstancePerf.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/queryInstancePerf.ts:76:10 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +76 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryReusedDiff.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/queryReusedDiff.ts:62:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +62 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryReusedDiff.ts:73:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +73 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryUint8Payload.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/queryUint8Payload.ts:66:24 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +66 const close = client.query2('counter', flap).subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryUint8Payload.ts:70:25 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +70 const close2 = client.query2('counter', flap2).subscribe((d) => { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/queryUint8Payload.ts:84:26 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +84 const x = await client.query2('bla', flap2).get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/rateLimit.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/reEvaluateAuthState.ts.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/reEvaluateAuthState.ts.ts:83:14 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +83 client.query2('counter').subscribe( +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/relay.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/relay.ts:125:26 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +125 const x = await client.query2('counter').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/relay.ts:142:30 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +142 const count = await client.query2('flap').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/reload.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/ssr.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/ssr.ts:51:16 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +51 await client.query2('counter').get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/ssr.ts:52:16 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +52 await client.query2('counter', { bla: true }).get() +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/ssr.ts:71:12 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +71 client.query2('counter'), +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/stream.ts:2:34 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait, readStream } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/streamChunks.ts:4:28 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +4 import { readStream } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/streamHttp.ts:2:34 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait, readStream } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/streamNested.ts:2:34 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { readStream, wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/throttle.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/throttle.ts:54:6 - error TS2551: Property 'query2' does not exist on type 'BasedClient'. Did you mean 'query'? + +54 .query2('counter', { +   ~~~~~~ + + src/client/index.ts:389:3 + 389 query(name: string, payload?: any, opts?: QueryOptions): BasedClientQuery { +    ~~~~~ + 'query' is declared here. + +test/based-client/verifyAuthState.ts:2:22 - error TS2307: Cannot find module '@based/utils' or its corresponding type declarations. + +2 import { wait } from '@based/utils' +   ~~~~~~~~~~~~~~ + +test/based-client/verifyAuthState.ts:80:8 - error TS18048: 'err' is possibly 'undefined'. + +80 t.is(err.message, 'Token is too small') +   ~~~ + +test/bench.perf.ts:54:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +54 db.query2('test').filter('x', '=', 0).range(1, 10_001).get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/bench.perf.ts:67:12 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +67 db.query2('test', i + 1) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/bench.perf.ts:113:34 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +113 client1.query2('test').filter('x', '=', 0).range(1, 10_001).get(), +   ~~~~~~ + +test/bench.perf.ts:128:36 - error TS18047: 'cur' is possibly 'null'. + +128 ).reduce((prev, cur) => prev + cur.length, 0) +   ~~~ + +test/bench.perf.ts:128:40 - error TS2339: Property 'length' does not exist on type '{ id: any; }'. + +128 ).reduce((prev, cur) => prev + cur.length, 0) +   ~~~~~~ + +test/benchmarks/utils.ts:1:25 - error TS2307: Cannot find module '@based/db' or its corresponding type declarations. + +1 import { BasedDb } from '@based/db' +   ~~~~~~~~~~~ + +test/bigNode.perf.ts:37:5 - error TS2353: Object literal may only specify known properties, and 'f0' does not exist in type '{ ref?: number | BasedModify | { id: number | BasedModify; } | null | undefined; }'. + +37 f0: 10, +   ~~ + +test/bigNode.perf.ts:46:5 - error TS2353: Object literal may only specify known properties, and 'f0' does not exist in type '{ ref?: number | BasedModify | { id: number | BasedModify; } | null | undefined; }'. + +46 f0: 10, +   ~~ + +test/bigNode.perf.ts:53:21 - error TS2339: Property 'f4092' does not exist on type '{ id: number; }'. + +53 deepEqual(mega[1].f4092, 1337) +   ~~~~~ + +test/bigNode.perf.ts:54:21 - error TS2339: Property 'f100' does not exist on type '{ id: number; }'. + +54 deepEqual(giga[1].f100, 1337) +   ~~~~ + +test/bigNode.perf.ts:60:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly mega: { props: { readonly ref: { readonly type: "reference"; readonly ref: "giga"; readonly prop: "ref"; } & { ...; }; }; }; readonly giga: { ...; }; }; locales...'. + +60 const megaRefQ = await db.query2('mega').include('ref').get() +   ~~~~~~~ + +test/bigNode.perf.ts:63:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly mega: { props: { readonly ref: { readonly type: "reference"; readonly ref: "giga"; readonly prop: "ref"; } & { ...; }; }; }; readonly giga: { ...; }; }; locales...'. + +63 const gigaRef = await db.query2('giga').include('ref').get() +   ~~~~~~~ + +test/bigNode.perf.ts:70:50 - error TS2339: Property 'def' does not exist on type '{ id: number; }[]'. + +70 const serializedSchema = serialize(megaInclude.def.readSchema!) +   ~~~ + +test/bigNode.perf.ts:75:17 - error TS2339: Property 'result' does not exist on type '{ id: number; }[]'. + +75 megaInclude.result, +   ~~~~~~ + +test/bigNode.perf.ts:76:17 - error TS2339: Property 'result' does not exist on type '{ id: number; }[]'. + +76 megaInclude.result.byteLength - 4, +   ~~~~~~ + +test/bigNode.perf.ts:82:56 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly mega: { props: { readonly ref: { readonly type: "reference"; readonly ref: "giga"; readonly prop: "ref"; } & { ...; }; }; }; readonly giga: { ...; }; }; locales...'. + +82 const megaIncludeSelective = await db.query2('mega').include('f4092').get() +   ~~~~~~~ + +test/binary.ts:21:5 - error TS2322: Type 'Uint32Array' is not assignable to type 'Uint8Array'. + The types of 'filter(...).findLast' are incompatible between these types. + Type '{ (predicate: (value: number, index: number, array: Uint32Array) => value is S, thisArg?: any): S | undefined; (predicate: (value: number, index: number, array: Uint32Array<...>) => unknown, thisArg?: any): number | undefined; }' is not assignable to type '{ (predicate: (value: number, index: number, array: Uint8Array) => value is S, thisArg?: any): S | undefined; (predicate: (value: number, index: number, array: Uint8Array<...>) => unknown, thisArg?: any): number | undefined; }'. + Types of parameters 'predicate' and 'predicate' are incompatible. + Type '(value: number, index: number, array: Uint8Array) => unknown' is not assignable to type '(value: number, index: number, array: Uint32Array) => value is any'. + Signature '(value: number, index: number, array: Uint8Array): unknown' must be a type predicate. + +21 file: new Uint32Array([1, 2, 3, 4]), +   ~~~~ + +test/binary.ts:51:9 - error TS2531: Object is possibly 'null'. + +51 equal((await db.query2('user', id2).get()).file.length, italyBytes.byteLength) +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/binary.ts:69:20 - error TS2531: Object is possibly 'null'. + +69 const checksum = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/binary.ts:69:59 - error TS2339: Property 'checksum' does not exist on type '{ id: number; article: Uint8Array; }'. + +69 const checksum = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~ + +test/binary.ts:75:21 - error TS2531: Object is possibly 'null'. + +75 const checksum2 = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/binary.ts:75:60 - error TS2339: Property 'checksum' does not exist on type '{ id: number; article: Uint8Array; }'. + +75 const checksum2 = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~ + +test/boolean.ts:34:41 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +34 deepEqual(await client.query2('user').filter('isNice', '=', true).get(), [ +   ~~~~~~ + +test/boolean.ts:38:41 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +38 deepEqual(await client.query2('user').filter('isNice').get(), [ +   ~~~~~~ + +test/boolean.ts:42:41 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +42 deepEqual(await client.query2('user').filter('isNice', '=', false).get(), [ +   ~~~~~~ + +test/capped.ts:131:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly latestArticles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +131 deepEqual(await client.query2('user', user).include('**').get(), { +   ~~~~~~~ + +test/cardinality.ts:45:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +45 .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') +   ~~~~~~~ + +test/cardinality.ts:60:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +60 .include('myUniqueValuesCount') +   ~~~~~~~ + +test/cardinality.ts:96:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +96 .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') +   ~~~~~~~ + +test/cardinality.ts:107:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +107 .include('myUniqueValuesCountFromArray') +   ~~~~~~~ + +test/cardinality.ts:121:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +121 .include('myUniqueValuesCount') +   ~~~~~~~ + +test/cardinality.ts:153:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +153 .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') +   ~~~~~~~ + +test/cardinality.ts:183:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +183 .filter('myUniqueValuesCount', '=', 11) +   ~~~~~~ + +test/cardinality.ts:214:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +214 .filter('id', '>=', 3) +   ~~~~~~ + +test/cardinality.ts:259:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +259 .filter('id', '>=', 3) +   ~~~~~~ + +test/cardinality.ts:279:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +279 .filter('id', '>=', 3) +   ~~~~~~ + +test/cardinality.ts:299:9 - error TS2322: Type 'Uint8Array' is not assignable to type 'string | string[] | undefined'. + Type 'Uint8Array' is missing the following properties from type 'string[]': pop, push, concat, shift, and 6 more. + +299 $undeftokens: xxHash64(ENCODER.encode('lala')), +   ~~~~~~~~~~~~ + +test/cardinality.ts:307:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly myUniqueValuesCount: { ...; }; readonly myUniqueValuesCountFromArray: { ...; }; readonly contri...'. + +307 .filter('id', '>=', 3) +   ~~~~~~ + +test/cardinality.ts:348:30 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly store: { props: { readonly name: { type: "string"; }; readonly visitors: { ...; }; readonly visits: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +348 await db.query2('store').include('visitors').get(), +   ~~~~~~~ + +test/cardinality.ts:365:30 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly store: { props: { readonly name: { type: "string"; }; readonly visitors: { ...; }; readonly visits: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +365 await db.query2('store').include('visitors').get(), +   ~~~~~~~ + +test/cardinality.ts:404:42 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly store: { props: { readonly name: { type: "string"; }; readonly customers: Omit<...> & { ...; }; }; }; readonly customer: { ...; }; }; locales: Partial<...>; }, ...'. + +404 const pb = await db.query2('customer').include('productsBought').get() +   ~~~~~~~ + +test/cardinality.ts:417:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly store: { props: { readonly name: { type: "string"; }; readonly customers: Omit<...> & { ...; }; }; }; readonly customer: { ...; }; }; locales: Partial<...>; }, ...'. + +417 const pbr = await db.query2('store').include('*', '**').get() +   ~~~~~~~ + +test/clientServer.perf.ts:43:35 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +43 client.query2('user').sort('name').include('name', 'users').get(), +   ~~~~ + +test/clientServer.perf.ts:54:53 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +54 const allUsers1 = await clients[0].query2('user').range(0, 100_000).get() +   ~~~~~ + +test/clientServer.ts:27:14 - error TS2353: Object literal may only specify known properties, and 'name' does not exist in type '{ id: any; }'. + +27 { id: 1, name: 'youzi' }, +   ~~~~ + +test/clientServer.ts:28:14 - error TS2353: Object literal may only specify known properties, and 'name' does not exist in type '{ id: any; }'. + +28 { id: 2, name: 'jamez' }, +   ~~~~ + +test/clientServer.ts:40:14 - error TS2353: Object literal may only specify known properties, and 'age' does not exist in type '{ id: any; }'. + +40 { id: 1, age: 0 }, +   ~~~ + +test/clientServer.ts:41:14 - error TS2353: Object literal may only specify known properties, and 'age' does not exist in type '{ id: any; }'. + +41 { id: 2, age: 0 }, +   ~~~ + +test/clientServer.ts:80:39 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +80 await client1.query2('user', res).include('*', '**').get(), +   ~~~~~~~ + +test/colvec.ts:63:11 - error TS2304: Cannot find name 'db'. + +63 await db +   ~~ + +test/colvec.ts:91:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly col: { readonly blockCapacity: 10000; readonly insertOnly: true; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +91 deepEqual(await client.query2('col').include('str').get(), [ +   ~~~~~~~ + +test/colvec.ts:118:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly col: { readonly blockCapacity: 10000; readonly insertOnly: true; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +118 deepEqual(await client.query2('col').include('str').get(), [ +   ~~~~~~~ + +test/concurrency.perf.ts:46:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly friends: Omit<{ ...; }, "items"> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +46 .include((s) => s('friends').range(0, 10)) +   ~~~~~~~ + +test/concurrency.perf.ts:112:10 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +112 db.query2('t').search('contribution', 's').include('i').get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/copy.ts:63:12 - error TS2339: Property 'copy' does not exist on type 'BasedDb'. + +63 await db.copy('edition', edition1, { +   ~~~~ + +test/copy.ts:73:19 - error TS2339: Property 'copy' does not exist on type 'BasedDb'. + +73 return db.copy('sequence', id, { +   ~~~~ + +test/copy.ts:80:18 - error TS2339: Property 'copy' does not exist on type 'BasedDb'. + +80 db.copy('page', id, { +   ~~~~ + +test/copy.ts:94:6 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +94 .query2('edition') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/crc32c.ts:115:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly transaction: { props: { readonly myHash: { ...; }; }; }; readonly transactionN: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +115 equal(await db.query2('transaction').include('id', 'myHash').get(), [ +   ~~~~~~~ + +test/crc32c.ts:123:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly transaction: { props: { readonly myHash: { ...; }; }; }; readonly transactionN: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +123 await db.query2('transactionN').include('id', 'myNativeMadeHash').get(), +   ~~~~~~~ + +test/db-schema/schemaUpdates.ts:45:50 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +45 const ageSorted = await client2.query2('user').sort('age', 'asc').get() +   ~~~~ + +test/db-schema/schemaUpdates.ts:55:51 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +55 const ageSorted2 = await client1.query2('user').sort('age', 'asc').get() +   ~~~~ + +test/db-schema/schemaUpdates.ts:70:51 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +70 const ageSorted3 = await client1.query2('user').sort('age', 'asc').get() +   ~~~~ + +test/db-schema/schemaUpdates.ts:180:45 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +180 const all = (await client2.query2('user').range(0, 1000_000).get())! +   ~~~~~ + +test/default.ts:46:49 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly friends: Omit<{ ...; }, "items"> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +46 deepEqual(await client.query2('user', userId).include('friends.**').get(), { +   ~~~~~~~ + +test/default.ts:79:49 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly friends: Omit<{ ...; }, "items"> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +79 deepEqual(await client.query2('user', userId).include('friends.**').get(), { +   ~~~~~~~ + +test/default.ts:106:9 - error TS2552: Cannot find name 'clint'. Did you mean 'client'? + +106 await clint.update('user', userId, { +   ~~~~~ + + test/default.ts:13:9 + 13 const client = await testDb(t, { +    ~~~~~~ + 'client' is declared here. + +test/default.ts:112:49 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly friends: Omit<{ ...; }, "items"> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +112 deepEqual(await client.query2('user', userId).include('friends.**').get(), { +   ~~~~~~~ + +test/default.ts:174:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly avatar: { readonly type: "binary"; readonly default: Uint8Array<...>; }; readonly name: { ...; }; readonly flap: { ...; }; }; }; }; lo...'. + +174 await db.query2('user', userId).include('*', '**').get(), +   ~~~~~~~ + +test/default.ts:195:38 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly avatar: { readonly type: "binary"; readonly default: Uint8Array<...>; }; readonly name: { ...; }; readonly flap: { ...; }; }; }; }; lo...'. + +195 await db.query2('user', userId2).include('*', '**').get(), +   ~~~~~~~ + +test/default.ts:249:13 - error TS2322: Type 'string' is not assignable to type 'undefined'. + +249 default: 'default-slug', +   ~~~~~~~ + +test/default.ts:257:13 - error TS2322: Type '{ ref: string; prop: string; }' is not assignable to type 'Omit, keyof Base> | (Omit, keyof Base> & Omit, keyof Base>)'. + Type '{ ref: string; prop: string; }' is not assignable to type 'Omit, keyof Base> & Omit, keyof Base>'. + Property 'type' is missing in type '{ ref: string; prop: string; }' but required in type 'Omit, keyof Base>'. + +257 items: { +   ~~~~~ + + src/schema/schema/reference.ts:34:22 + 34 RequiredIfStrict<{ type: 'reference' }, strict> & { +    ~~~~ + 'type' is declared here. + +test/default.ts:284:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +284 await db.query2('user', userId).include('*', '**').get(), +   ~~~~~~~ + +test/default.ts:317:7 - error TS2353: Object literal may only specify known properties, and 'label' does not exist in type '{ id: any; }'. + +317 label: { en: 'Default Label' }, +   ~~~~~ + +test/delete.perf.ts:58:22 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +58 deepEqual(await db.query2('user').get(), []) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/delete.perf.ts:79:22 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +79 deepEqual(await db.query2('article').get(), []) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/delete.perf.ts:114:22 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +114 deepEqual(await db.query2('article').get(), []) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/delete.ts:39:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly nurp: { props: { readonly email: { readonly type: "string"; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +39 deepEqual(await db.query2('nurp').include('email').get(), [ +   ~~~~~~~ + +test/delete.ts:49:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly nurp: { props: { readonly email: { readonly type: "string"; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +49 deepEqual(await db.query2('user').include('email').get(), []) +   ~~~~~~~ + +test/delete.ts:59:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly nurp: { props: { readonly email: { readonly type: "string"; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +59 deepEqual(await db.query2('nurp').include('email').get(), [ +   ~~~~~~~ + +test/delete.ts:100:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly nurp: { props: { readonly email: { readonly type: "string"; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +100 deepEqual(await db.query2('nurp').include('email').get(), [ +   ~~~~~~~ + +test/delete.ts:150:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly nurp: { props: { readonly email: { readonly type: "string"; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +150 deepEqual(await db.query2('nurp').include('email').get(), [ +   ~~~~~~~ + +test/delete.ts:213:42 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly age: { ...; }; readonly email: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +213 deepEqual(await client2.query2('user').include('id').get(), [{ id: 2 }]) +   ~~~~~~~ + +test/delete.ts:214:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly age: { ...; }; readonly email: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +214 deepEqual(await client.query2('user').include('id').get(), [{ id: 2 }]) +   ~~~~~~~ + +test/dependent.ts:66:40 - error TS2345: Argument of type 'string' is not assignable to parameter of type '"page" | "sequence" | "edition" | "show" | "item"'. + +66 const len = (await client.query2(type).get()).length +   ~~~~ + + src/db-client/index.ts:112:3 + 112 query2( +    ~~~~~~ + The call would have succeeded against this implementation, but implementation signatures of overloads are not externally visible. + +test/dependent.ts:76:32 - error TS2345: Argument of type 'string' is not assignable to parameter of type '"page" | "sequence" | "edition" | "show" | "item"'. + +76 equal((await client.query2(type).get()).length, 0) +   ~~~~ + + src/db-client/index.ts:112:3 + 112 query2( +    ~~~~~~ + The call would have succeeded against this implementation, but implementation signatures of overloads are not externally visible. + +test/dependent.ts:108:45 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "parent", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "parent", id: number | (Partial; }, "parent">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "parent">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly child: { ...; }; }; locales: Partial<...>; }, "parent">> & { ...; }'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +108 deepEqual(await client.query2('parent', head).include('**').get(), { +   ~~~~ + + test/dependent.ts:108:45 + 108 deepEqual(await client.query2('parent', head).include('**').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/dependent.ts:108:51 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly parent: { props: { readonly children: Omit<...> & { ...; }; }; }; readonly child: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +108 deepEqual(await client.query2('parent', head).include('**').get(), { +   ~~~~~~~ + +test/dependent.ts:117:45 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "parent", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "parent", id: number | (Partial; }, "parent">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "parent">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly child: { ...; }; }; locales: Partial<...>; }, "parent">> & { ...; }'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +117 deepEqual(await client.query2('parent', head).include('**').get(), { +   ~~~~ + + test/dependent.ts:117:45 + 117 deepEqual(await client.query2('parent', head).include('**').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/dependent.ts:117:51 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly parent: { props: { readonly children: Omit<...> & { ...; }; }; }; readonly child: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +117 deepEqual(await client.query2('parent', head).include('**').get(), { +   ~~~~~~~ + +test/dependent.ts:158:42 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly human: { props: { readonly name: { readonly type: "string"; readonly maxBytes: 8; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ...'. + +158 deepEqual(await client.query2('human').include('**').get(), [ +   ~~~~~~~ + +test/dependent.ts:201:42 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly human: { props: { readonly name: { readonly type: "string"; readonly maxBytes: 8; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ...'. + +201 deepEqual(await client.query2('human').include('**').get(), [ +   ~~~~~~~ + +test/dependent.ts:223:42 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly human: { props: { readonly name: { readonly type: "string"; readonly maxBytes: 8; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ...'. + +223 deepEqual(await client.query2('human').include('**').get(), []) +   ~~~~~~~ + +test/edges/edgeFilterNested.ts:45:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly initiative: { props: { readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +45 .include('name', (q) => +   ~~~~~~~ + +test/edges/edgeFilterNested.ts:62:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly initiative: { props: { readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +62 .include('name', (q) => +   ~~~~~~~ + +test/edges/edgeNumbers.ts:41:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +41 deepEqual(await db.query2('user', user2).include('**').get(), { +   ~~~~~~~ + +test/edges/edges.ts:113:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +113 .include('contributors.$role', 'contributors.$bigString') +   ~~~~~~~ + +test/edges/edges.ts:127:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +127 deepEqual(await db.query2('article').include('contributors.$rating').get(), [ +   ~~~~~~~ + +test/edges/edges.ts:138:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +138 deepEqual(await db.query2('article').include('contributors.$lang').get(), [ +   ~~~~~~~ + +test/edges/edges.ts:149:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +149 deepEqual(await db.query2('article').include('contributors.$on').get(), [ +   ~~~~~~~ + +test/edges/edges.ts:161:32 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +161 await db.query2('article').include('contributors.$file').get(), +   ~~~~~~~ + +test/edges/edges.ts:190:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +190 .include((s) => +   ~~~~~~~ + +test/edges/edges.ts:214:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +214 .include((s) => +   ~~~~~~~ + +test/edges/edges.ts:246:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +246 .include('contributors.$rating') +   ~~~~~~~ + +test/edges/edges.ts:259:35 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +259 await db.query2('article', 3).include('contributors.$countries.id').get(), +   ~~~~~~~ + +test/edges/edges.ts:279:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +279 .include('contributors') +   ~~~~~~~ + +test/edges/edges.ts:330:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; loca...'. + +330 deepEqual(await db.query2('article').include('author.$role', '*').get(), [ +   ~~~~~~~ + +test/edges/edges.ts:349:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; loca...'. + +349 .include('author.$role', '*') +   ~~~~~~~ + +test/edges/edges.ts:369:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; loca...'. + +369 deepEqual(await db.query2('article').include('author.$msg', '*').get(), [ +   ~~~~~~~ + +test/edges/edges.ts:426:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +426 deepEqual(await db.query2('user', user2).include('**').get(), { +   ~~~~~~~ + +test/edges/edges.ts:442:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +442 deepEqual(await db.query2('user', user1).include('**').get(), { +   ~~~~~~~ + +test/edges/edges.ts:447:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +447 deepEqual(await db.query2('user', user3).include('**').get(), { +   ~~~~~~~ + +test/edges/edges.ts:460:38 - error TS2322: Type '{ update: { id: number; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; $x?: NumInc | undefined; })[] | undefined; update?: (number | BasedModify | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + Types of property 'update' are incompatible. + Type '{ id: number; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; $x?: NumInc | undefined; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; $x?: NumInc | undefined; }'. + +460 friends: { update: [{ id: user2, $index: 0 }] }, +   ~~~~~~ + + test/edges/edges.ts:407:11 + 407 friends: { +    ~~~~~~~~~~ + 408 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... + 412 }, +   ~~~~~~~~~~~~~~ + 413 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ bestFriend?: number | BasedModify | { id: number | BasedModify; $x?: NumInc | undefined; } | null | undefined; friends?: { add?: (number | ... 1 more ... | { ...; })[] | undefined; update?: (number | ... 1 more ... | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (nu...' + +test/edges/edges.ts:463:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +463 deepEqual(await db.query2('user', user3).include('**').get(), { +   ~~~~~~~ + +test/edges/edgesMain.ts:60:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +60 .include('contributors.$rdy') +   ~~~~~~~ + +test/edges/edgesMain.ts:100:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +100 .include('name') +   ~~~~~~~ + +test/edges/edgesMain.ts:140:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +140 .include('name') +   ~~~~~~~ + +test/edges/edgesMain.ts:183:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +183 .include('name') +   ~~~~~~~ + +test/edges/edgesMain.ts:258:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +258 .include('contributor.$rdy') +   ~~~~~~~ + +test/edges/edgesMain.ts:275:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +275 deepEqual(await db.query2('article').include('contributor.$rdy').get(), [ +   ~~~~~~~ + +test/edges/edgesMain.ts:339:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +339 .include('contributors.$age') +   ~~~~~~~ + +test/edges/edgesMain.ts:361:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +361 .include('contributors.$age') +   ~~~~~~~ + +test/edges/edgesMain.ts:371:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ......'. + +371 .include('contributors.$plonki') +   ~~~~~~~ + +test/edges/edgesMain.ts:409:32 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly article: { type: "references"; items: { ...; } & NormalizeEdges<...>; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }...'. + +409 await db.query2('article').include('writer.$age').get(), +   ~~~~~~~ + +test/edges/edgesMain.ts:423:32 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly article: { type: "references"; items: { ...; } & NormalizeEdges<...>; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }...'. + +423 await db.query2('article').include('writer.$age').get(), +   ~~~~~~~ + +test/edges/edgesMain.ts:429:32 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly article: { type: "references"; items: { ...; } & NormalizeEdges<...>; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }...'. + +429 await db.query2('article').include('writer.$plonki').get(), +   ~~~~~~~ + +test/edges/edgesReference.ts:58:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly articles: Omit<...> & { ...; }; }; }; readonly country: { ...; }; readonly article: { ...; }; }; l...'. + +58 deepEqual(await db.query2('article').include('contributor.$friend').get(), [ +   ~~~~~~~ + +test/edges/edgesReference.ts:150:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +150 .include('name', 'contributor.$countries') +   ~~~~~~~ + +test/edges/edgesReference.ts:192:32 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +192 await db.query2('article').include('name', 'contributor.$countries').get() +   ~~~~~~~ + +test/edges/edgesReferences.ts:100:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +100 .include('contributors.$age') +   ~~~~~~~ + +test/edges/edgesReferences.ts:110:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +110 .include('contributors.$friend.name', 'contributors.$friend.location') +   ~~~~~~~ + +test/edges/edgesReferences.ts:131:32 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly email: { type: "string"; }; readonly name: { ...; }; readonly smurp: { ...; }; readonly articles: Omit<...> & { ...; }; readonly locat...'. + +131 await db.query2('article').include('contributors.$friend').get(), +   ~~~~~~~ + +test/edges/edgesReferences.ts:243:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +243 .include('contributors.id') +   ~~~~~~~ + +test/edges/edgesReferences.ts:255:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +255 .include('contributors.id', 'contributors.$countries.id') +   ~~~~~~~ + +test/edges/edgesReferences.ts:273:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +273 .include('contributors.id', 'contributors.$countries.code') +   ~~~~~~~ + +test/edges/edgesReferences.ts:307:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +307 .include('contributors.id', 'contributors.$countries') +   ~~~~~~~ + +test/edges/edgesReferences.ts:341:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +341 .include((t) => { +   ~~~~~~~ + +test/edges/edgesReferences.ts:379:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +379 .include((t) => { +   ~~~~~~~ + +test/edges/edgesReferences.ts:404:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +404 .include((t) => { +   ~~~~~~~ + +test/edges/edgesReferences.ts:437:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +437 .include((s) => { +   ~~~~~~~ + +test/edges/edgesReferences.ts:469:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +469 .include((s) => { +   ~~~~~~~ + +test/edges/edgesReferences.ts:498:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly country: { props: { readonly code: { readonly type: "string"; readonly maxBytes: 2; }; readonly name: { ...; }; readonly users: Omit<...> & { ...; }; }; }; read...'. + +498 .include((s) => { +   ~~~~~~~ + +test/edges/edgesReferences.ts:565:12 - error TS2339: Property 'save' does not exist on type 'DbClient<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly round: { props: { readonly name: { type: "alias"; }; readonly phases: { ...; }; }; }; readonly sequence: { ...; }; readonly scenario: { ...; }; readonly phase: { ....'. + +565 await db.save() +   ~~~~ + +test/edges/edgesReferences.ts:578:38 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly round: { props: { readonly name: { type: "alias"; }; readonly phases: { ...; }; }; }; readonly sequence: { ...; }; readonly scenario: { ...; }; readonly phase: ...'. + +578 deepEqual(await db.query2('phase').include('scenarios').get(), [ +   ~~~~~~~ + +test/edges/edgesReferences.ts:584:30 - error TS2345: Argument of type '{}' is not assignable to parameter of type 'StrictSchema'. + +584 const db = await testDb(t, {}) +   ~~ + +test/edges/edgesReferences.ts:613:38 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +613 deepEqual(await db.query2('phase').include('scenarios').get(), [ +   ~~~~~~~ + +test/edges/edgesReferences.ts:625:38 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +625 deepEqual(await db.query2('phase').include('scenarios').get(), [ +   ~~~~~~~ + +test/edges/edgeText.ts:39:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +39 deepEqual(await db.query2('user', user2).include('**').get(), { +   ~~~~~~~ + +test/edges/edgeText.ts:55:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +55 deepEqual(await db.query2('user', user1).include('**').get(), { +   ~~~~~~~ + +test/edges/edgeText.ts:60:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +60 deepEqual(await db.query2('user', user3).include('**').get(), { +   ~~~~~~~ + +test/edges/edgeText.ts:73:38 - error TS2322: Type '{ update: { id: number; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; $x?: string | Partial> | undefined; })[] | undefined; update?: (number | ... 1 more ... | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | nul...'. + Types of property 'update' are incompatible. + Type '{ id: number; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; $x?: string | Partial> | undefined; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; $x?: string | Partial> | undefined; }'. + +73 friends: { update: [{ id: user2, $index: 0 }] }, +   ~~~~~~ + + test/edges/edgeText.ts:20:11 +  20 friends: { +    ~~~~~~~~~~ +  21 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... +  25 }, +   ~~~~~~~~~~~~~~ +  26 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ bestFriend?: number | BasedModify | { id: number | BasedModify; $x?: string | Partial> | undefined; } | null | undefined; friends?: { ...; } | ... 2 more ... | undefined; }' + +test/edges/edgeText.ts:75:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +75 deepEqual(await db.query2('user', user3).include('**').get(), { +   ~~~~~~~ + +test/edges/edgeType.ts:92:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly workspace: { props: { readonly name: { type: "string"; }; readonly serviceAccounts: { ...; }; }; }; readonly serviceAccount: { ...; }; }; locales: Partial<...>;...'. + +92 .include( +   ~~~~~~~ + +test/enum.ts:34:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly fancyness: { ...; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +34 deepEqual(await client.query2('user').include('fancyness').get(), [ +   ~~~~~~~ + +test/enum.ts:44:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly fancyness: { ...; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +44 .include('fancyness') +   ~~~~~~~ + +test/enum.ts:57:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly fancyness: { ...; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +57 deepEqual(await client.query2('user').include('fancyness').get(), [ +   ~~~~~~~ + +test/enum.ts:70:7 - error TS2322: Type 'number' is not assignable to type '"fire" | "mid" | "beta" | null | undefined'. + +70 fancyness: 3, +   ~~~~~~~~~ + + test/enum.ts:10:11 +  10 fancyness: { +    ~~~~~~~~~~~~ +  11 type: 'enum', +   ~~~~~~~~~~~~~~~~~~~~~~~~~ + ... +  13 default: 'fire', +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +  14 }, +   ~~~~~~~~~~~ + The expected type comes from property 'fancyness' which is declared here on type '{ fancyness?: "fire" | "mid" | "beta" | null | undefined; }' + +test/enum.ts:75:7 - error TS2322: Type '"fond"' is not assignable to type '"fire" | "mid" | "beta" | null | undefined'. + +75 fancyness: 'fond', +   ~~~~~~~~~ + + test/enum.ts:10:11 +  10 fancyness: { +    ~~~~~~~~~~~~ +  11 type: 'enum', +   ~~~~~~~~~~~~~~~~~~~~~~~~~ + ... +  13 default: 'fire', +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +  14 }, +   ~~~~~~~~~~~ + The expected type comes from property 'fancyness' which is declared here on type '{ fancyness?: "fire" | "mid" | "beta" | null | undefined; }' + +test/enum.ts:79:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly fancyness: { ...; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +79 deepEqual(await client.query2('user').include('fancyness').get(), [ +   ~~~~~~~ + +test/enum.ts:105:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly review: { props: { readonly score: { readonly type: "enum"; readonly enum: [...]; readonly default: "😐"; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5...'. + +105 deepEqual(await client.query2('review').include('score').get(), [ +   ~~~~~~~ + +test/enum.ts:119:35 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly review: { props: { readonly score: { readonly type: "enum"; readonly enum: [...]; readonly default: "😐"; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5...'. + +119 await client.query2('review').include('score').sort('score', 'desc').get(), +   ~~~~~~~ + +test/errors.ts:23:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly friends: Omit<{ ...; }, "items"> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +23 equal(await db.query2('user').include('friends').get(), [ +   ~~~~~~~ + +test/errors.ts:47:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly friend: { readonly ref: "user"; readonly prop: "friend"; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +47 equal(await db.query2('user').include('friend').get(), [ +   ~~~~~~~ + +test/exists.ts:32:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<......'. + +32 deepEqual(await db.query2('user').filter('name', 'exists').get(), [ +   ~~~~~~ + +test/exists.ts:39:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<......'. + +39 deepEqual(await db.query2('user').filter('name', '!exists').get(), [ +   ~~~~~~ + +test/exists.ts:46:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<......'. + +46 deepEqual(await db.query2('user').filter('friend', '!exists').get(), [ +   ~~~~~~ + +test/exists.ts:57:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<......'. + +57 deepEqual(await db.query2('user').filter('friends', '!exists').get(), [ +   ~~~~~~ + +test/exists.ts:70:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<......'. + +70 deepEqual(await db.query2('user').filter('friends', 'exists').get(), [ +   ~~~~~~ + +test/exists.ts:83:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<......'. + +83 deepEqual(await db.query2('user').filter('friends', 'exists').get(), []) +   ~~~~~~ + +test/exists.ts:94:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<......'. + +94 deepEqual(await db.query2('user').filter('friends', '!exists').get(), [ +   ~~~~~~ + +test/exists.ts:134:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly start: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales...'. + +134 .include('name') +   ~~~~~~~ + +test/exists.ts:145:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly start: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales...'. + +145 .include('name') +   ~~~~~~~ + +test/exists.ts:154:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly start: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales...'. + +154 await db.query2('user').include('name').filter('friends', '!exists').get(), +   ~~~~~~~ + +test/expire.ts:40:10 - error TS2339: Property 'expire' does not exist on type 'DbClientClass<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly token: { props: { readonly name: { type: "string"; }; readonly user: { ...; } & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }>'. + +40 client.expire('token', token1, 1) +   ~~~~~~ + +test/expire.ts:50:16 - error TS2339: Property 'expire' does not exist on type 'DbClientClass<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly token: { props: { readonly name: { type: "string"; }; readonly user: { ...; } & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }>'. + +50 await client.expire('token', token2, 1) +   ~~~~~~ + +test/expire.ts:92:16 - error TS2339: Property 'expire' does not exist on type 'DbClientClass<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; }; }; }; locales: Partial<...>; }>'. + +92 await client.expire('user', id1, 1) +   ~~~~~~ + +test/expire.ts:94:16 - error TS2339: Property 'expire' does not exist on type 'DbClientClass<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; }; }; }; locales: Partial<...>; }>'. + +94 await client.expire('user', id1, 3) +   ~~~~~~ + +test/filter/api.ts:33:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +33 .include('friends') +   ~~~~~~~ + +test/filter/api.ts:46:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +46 .include('friends') +   ~~~~~~~ + +test/filter/api.ts:58:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +58 await db.query2('user').filter('bestFriend', '=', { id: 9 }).get(), +   ~~~~~~ + +test/filter/api.ts:70:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +70 .filter('bestFriend', '=', [{ id: 9 }, { id: 10 }]) +   ~~~~~~ + +test/filter/edges.ts:55:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly team: { props: { readonly name: { type: "string"; }; readonly files: { ...; }; }; }; readonly libraryFile: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +55 .include((q) => +   ~~~~~~~ + +test/filter/edges.ts:73:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly team: { props: { readonly name: { type: "string"; }; readonly files: { ...; }; }; }; readonly libraryFile: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +73 .filter('files', 'exists') +   ~~~~~~ + +test/filter/filter.ts:44:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly status: { type: "enum"; enum: string[]; }; readonly name: { ...; }; readonly x: { ...; }; readonly orgs: Omit<...> & { ...; }; readonly...'. + +44 deepEqual(await db.query2('org').filter('x', '=', x).get(), [ +   ~~~~~~ + +test/filter/filter.ts:52:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly status: { type: "enum"; enum: string[]; }; readonly name: { ...; }; readonly x: { ...; }; readonly orgs: Omit<...> & { ...; }; readonly...'. + +52 deepEqual(await db.query2('org').filter('orgs', '=', [org, org2]).get(), [ +   ~~~~~~ + +test/filter/filter.ts:60:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly status: { type: "enum"; enum: string[]; }; readonly name: { ...; }; readonly x: { ...; }; readonly orgs: Omit<...> & { ...; }; readonly...'. + +60 deepEqual(await db.query2('org').filter('status', '=', 'error').get(), []) +   ~~~~~~ + +test/filter/filter.ts:61:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly status: { type: "enum"; enum: string[]; }; readonly name: { ...; }; readonly x: { ...; }; readonly orgs: Omit<...> & { ...; }; readonly...'. + +61 deepEqual(await db.query2('org').filter('status', '=', 'ok').get(), [ +   ~~~~~~ + +test/filter/filter.ts:75:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly status: { type: "enum"; enum: string[]; }; readonly name: { ...; }; readonly x: { ...; }; readonly orgs: Omit<...> & { ...; }; readonly...'. + +75 deepEqual(await db.query2('org').filter('name', 'includes', '0').get(), []) +   ~~~~~~ + +test/filter/filter.ts:76:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly status: { type: "enum"; enum: string[]; }; readonly name: { ...; }; readonly x: { ...; }; readonly orgs: Omit<...> & { ...; }; readonly...'. + +76 deepEqual(await db.query2('org').filter('name', 'includes', 'hello').get(), [ +   ~~~~~~ + +test/filter/filter.ts:187:33 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +187 (await db.query2('machine').filter('lastPing', '=', x).get()).length, +   ~~~~~~ + +test/filter/filter.ts:211:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +211 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:238:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +238 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:257:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +257 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:268:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +268 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:279:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +279 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:291:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +291 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:302:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +302 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:313:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +313 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:326:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +326 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:339:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +339 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:351:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +351 .include('id') +   ~~~~~~~ + +test/filter/filter.ts:373:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +373 .include('id') +   ~~~~~~~ + +test/filter/filter.ts:409:28 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +409 await db.query2('env').filter('machines', '<', 10).get(), +   ~~~~~~ + +test/filter/filter.ts:428:28 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +428 await db.query2('env').filter('machines', '=', ids).get(), +   ~~~~~~ + +test/filter/filter.ts:443:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +443 .include('env', '*') +   ~~~~~~~ + +test/filter/filter.ts:490:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +490 .filter('status', '=', '🦄') +   ~~~~~~ + +test/filter/filter.ts:501:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +501 deepEqual(await db.query2('env').filter('standby').get(), []) +   ~~~~~~ + +test/filter/filter.ts:507:36 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +507 deepEqual(await db.query2('env').filter('standby').get(), [ +   ~~~~~~ + +test/filter/filter.ts:513:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +513 .include('temperature') +   ~~~~~~~ + +test/filter/filter.ts:526:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly org: { props: { readonly name: { type: "string"; }; readonly type: { ...; }; readonly envs: Omit<...> & { ...; }; }; }; readonly env: { ...; }; readonly machine...'. + +526 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:589:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly derp: { type: "int32"; }; ... 5 more ...; readonly scheduled: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +589 .include('id', 'lastPing') +   ~~~~~~~ + +test/filter/filter.ts:608:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly derp: { type: "int32"; }; ... 5 more ...; readonly scheduled: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +608 .include('id', 'lastPing') +   ~~~~~~~ + +test/filter/filter.ts:616:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly derp: { type: "int32"; }; ... 5 more ...; readonly scheduled: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +616 .include('id', 'lastPing') +   ~~~~~~~ + +test/filter/filter.ts:626:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly derp: { type: "int32"; }; ... 5 more ...; readonly scheduled: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +626 .include('id', 'lastPing') +   ~~~~~~~ + +test/filter/filter.ts:641:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly derp: { type: "int32"; }; ... 5 more ...; readonly scheduled: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +641 .include('id', 'lastPing') +   ~~~~~~~ + +test/filter/filter.ts:652:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly derp: { type: "int32"; }; ... 5 more ...; readonly scheduled: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +652 .include('id', 'lastPing') +   ~~~~~~~ + +test/filter/filter.ts:663:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly derp: { type: "int32"; }; ... 5 more ...; readonly scheduled: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +663 .include('temperature') +   ~~~~~~~ + +test/filter/filter.ts:702:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly temperature: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +702 .include('temperature') +   ~~~~~~~ + +test/filter/filter.ts:734:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly temperature: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +734 .include('id', 'temperature') +   ~~~~~~~ + +test/filter/filter.ts:754:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly temperature: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +754 (await db.query2('machine').include('id').range(0, 3).get()).node(-1), +   ~~~~~~~ + +test/filter/filter.ts:764:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly machine: { props: { readonly temperature: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +764 .include('temperature') +   ~~~~~~~ + +test/filter/filter.ts:815:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly age: { ...; }; readonly bestBud: { ...; } & { ...; }; readonly buddies: Omit<...> & { ......'. + +815 .include('*') +   ~~~~~~~ + +test/filter/filter.ts:823:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly age: { ...; }; readonly bestBud: { ...; } & { ...; }; readonly buddies: Omit<...> & { ......'. + +823 .include( +   ~~~~~~~ + +test/filter/filter.ts:835:12 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly age: { ...; }; readonly bestBud: { ...; } & { ...; }; readonly buddies: Omit<...> & { ......'. + +835 .include( +   ~~~~~~~ + +test/filter/filter.ts:863:39 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly bucket: { props: { readonly red: { type: "uint8"; }; readonly blue: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +863 const b = await db.query2('bucket').filter('red', '<', 4).get() +   ~~~~~~ + +test/filter/or.ts:21:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +21 await db.query2('user').filter('nr', '>', 8).or('nr', '<', 1).get(), +   ~~~~~~ + +test/filter/or.ts:38:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +38 .filter('nr', '>', 8) +   ~~~~~~ + +test/filter/or.ts:63:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +63 .filter('nr', '>', 8) +   ~~~~~~ + +test/filter/or.ts:87:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +87 .filter('nr', '>', 8) +   ~~~~~~ + +test/filter/references.ts:51:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly workspace: { props: { readonly name: { type: "string"; }; readonly users: { ...; }; readonly drones: { ...; }; }; }; readonly user: { ...; }; readonly drone: { ...'. + +51 .include((s) => +   ~~~~~~~ + +test/filter/references.ts:61:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly workspace: { props: { readonly name: { type: "string"; }; readonly users: { ...; }; readonly drones: { ...; }; }; }; readonly user: { ...; }; readonly drone: { ...'. + +61 .filter('workspace.users', 'includes', user) +   ~~~~~~ + +test/filter/referencesField.ts:32:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +32 .include('name', 'age', 'friends') +   ~~~~~~~ + +test/filter/referencesField.ts:49:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +49 .include('name', 'age', 'friends') +   ~~~~~~~ + +test/filter/referencesField.ts:66:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +66 .include('name', 'age', 'friends') +   ~~~~~~~ + +test/filter/referencesField.ts:83:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +83 .include('name', 'age', 'friends') +   ~~~~~~~ + +test/filter/referencesField.ts:104:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +104 .include('name', 'age', 'friends') +   ~~~~~~~ + +test/filter/referencesField.ts:131:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +131 .include('name', 'age', 'friends') +   ~~~~~~~ + +test/filter/string.ts:18:3 - error TS2739: Type 'Map' is missing the following properties from type 'PropTree': props, required, path, schema + +18 tree: new Map(), +   ~~~~ + + src/schema/defs/index.ts:35:3 + 35 tree: PropTree +    ~~~~ + The expected type comes from property 'tree' which is declared here on type 'TypeDef' + +test/filter/string.ts:63:7 - error TS2322: Type 'Uint8Array' is not assignable to type 'string'. + +63 body: compressedItaly, +   ~~~~ + +test/filter/string.ts:74:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly type: { type: "enum"; enum: ("opinion" | ... 1 more ... | "gossip")[]; }; ... 5 more ...; readonly derp: { ...; }; }; }; readonly i...'. + +74 .filter('stuff', '=', ENCODER.encode('#' + 2)) +   ~~~~~~ + +test/filter/string.ts:93:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly type: { type: "enum"; enum: ("opinion" | ... 1 more ... | "gossip")[]; }; ... 5 more ...; readonly derp: { ...; }; }; }; readonly i...'. + +93 .filter('stuff', 'includes', new Uint8Array([55, 57])) +   ~~~~~~ + +test/filter/string.ts:124:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly type: { type: "enum"; enum: ("opinion" | ... 1 more ... | "gossip")[]; }; ... 5 more ...; readonly derp: { ...; }; }; }; readonly i...'. + +124 .filter('derp', 'includes', ENCODER.encode('vitorio')) +   ~~~~~~ + +test/filter/string.ts:136:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly type: { type: "enum"; enum: ("opinion" | ... 1 more ... | "gossip")[]; }; ... 5 more ...; readonly derp: { ...; }; }; }; readonly i...'. + +136 .filter('derp', 'includes', ENCODER.encode('xx')) +   ~~~~~~ + +test/filter/string.ts:148:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly type: { type: "enum"; enum: ("opinion" | ... 1 more ... | "gossip")[]; }; ... 5 more ...; readonly derp: { ...; }; }; }; readonly i...'. + +148 .filter('derp', 'includes', q) +   ~~~~~~ + +test/filter/string.ts:160:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly type: { type: "enum"; enum: ("opinion" | ... 1 more ... | "gossip")[]; }; ... 5 more ...; readonly derp: { ...; }; }; }; readonly i...'. + +160 .filter('derp', '=', largeDerp) +   ~~~~~~ + +test/filter/string.ts:173:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly type: { type: "enum"; enum: ("opinion" | ... 1 more ... | "gossip")[]; }; ... 5 more ...; readonly derp: { ...; }; }; }; readonly i...'. + +173 .filter('body', '=', italy) +   ~~~~~~ + +test/filter/string.ts:198:7 - error TS2322: Type 'Uint8Array' is not assignable to type 'string'. + +198 body: compressedItaly, +   ~~~~ + +test/filter/string.ts:208:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly body: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +208 .filter('body', 'includes', n) +   ~~~~~~ + +test/filter/string.ts:251:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly headline: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +251 .filter('body', 'includes', 'derp derp derp', { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:263:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly headline: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +263 .filter('body', 'includes', 'derp derp derp') +   ~~~~~~ + +test/filter/string.ts:274:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly headline: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +274 .filter('headline', 'includes', 'pager') +   ~~~~~~ + +test/filter/string.ts:293:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly headline: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +293 .filter('headline', 'includes', 'Pager', { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:312:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly headline: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +312 .filter('headline', 'includes', 'refugee', { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:322:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly headline: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +322 .filter('headline', 'includes', 'gaza', { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:358:40 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { readonly type: "binary"; readonly maxBytes: 30; }; readonly stuff: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...'. + +358 deepEqual(await db.query2('article').filter('stuff', '=', stuff).get(), [ +   ~~~~~~ + +test/filter/string.ts:364:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { readonly type: "binary"; readonly maxBytes: 30; }; readonly stuff: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...'. + +364 .filter('derp', 'includes', new Uint8Array([4])) +   ~~~~~~ + +test/filter/string.ts:393:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +393 .filter('body', 'includes', 'aaaaaa', { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:423:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly body: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +423 .filter('body', 'includes', 'aaaaa', { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:460:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefi...'. + +460 .filter('body', 'includes', ['aaaaaaaaaaa', 'bbbbbb'], { +   ~~~~~~ + +test/filter/string.ts:473:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefi...'. + +473 .filter('title', 'includes', ['gaza', 'tubbies'], { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:489:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefi...'. + +489 .filter('title', 'includes', ['crisis', 'refugee'], { lowerCase: true }) +   ~~~~~~ + +test/filter/string.ts:515:7 - error TS2322: Type 'string | Uint8Array' is not assignable to type 'string | null | undefined'. + Type 'Uint8Array' is not assignable to type 'string'. + +515 body: i === amount - 1 ? italy + ' aaabbbbbbbbbaaa' : compressedItaly, +   ~~~~ + +test/filter/string.ts:522:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +522 .filter('body', 'includes', ['aaaaaaaaaaa', 'bbbbbbbb'], { +   ~~~~~~ + +test/filter/string.ts:551:7 - error TS2322: Type 'string | Uint8Array' is not assignable to type 'string | null | undefined'. + Type 'Uint8Array' is not assignable to type 'string'. + +551 body: i === 999 ? derpItaly : compressedItaly, +   ~~~~ + +test/filter/string.ts:559:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly f: { type: "boolean"; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +559 .filter('body', '=', [derpItaly, 'derp', italy]) +   ~~~~~~ + +test/filter/string.ts:588:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly italy: { props: { readonly body: { readonly type: "string"; readonly maxBytes: 5; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +588 .filter('body', '=', ['xx', 'bb']) +   ~~~~~~ + +test/filter/string.ts:640:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly ent: { props: { readonly city: { readonly type: "string"; readonly maxBytes: 15; }; readonly country: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +640 .filter('country', 'includes', ['Italy', 'Germany']) +   ~~~~~~ + +test/filter/string.ts:666:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly potato: { type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +666 await db.query2('user').filter('potato', '=', '').get(), +   ~~~~~~ + +test/filter/string.ts:700:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly region: { readonly type: "string"; readonly maxBytes: 15; }; readonly potato: { ...; }; readonly city: { ...; }; }; }; }; locales: Par...'. + +700 await db.query2('user').filter('potato', '!=', '').get(), +   ~~~~~~ + +test/filter/string.ts:711:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly region: { readonly type: "string"; readonly maxBytes: 15; }; readonly potato: { ...; }; readonly city: { ...; }; }; }; }; locales: Par...'. + +711 .filter('potato', '!=', '') +   ~~~~~~ + +test/filter/string.ts:722:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly region: { readonly type: "string"; readonly maxBytes: 15; }; readonly potato: { ...; }; readonly city: { ...; }; }; }; }; locales: Par...'. + +722 .filter('potato', '=', '') +   ~~~~~~ + +test/HLLunion.ts:86:6 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +86 .query2('user') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/hooks.ts:67:7 - error TS2322: Type 'undefined' is not assignable to type 'number'. + +67 age: undefined, +   ~~~ + + src/db-client/query2/types.ts:42:20 + 42 type Prettify = { +    ~ + 43 [K in keyof T]: T[K] +   ~~~~~~~~~~~~~~~~~~~~~~ + 44 } & {} +   ~ + The expected type comes from property 'age' which is declared here on type '{ id: number; name: string; age: number; defined: InferType<{ readonly age: { type: "boolean"; }; }, { readonly user: { readonly hooks: { readonly create: (payload: Record) => void; readonly update: (payload: Record) => void; readonly read: (result: Record<...>) => void; readonly include: (...' + +test/hooks.ts:69:5 - error TS2741: Property 'defined' is missing in type '{ id: number; name: string; age: number; }' but required in type '{ id: number; name: string; age: number; defined: InferType<{ readonly age: { type: "boolean"; }; }, { readonly user: { readonly hooks: { readonly create: (payload: Record) => void; readonly update: (payload: Record) => void; readonly read: (result: Record<...>) => void; readonly include: (...'. + + 69 { +   ~ + 70 id: 2, +  ~~~~~~~~~~~~ +... + 72 age: 25, +  ~~~~~~~~~~~~~~ + 73 }, +  ~~~~~ + +test/hooks.ts:76:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { readonly hooks: { readonly create: (payload: Record) => void; readonly update: (payload: Record<...>) => void; readonly read: (result: Reco...'. + +76 deepEqual(await db.query2('user').filter('age', '<', 50).get(), [ +   ~~~~~~ + +test/hooks.ts:89:5 - error TS2741: Property 'defined' is missing in type '{ id: number; name: string; age: number; }' but required in type '{ id: number; name: string; age: number; defined: InferType<{ readonly age: { type: "boolean"; }; }, { readonly user: { readonly hooks: { readonly create: (payload: Record) => void; readonly update: (payload: Record) => void; readonly read: (result: Record<...>) => void; readonly include: (...'. + + 89 { +   ~ + 90 id: 1, +  ~~~~~~~~~~~~ +... + 92 age: 31, +  ~~~~~~~~~~~~~~ + 93 }, +  ~~~~~ + +test/hooks.ts:94:5 - error TS2741: Property 'defined' is missing in type '{ id: number; name: string; age: number; }' but required in type '{ id: number; name: string; age: number; defined: InferType<{ readonly age: { type: "boolean"; }; }, { readonly user: { readonly hooks: { readonly create: (payload: Record) => void; readonly update: (payload: Record) => void; readonly read: (result: Record<...>) => void; readonly include: (...'. + + 94 { +   ~ + 95 id: 2, +  ~~~~~~~~~~~~ +... + 97 age: 25, +  ~~~~~~~~~~~~~~ + 98 }, +  ~~~~~ + +test/hooks.ts:101:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { readonly hooks: { readonly create: (payload: Record) => void; readonly update: (payload: Record<...>) => void; readonly read: (result: Reco...'. + +101 deepEqual(await db.query2('user').filter('age', '<', 50).get(), [ +   ~~~~~~ + +test/hooks.ts:152:5 - error TS2739: Type '{ id: number; private: true; }' is missing the following properties from type '{ id: number; name: string; age: number; private: boolean; }': name, age + +152 { +   ~ +153 id: 1, +  ~~~~~~~~~~~~ +154 private: true, +  ~~~~~~~~~~~~~~~~~~~~ +155 }, +  ~~~~~ + +test/hooks.ts:325:7 - error TS2353: Object literal may only specify known properties, and 'powerful' does not exist in type '{ id: number; name: string; age: number; city: string; }'. + +325 powerful: true, +   ~~~~~~~~ + +test/hooks.ts:370:34 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { readonly hooks: { readonly aggregate: (query: any) => void; }; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +370 equal((await db.query2('user').sum('age').get()).age.sum, 21) +   ~~~ + +test/hooks.ts:412:34 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { readonly hooks: { readonly search: (query: any) => void; }; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +412 equal((await db.query2('user').search('youzi').get()).length, 1) +   ~~~~~~ + +test/hooks.ts:454:33 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { readonly hooks: { readonly groupBy: (query: any) => void; }; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +454 equal(await db.query2('user').groupBy('name').sum('age').get(), { +   ~~~~~~~ + +test/hooks.ts:460:20 - error TS2554: Expected 2-3 arguments, but got 1. + +460 const db = await testDb({ +   ~~~~~~ + + test/shared/index.ts:40:3 + 40 schema: StrictSchema, +    ~~~~~~~~~~~~~~~~~~~~~~~ + An argument for 'schema' was not provided. + +test/hooks.ts:502:33 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +502 equal(await db.query2('user').filter('name', '=', 'youzi').get(), [ +   ~~~~~~ + +test/hooks.ts:546:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { readonly hooks: { readonly include: (query: any) => void; }; props: { ...; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +546 equal(await db.query2('user').include('name', 'age').get(), [ +   ~~~~~~~ + +test/include/include.ts:26:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +26 await db.query2('user').include([]).range(0, 5).get(), +   ~~~~~~~ + +test/include/include.ts:41:9 - error TS2531: Object is possibly 'null'. + +41 equal((await db.query2('user', 1).get()).id, 1) +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/include/include.ts:43:41 - error TS2339: Property 'checksum' does not exist on type '{ id: number; nr: number; }[]'. + +43 equal((await db.query2('user').get()).checksum, 2149520223) +   ~~~~~~~~ + +test/include/include.ts:44:41 - error TS2339: Property 'version' does not exist on type '{ id: number; nr: number; }[]'. + +44 equal((await db.query2('user').get()).version, 4507870634704934) +   ~~~~~~~ + +test/include/include.ts:69:29 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly a: { readonly type: "string"; readonly maxBytes: 10; }; readonly b: { ...; }; readonly c: { ...; }; readonly d: { ...; }; }; }; }; loc...'. + +69 await db.query2('user').range(0, 5).get(), +   ~~~~~ + +test/include/includeMeta.ts:39:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +39 deepEqual(await db.query2('item').include('name', { meta: true }).get(), [ +   ~~~~~~~ + +test/include/includeMeta.ts:54:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +54 deepEqual(await db.query2('item').include('name', { meta: true }).get(), [ +   ~~~~~~~ + +test/include/includeMeta.ts:71:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +71 deepEqual(await db.query2('item').include('name', { meta: 'only' }).get(), [ +   ~~~~~~~ + +test/include/includeMeta.ts:97:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +97 await db.query2('item').include('items.$edgeName', { meta: 'only' }).get(), +   ~~~~~~~ + +test/include/includeMeta.ts:131:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +131 deepEqual(await db.query2('item').include('email', { meta: 'only' }).get(), [ +   ~~~~~~~ + +test/include/includeMeta.ts:147:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +147 deepEqual(await db.query2('item').include('email', { meta: true }).get(), [ +   ~~~~~~~ + +test/include/includeMeta.ts:166:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +166 deepEqual(await db.query2('item').include('name', { meta: true }).get(), [ +   ~~~~~~~ + +test/include/includeMeta.ts:183:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +183 deepEqual(await db.query2('item').include('body', { meta: true }).get(), [ +   ~~~~~~~ + +test/include/includeMeta.ts:219:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +219 await db.query2('item').include('body', { meta: true }).get(), +   ~~~~~~~ + +test/include/includeMeta.ts:258:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +258 await db.query2('item').include('body', { meta: 'only' }).get(), +   ~~~~~~~ + +test/include/includeMeta.ts:296:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +296 .include('body', { meta: 'only' }) +   ~~~~~~~ + +test/include/includeMeta.ts:319:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly name: { type: "string"; }; readonly body: { ...; }; readonly email: { ...; }; readonly items: Omit<...> & { ...; }; }; }; }; locales: ...'. + +319 .include('body', { meta: 'only' }) +   ~~~~~~~ + +test/include/includeNested.ts:26:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +26 await db.query2('user').include('*', '**').range(0, 5).get(), +   ~~~~~~~ + +test/include/includeNested.ts:60:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +60 await db.query2('user').include('friends.*').range(0, 5).get(), +   ~~~~~~~ + +test/include/includeSlice.ts:58:9 - error TS18047: 'q' is possibly 'null'. + +58 equal(q.id, 1) +   ~ + +test/include/includeSlice.ts:63:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly x: { type: "uint32"; }; ... 11 more ...; readonly bigBoyString: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +63 .include('name', { +   ~~~~~~~ + +test/include/includeSlice.ts:121:34 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly x: { type: "uint32"; }; ... 11 more ...; readonly bigBoyString: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +121 await db.query2('item', id1).include('body', { end: 3 }).get(), +   ~~~~~~~ + +test/include/includeSlice.ts:129:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly x: { type: "uint32"; }; ... 11 more ...; readonly bigBoyString: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +129 .include('body.fi', { end: 3 }, 'body.en', { end: 3 }) +   ~~~~~~~ + +test/include/includeSlice.ts:136:34 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly x: { type: "uint32"; }; ... 11 more ...; readonly bigBoyString: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +136 await db.query2('item', id1).include('body', { end: 3 }).locale('en').get(), +   ~~~~~~~ + +test/include/includeSlice.ts:144:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly x: { type: "uint32"; }; ... 11 more ...; readonly bigBoyString: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +144 .include('body', { end: 4, bytes: true }) +   ~~~~~~~ + +test/include/includeSlice.ts:154:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly item: { props: { readonly x: { type: "uint32"; }; ... 11 more ...; readonly bigBoyString: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +154 .include('body.en', { end: 3 }, 'body.fi') +   ~~~~~~~ + +test/include/referencesField.ts:34:34 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +34 await db.query2('user', mrA).include('name', 'age', 'friends[0].age').get(), +   ~~~~~~~ + +test/include/referencesField.ts:40:29 - error TS2339: Property 'at' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +40 await db.query2('user').at(0).get(), +   ~~ + +test/include/referencesField.ts:46:29 - error TS2339: Property 'at' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly age: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mo...'. + +46 await db.query2('user').at(3).get(), +   ~~ + +test/include/thread.perf.ts:79:17 - error TS2345: Argument of type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly simple: { props: { readonly nr: { type: "uint32"; }; readonly start: { ...; }; readonly end: { ...; }; readonly target: { ...; } & { ...; }; }; }; }; locales: P...' is not assignable to parameter of type 'BasedDbQuery'. + Type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly simple: { props: { readonly nr: { type: "uint32"; }; readonly start: { ...; }; readonly end: { ...; }; readonly target: { ...; } & { ...; }; }; }; }; locales: P...' is missing the following properties from type 'BasedDbQuery': target, readSchema, at, reset, and 21 more. + +79 registerQuery(x) +   ~ + +test/include/thread.perf.ts:99:10 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly simple: { props: { readonly nr: { type: "uint32"; }; readonly start: { ...; }; readonly end: { ...; }; readonly target: { ...; } & { ...; }; }; }; }; locales: P...'. + +99 .range(0, 5e6 + i) +   ~~~~~ + +test/include/thread.perf.ts:167:14 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly simple: { props: { readonly nr: { type: "uint32"; }; readonly start: { ...; }; readonly end: { ...; }; readonly target: { ...; } & { ...; }; }; }; }; locales: P...'. + +167 .include('id', 'nr') +   ~~~~~~~ + +test/include/thread.perf.ts:186:14 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly simple: { props: { readonly nr: { type: "uint32"; }; readonly start: { ...; }; readonly end: { ...; }; readonly target: { ...; } & { ...; }; }; }; }; locales: P...'. + +186 .include('id', 'nr') +   ~~~~~~~ + +test/include/thread.perf.ts:206:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly simple: { props: { readonly nr: { type: "uint32"; }; readonly start: { ...; }; readonly end: { ...; }; readonly target: { ...; } & { ...; }; }; }; }; locales: P...'. + +206 .include('nr') // 'start', 'end', 'target' +   ~~~~~~~ + +test/include/thread.perf.ts:324:10 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +324 .query2('user') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/isModified.perf.ts:23:30 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly nr: { type: "uint32"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +23 q.push(db.query2('user').range(0, 5).get()) +   ~~~~~ + +test/json.ts:82:20 - error TS2531: Object is possibly 'null'. + +82 const checksum = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/json.ts:82:59 - error TS2339: Property 'checksum' does not exist on type '{ id: number; article: any; }'. + +82 const checksum = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~ + +test/json.ts:88:21 - error TS2531: Object is possibly 'null'. + +88 const checksum2 = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/json.ts:88:60 - error TS2339: Property 'checksum' does not exist on type '{ id: number; article: any; }'. + +88 const checksum2 = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~ + +test/locales.ts:39:7 - error TS2322: Type 'null' is not assignable to type 'string'. + +39 string: null, +   ~~~~~~ + + src/db-client/query2/types.ts:42:20 + 42 type Prettify = { +    ~ + 43 [K in keyof T]: T[K] +   ~~~~~~~~~~~~~~~~~~~~~~ + 44 } & {} +   ~ + The expected type comes from property 'string' which is declared here on type '{ string: string; id: number; text: { id: string; none: string; aa: string; ab: string; af: string; ak: string; sq: string; am: string; ar: string; an: string; hy: string; as: string; av: string; ae: string; ay: string; ... 131 more ...; cnr: string; }; }' + +test/locales.ts:40:7 - error TS2740: Type '{ [k: string]: null; }' is missing the following properties from type '{ id: string; none: string; aa: string; ab: string; af: string; ak: string; sq: string; am: string; ar: string; an: string; hy: string; as: string; av: string; ae: string; ay: string; az: string; eu: string; be: string; ... 128 more ...; cnr: string; }': id, none, aa, ab, and 143 more. + +40 text: Object.fromEntries(Object.keys(thing.text).map((l) => [l, null])), +   ~~~~ + + src/db-client/query2/types.ts:42:20 + 42 type Prettify = { +    ~ + 43 [K in keyof T]: T[K] +   ~~~~~~~~~~~~~~~~~~~~~~ + 44 } & {} +   ~ + The expected type comes from property 'text' which is declared here on type '{ string: string; id: number; text: { id: string; none: string; aa: string; ab: string; af: string; ak: string; sq: string; am: string; ar: string; an: string; hy: string; as: string; av: string; ae: string; ay: string; ... 131 more ...; cnr: string; }; }' + +test/mem.ts:57:12 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly data: { props: { readonly a: { readonly ref: "data"; readonly prop: "b"; readonly $derp: { ...; }; } & { ...; }; readonly b: Omit<...> & { ...; }; readonly age:...'. + +57 .include('b') +   ~~~~~~~ + +test/mem.ts:72:36 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly data: { props: { readonly a: { readonly ref: "data"; readonly prop: "b"; readonly $derp: { ...; }; } & { ...; }; readonly b: Omit<...> & { ...; }; readonly age:...'. + +72 (await client.query2('data').range(0, 10e6).get()).length, +   ~~~~~ + +test/migration.ts:227:26 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +227 const users = await db.query2('user').get() +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/migration.ts:228:27 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +228 const people = await db.query2('person').include('*', '**').get() +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/modify/props/binary.ts:63:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly blob: { type: "binary"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +63 const res1 = await db.query2('holder', id1).include('toThing.$edgeBlob').get() +   ~~~~~~~ + +test/modify/props/binary.ts:75:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly blob: { type: "binary"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +75 const res2 = await db.query2('holder', id1).include('toThing.$edgeBlob').get() +   ~~~~~~~ + +test/modify/props/boolean.ts:94:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +94 .include('toUser.$edgeBool') +   ~~~~~~~ + +test/modify/props/boolean.ts:99:45 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +99 const resB = await db.query2('holder', b).include('toUser.$edgeBool').get() +   ~~~~~~~ + +test/modify/props/boolean.ts:106:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +106 .include('toUser.$edgeBool') +   ~~~~~~~ + +test/modify/props/boolean.ts:118:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +118 .include('toUser.$edgeBool') +   ~~~~~~~ + +test/modify/props/boolean.ts:122:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +122 const resB2 = await db.query2('holder', b).include('toUser.$edgeBool').get() +   ~~~~~~~ + +test/modify/props/boolean.ts:127:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +127 .include('toUser.$edgeBool') +   ~~~~~~~ + +test/modify/props/boolean.ts:139:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +139 .include('toUser.$edgeBool') +   ~~~~~~~ + +test/modify/props/boolean.ts:143:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +143 const resB3 = await db.query2('holder', b).include('toUser.$edgeBool').get() +   ~~~~~~~ + +test/modify/props/boolean.ts:148:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ......'. + +148 .include('toUser.$edgeBool') +   ~~~~~~~ + +test/modify/props/cardinality.ts:74:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly counter: { type: "cardinality"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 mo...'. + +74 .include('toThing.$edgeCounter') +   ~~~~~~~ + +test/modify/props/cardinality.ts:88:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly counter: { type: "cardinality"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 mo...'. + +88 .include('toThing.$edgeCounter') +   ~~~~~~~ + +test/modify/props/default.ts:102:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly groups: { ...; }; }; }; readonly group: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +102 .include('member.$role') +   ~~~~~~~ + +test/modify/props/default.ts:124:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly groups: { ...; }; }; }; readonly group: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +124 .include('member.$role') +   ~~~~~~~ + +test/modify/props/default.ts:145:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly groups: { ...; }; }; }; readonly group: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +145 .include('member.$role') +   ~~~~~~~ + +test/modify/props/enum.ts:65:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly option: { readonly enum: [...]; } & { ...; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; ...'. + +65 .include('toThing.$edgeOption') +   ~~~~~~~ + +test/modify/props/enum.ts:79:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly option: { readonly enum: [...]; } & { ...; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; ...'. + +79 .include('toThing.$edgeOption') +   ~~~~~~~ + +test/modify/props/json.ts:66:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly data: { type: "json"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., un...'. + +66 const res1 = await db.query2('holder', id1).include('toThing.$edgeData').get() +   ~~~~~~~ + +test/modify/props/json.ts:78:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly data: { type: "json"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., un...'. + +78 const res2 = await db.query2('holder', id1).include('toThing.$edgeData').get() +   ~~~~~~~ + +test/modify/props/mixed.ts:41:48 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 4 more ...; readonly test: { ...; }; }; }; readonly typeTest: { ...; }; }; locales: Partial<.....'. + +41 const typeTest = await db.query2('typeTest').include('*', '**').get() +   ~~~~~~~ + +test/modify/props/mixed.ts:57:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 4 more ...; readonly test: { ...; }; }; }; readonly typeTest: { ...; }; }; locales: Partial<.....'. + +57 const user = await db.query2('user').include('*', '**').get() +   ~~~~~~~ + +test/modify/props/numbers.ts:241:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly n: { type: "number"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., und...'. + +241 .include( +   ~~~~~~~ + +test/modify/props/object.ts:56:13 - error TS2531: Object is possibly 'null'. + +56 deepEqual((await db.query2('thing', id1).get()).info, { +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/modify/props/object.ts:65:13 - error TS2531: Object is possibly 'null'. + +65 deepEqual((await db.query2('thing', id1).get()).info, { +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/modify/props/object.ts:104:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly info: Omit<{ readonly type: "object"; readonly props: { ...; }; }, "props"> & { ...; }; readonly holders: { ...; }; }; }; readonly ho...'. + +104 const res1 = await db.query2('holder', id1).include('toThing.$edgeInfo').get() +   ~~~~~~~ + +test/modify/props/object.ts:121:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly info: Omit<{ readonly type: "object"; readonly props: { ...; }; }, "props"> & { ...; }; readonly holders: { ...; }; }; }; readonly ho...'. + +121 const res2 = await db.query2('holder', id1).include('toThing.$edgeInfo').get() +   ~~~~~~~ + +test/modify/props/references.ts:26:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly refHolders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +26 const res = await db.query2('holder', h1).include('dest.id').get() +   ~~~~~~~ + +test/modify/props/references.ts:38:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly refHolders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +38 const res = await db.query2('holder', h1).include('dest.id').get() +   ~~~~~~~ + +test/modify/props/references.ts:50:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly refHolders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +50 const res = await db.query2('holder', h1).include('dest.id').get() +   ~~~~~~~ + +test/modify/props/references.ts:60:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly refHolders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +60 deepEqual(await db.query2('holder', h1).include('dest').get(), { +   ~~~~~~~ + +test/modify/props/references.ts:95:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly refsHolders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ....'. + +95 const res = await db.query2('holder', h1).include('dests').get() +   ~~~~~~~ + +test/modify/props/references.ts:166:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly refsHolders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ....'. + +166 .include('dests.id') +   ~~~~~~~ + +test/modify/props/references.ts:211:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +211 .include('toThing.$edgeRef.id') +   ~~~~~~~ + +test/modify/props/references.ts:279:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +279 .include('toThing.$edgeRefs.id') +   ~~~~~~~ + +test/modify/props/string.ts:111:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +111 const res1 = await db.query2('holder', id1).include('toThing.$edgeName').get() +   ~~~~~~~ + +test/modify/props/string.ts:123:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +123 const res2 = await db.query2('holder', id1).include('toThing.$edgeName').get() +   ~~~~~~~ + +test/modify/props/string.ts:134:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +134 const res3 = await db.query2('holder', id1).include('toThing.$edgeName').get() +   ~~~~~~~ + +test/modify/props/string.ts:145:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +145 const res4 = await db.query2('holder', id1).include('toThing.$edgeName').get() +   ~~~~~~~ + +test/modify/props/string.ts:156:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +156 const res5 = await db.query2('holder', id1).include('toThing.$edgeName').get() +   ~~~~~~~ + +test/modify/props/string.ts:167:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly name: { type: "string"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +167 const res6 = await db.query2('holder', id1).include('toThing.$edgeName').get() +   ~~~~~~~ + +test/modify/props/text.ts:71:13 - error TS2531: Object is possibly 'null'. + +71 deepEqual((await db.query2('thing', id1).get()).content, { +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/modify/props/text.ts:105:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly content: { type: "text"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +105 const res1 = await db.query2('holder', id1).include('toThing.$edgeText').get() +   ~~~~~~~ + +test/modify/props/text.ts:116:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly content: { type: "text"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +116 const res2 = await db.query2('holder', id1).include('toThing.$edgeText').get() +   ~~~~~~~ + +test/modify/props/timestamp.ts:100:13 - error TS2531: Object is possibly 'null'. + +100 deepEqual((await db.query2('event', id1).get()).ts, 0) +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/modify/props/timestamp.ts:134:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly ts: { type: "timestamp"; }; readonly holders: { ...; }; }; }; readonly holder: { ...; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +134 const res = await db.query2('holder', id).include('toEvent.$edgeTs').get() +   ~~~~~~~ + +test/modify/props/vector.ts:108:29 - error TS18047: 'res' is possibly 'null'. + +108 const vecArr = Array.from(res.vec) as number[] +   ~~~ + +test/modify/props/vector.ts:121:30 - error TS18047: 'res2' is possibly 'null'. + +121 const vecArr2 = Array.from(res2.vec) as number[] +   ~~~~ + +test/modify/props/vector.ts:164:29 - error TS18047: 'res' is possibly 'null'. + +164 const vecArr = Array.from(res.vec) as number[] +   ~~~ + +test/modify/props/vector.ts:176:30 - error TS18047: 'res2' is possibly 'null'. + +176 const vecArr2 = Array.from(res2.vec) as number[] +   ~~~~ + +test/modify/props/vector.ts:217:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly vec: { readonly type: "vector"; readonly size: 3; readonly baseType: "float32"; }; readonly holders: { ...; }; }; }; readonly holder:...'. + +217 const res = await db.query2('holder', id1).include('toThing.$edgeVec').get() +   ~~~~~~~ + +test/modify/props/vector.ts:236:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly thing: { props: { readonly vec: { readonly type: "vector"; readonly size: 3; readonly baseType: "float32"; }; readonly holders: { ...; }; }; }; readonly holder:...'. + +236 const res2 = await db.query2('holder', id1).include('toThing.$edgeVec').get() +   ~~~~~~~ + +test/protocol/schema.ts:59:40 - error TS2322: Type '14' is not assignable to type 'PropTypeEnum'. + +59 prop: { path: ['scenarios'], typeIndex: 14, readBy: 0 }, +   ~~~~~~~~~ + + src/protocol/db-read/types.ts:45:3 + 45 typeIndex: PropTypeEnum +    ~~~~~~~~~ + The expected type comes from property 'typeIndex' which is declared here on type 'ReaderPropDef' + +test/protocol/schema.ts:124:36 - error TS2322: Type '14' is not assignable to type 'PropTypeEnum'. + +124 prop: { path: ['items'], typeIndex: 14, readBy: 0 }, +   ~~~~~~~~~ + + src/protocol/db-read/types.ts:45:3 + 45 typeIndex: PropTypeEnum +    ~~~~~~~~~ + The expected type comes from property 'typeIndex' which is declared here on type 'ReaderPropDef' + +test/query-ast/validate.perf.ts:1:20 - error TS2307: Cannot find module 'valibot' or its corresponding type declarations. + +1 import * as v from 'valibot' +   ~~~~~~~~~ + +test/query/db.ts:66:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +66 const res = await db.query2('user').include('address.street').get() +   ~~~~~~~ + +test/query/db.ts:77:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +77 .include('name') +   ~~~~~~~ + +test/query/db.ts:87:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +87 .include('name') +   ~~~~~~~ + +test/query/db.ts:114:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +114 .include('name') +   ~~~~~~~ + +test/query/db.ts:124:41 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +124 const res = await db.query2('user').sum('age').get() +   ~~~ + +test/query/db.ts:172:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +172 const q = db.query2('user').include('friends.$rank', 'friends.$rating') +   ~~~~~~~ + +test/query/db.ts:181:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +181 const q = db.query2('user').include('friends.$friendRef') +   ~~~~~~~ + +test/query/types.ts:110:11 - error TS2322: Type '"a" | "b" | undefined' is not assignable to type '"a" | "b"'. + Type 'undefined' is not assignable to type '"a" | "b"'. + +110 const myEnum: 'a' | 'b' = everything.myEnum +   ~~~~~~ + +test/query/types.ts:123:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +123 const query = db.query2('everything').include('myEnum') +   ~~~~~~~ + +test/query/types.ts:129:7 - error TS2578: Unused '@ts-expect-error' directive. + +129 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:135:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +135 const query = db.query2('everything').include('*') +   ~~~~~~~ + +test/query/types.ts:142:7 - error TS2578: Unused '@ts-expect-error' directive. + +142 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:147:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +147 const query = db.query2('everything').include('**') +   ~~~~~~~ + +test/query/types.ts:158:7 - error TS2578: Unused '@ts-expect-error' directive. + +158 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:160:7 - error TS2578: Unused '@ts-expect-error' directive. + +160 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:162:7 - error TS2578: Unused '@ts-expect-error' directive. + +162 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:169:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +169 const query = db.query2('everything').include('myEnum', '**') +   ~~~~~~~ + +test/query/types.ts:179:7 - error TS2578: Unused '@ts-expect-error' directive. + +179 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:186:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +186 const query = db.query2('everything').include('n', 's', 'nested') +   ~~~~~~~ + +test/query/types.ts:196:7 - error TS2578: Unused '@ts-expect-error' directive. + +196 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:203:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +203 const query = db.query2('everything').include('*', 'myRefs') +   ~~~~~~~ + +test/query/types.ts:212:7 - error TS2578: Unused '@ts-expect-error' directive. + +212 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:219:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +219 const query = db.query2('everything', 1).include('*', 'myRefs') +   ~~~~~~~ + +test/query/types.ts:238:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +238 .include((select) => select('myRefs').include('isNice')) +   ~~~~~~~ + +test/query/types.ts:250:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +250 .include((select) => select('backRefs').include('myEnum')) +   ~~~~~~~ + +test/query/types.ts:260:43 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +260 const query = db.query2('everything').sum('n', 'i8').sum('card') +   ~~~ + +test/query/types.ts:288:43 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +288 const query = db.query2('everything').sum('n') +   ~~~ + +test/query/types.ts:293:7 - error TS2578: Unused '@ts-expect-error' directive. + +293 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/query/types.ts:297:48 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; readonly backRef: { ...; }; readonly backRefs: { ...; }; }; }; readonly everything: { ...; }; }; locales...'. + +297 const queryGroup = db.query2('everything').groupBy('s').sum('n') +   ~~~~~~~ + +test/query/types.ts:305:9 - error TS2578: Unused '@ts-expect-error' directive. + +305 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/queryResponse.ts:25:19 - error TS18047: 'response' is possibly 'null'. + +25 extractNumber(response.version), +   ~~~~~~~~ + +test/queryResponse.ts:25:28 - error TS2339: Property 'version' does not exist on type '{ id: number; status: "a" | "b" | undefined; }'. + +25 extractNumber(response.version), +   ~~~~~~~ + +test/queryResponse.ts:26:5 - error TS18047: 'response' is possibly 'null'. + +26 response.checksum, +   ~~~~~~~~ + +test/queryResponse.ts:26:14 - error TS2339: Property 'checksum' does not exist on type '{ id: number; status: "a" | "b" | undefined; }'. + +26 response.checksum, +   ~~~~~~~~ + +test/queryResponse.ts:36:12 - error TS18047: 'response' is possibly 'null'. + +36 notEqual(response.version, response2.version) +   ~~~~~~~~ + +test/queryResponse.ts:36:21 - error TS2339: Property 'version' does not exist on type '{ id: number; status: "a" | "b" | undefined; }'. + +36 notEqual(response.version, response2.version) +   ~~~~~~~ + +test/queryResponse.ts:36:30 - error TS18047: 'response2' is possibly 'null'. + +36 notEqual(response.version, response2.version) +   ~~~~~~~~~ + +test/queryResponse.ts:36:40 - error TS2339: Property 'version' does not exist on type '{ id: number; status: "a" | "b" | undefined; }'. + +36 notEqual(response.version, response2.version) +   ~~~~~~~ + +test/range.ts:59:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; readonly nr: { ...; }; readonly location: Omit<....'. + +59 deepEqual(await db.query2('user').include('nr').range(1, 2).get(), [ +   ~~~~~~~ + +test/range.ts:63:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; readonly nr: { ...; }; readonly location: Omit<....'. + +63 await db.query2('user').include('nr').sort('email').range(1, 2).get(), +   ~~~~~~~ + +test/raw.ts:21:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly uniqueSkills: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +21 .include('uniqueSkills', { raw: true }) +   ~~~~~~~ + +test/raw.ts:50:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly role: { ...; }; readonly resume: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., unde...'. + +50 .include(['name', 'role', 'resume'], { raw: true }) +   ~~~~~~~ + +test/references/references.ts:61:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; loca...'. + +61 deepEqual(await db.query2('article').include('contributors.name').get(), [ +   ~~~~~~~ + +test/references/references.ts:75:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; loca...'. + +75 deepEqual(await db.query2('user').include('articles.name').get(), [ +   ~~~~~~~ + +test/references/references.ts:130:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly uid: { readonly type: "uint32"; }; readonly name: { ...; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; ...'. + +130 deepEqual(await db.query2('user').include('resources').get(), [ +   ~~~~~~~ + +test/references/references.ts:158:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly uid: { readonly type: "uint32"; }; readonly name: { ...; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; ...'. + +158 deepEqual(await db.query2('user').include('resources.name').get(), [ +   ~~~~~~~ + +test/references/references.ts:218:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +218 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:218:37 + 218 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:218:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +218 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:242:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +242 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:242:37 + 242 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:242:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +242 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:263:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +263 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:263:37 + 263 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:263:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +263 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:284:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +284 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:284:37 + 284 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:284:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +284 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:309:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +309 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:309:37 + 309 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:309:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +309 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:335:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +335 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:335:37 + 335 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:335:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +335 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:369:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +369 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:369:37 + 369 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:369:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +369 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:400:37 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "user", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "user", id: number | (Partial; }, "user">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "user">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly resource: ...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +400 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~ + + test/references/references.ts:400:37 + 400 deepEqual(await db.query2('user', user).include('resources').get(), { +    ~~~~ + Did you forget to use 'await'? + +test/references/references.ts:400:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly max: 8; }; readonly resources: Omit<...> & { ...; }; }; }; readonly resource: { ...; }; }; l...'. + +400 deepEqual(await db.query2('user', user).include('resources').get(), { +   ~~~~~~~ + +test/references/references.ts:477:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }...'. + +477 deepEqual(await db.query2('article').include('contributors.name').get(), [ +   ~~~~~~~ + +test/references/references.ts:550:32 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "article", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "article", id: number | (Partial; }, "article">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "article">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly art...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +550 await db.query2('article', strudelArticle).include('contributors').get(), +   ~~~~~~~~~~~~~~ + + test/references/references.ts:550:32 + 550 await db.query2('article', strudelArticle).include('contributors').get(), +    ~~~~~~~~~~~~~~ + Did you forget to use 'await'? + +test/references/references.ts:550:48 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }...'. + +550 await db.query2('article', strudelArticle).include('contributors').get(), +   ~~~~~~~ + +test/references/references.ts:565:26 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "article", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "article", id: number | (Partial; }, "article">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "article">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly art...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +565 .query2('article', strudelArticle) +   ~~~~~~~~~~~~~~ + + test/references/references.ts:565:26 + 565 .query2('article', strudelArticle) +    ~~~~~~~~~~~~~~ + Did you forget to use 'await'? + +test/references/references.ts:566:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }...'. + +566 .include((q) => q('contributors').include('name').filter('flap', '>', 25)) +   ~~~~~~~ + +test/references/references.ts:580:26 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "article", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "article", id: number | (Partial; }, "article">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "article">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly art...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +580 .query2('article', strudelArticle) +   ~~~~~~~~~~~~~~ + + test/references/references.ts:580:26 + 580 .query2('article', strudelArticle) +    ~~~~~~~~~~~~~~ + Did you forget to use 'await'? + +test/references/references.ts:581:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }...'. + +581 .include((q) => { +   ~~~~~~~ + +test/references/references.ts:597:26 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "article", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "article", id: number | (Partial; }, "article">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "article">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial & { ...; }; }; }; readonly art...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +597 .query2('article', strudelArticle) +   ~~~~~~~~~~~~~~ + + test/references/references.ts:597:26 + 597 .query2('article', strudelArticle) +    ~~~~~~~~~~~~~~ + Did you forget to use 'await'? + +test/references/references.ts:598:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }...'. + +598 .include((select) => { +   ~~~~~~~ + +test/references/references.ts:814:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly email: { ...; }; readonly invitedBy: { ...; } & { ...; }; readonly invited: { ...; }; }; }; }; loc...'. + +814 deepEqual(await client.query2('user').include('email', 'invitedBy').get(), [ +   ~~~~~~~ + +test/references/references.ts:877:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly product: { props: { readonly reviews: Omit<...> & { ...; }; }; }; readonly review: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +877 const products = await db.query2('product').include('*', '**').get() +   ~~~~~~~ + +test/references/references.ts:878:45 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly product: { props: { readonly reviews: Omit<...> & { ...; }; }; }; readonly review: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +878 const reviews = await db.query2('review').include('*', '**').get() +   ~~~~~~~ + +test/references/references.ts:928:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +928 deepEqual(await db.query2('user', mrSnurp).include('**').get(), { +   ~~~~~~~ + +test/references/references.ts:936:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +936 deepEqual(await db.query2('user', mrSnurp).include('**').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:37:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +37 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:51:11 - error TS2322: Type '{ update: { id: BasedCreatePromise; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { id: number | BasedModify<...>; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + Types of property 'update' are incompatible. + Type '{ id: BasedCreatePromise; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; }'. + +51 $index: -1, +   ~~~~~~ + + test/references/referencesIndex.ts:11:11 +  11 friends: { +    ~~~~~~~~~~ +  12 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... +  15 }, +   ~~~~~~~~~~~~~~ +  16 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ name?: string | null | undefined; friends?: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify<...> | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; }' + +test/references/referencesIndex.ts:62:11 - error TS2322: Type '{ update: { id: BasedCreatePromise; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { id: number | BasedModify<...>; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + Types of property 'update' are incompatible. + Type '{ id: BasedCreatePromise; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; }'. + +62 $index: -1, +   ~~~~~~ + + test/references/referencesIndex.ts:11:11 +  11 friends: { +    ~~~~~~~~~~ +  12 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... +  15 }, +   ~~~~~~~~~~~~~~ +  16 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ name?: string | null | undefined; friends?: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify<...> | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; }' + +test/references/referencesIndex.ts:68:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +68 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:86:11 - error TS2322: Type '{ update: { id: BasedCreatePromise; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { id: number | BasedModify<...>; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + Types of property 'update' are incompatible. + Type '{ id: BasedCreatePromise; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; }'. + +86 $index: 0, +   ~~~~~~ + + test/references/referencesIndex.ts:11:11 +  11 friends: { +    ~~~~~~~~~~ +  12 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... +  15 }, +   ~~~~~~~~~~~~~~ +  16 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ name?: string | null | undefined; friends?: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify<...> | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; }' + +test/references/referencesIndex.ts:92:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +92 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:111:11 - error TS2322: Type '{ update: { id: BasedCreatePromise; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { id: number | BasedModify<...>; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + Types of property 'update' are incompatible. + Type '{ id: BasedCreatePromise; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; }'. + +111 $index: 2, +   ~~~~~~ + + test/references/referencesIndex.ts:11:11 +  11 friends: { +    ~~~~~~~~~~ +  12 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... +  15 }, +   ~~~~~~~~~~~~~~ +  16 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ name?: string | null | undefined; friends?: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify<...> | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; }' + +test/references/referencesIndex.ts:117:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +117 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:133:11 - error TS2322: Type '{ update: { id: BasedCreatePromise; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { id: number | BasedModify<...>; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + Types of property 'update' are incompatible. + Type '{ id: BasedCreatePromise; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; }'. + +133 $index: -1, +   ~~~~~~ + + test/references/referencesIndex.ts:11:11 +  11 friends: { +    ~~~~~~~~~~ +  12 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... +  15 }, +   ~~~~~~~~~~~~~~ +  16 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ name?: string | null | undefined; friends?: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify<...> | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; }' + +test/references/referencesIndex.ts:139:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +139 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:156:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +156 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:169:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +169 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesIndex.ts:213:11 - error TS2322: Type '{ update: { id: BasedCreatePromise; $index: number; }[]; }' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { id: number | BasedModify<...>; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + Types of property 'update' are incompatible. + Type '{ id: BasedCreatePromise; $index: number; }[]' is not assignable to type '(number | BasedModify | { id: number | BasedModify; })[]'. + Object literal may only specify known properties, and '$index' does not exist in type 'BasedModify | { id: number | BasedModify; }'. + +213 $index: 2, +   ~~~~~~ + + test/references/referencesIndex.ts:182:11 + 182 friends: { +    ~~~~~~~~~~ + 183 items: { +   ~~~~~~~~~~~~~~~~~~~~ + ... + 186 }, +   ~~~~~~~~~~~~~~ + 187 }, +   ~~~~~~~~~~~ + The expected type comes from property 'friends' which is declared here on type '{ name?: string | null | undefined; friends?: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify<...> | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; }' + +test/references/referencesIndex.ts:219:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +219 deepEqual(await db.query2('user', john).include('*', 'friends').get(), { +   ~~~~~~~ + +test/references/referencesModify.ts:49:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +49 await db.query2('user').include('*', 'friends').get(), +   ~~~~~~~ + +test/references/referencesModify.ts:67:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +67 await db.query2('user').include('*', 'friends').get(), +   ~~~~~~~ + +test/references/referencesModify.ts:88:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +88 await db.query2('user').include('*', 'friends').get(), +   ~~~~~~~ + +test/references/referencesModify.ts:186:35 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly a: { props: { readonly name: { type: "string"; }; readonly bees: Omit<...> & { ...; }; }; }; readonly b: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +186 deepEqual((await db.query2('a').include('bees').get())[0].bees[0].id, 2) +   ~~~~~~~ + +test/references/referencesModify.ts:191:35 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly a: { props: { readonly name: { type: "string"; }; readonly bees: Omit<...> & { ...; }; }; }; readonly b: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +191 deepEqual((await db.query2('a').include('bees').get())[0].bees.length, 1) +   ~~~~~~~ + +test/references/referencesModify.ts:192:35 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly a: { props: { readonly name: { type: "string"; }; readonly bees: Omit<...> & { ...; }; }; }; readonly b: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +192 deepEqual((await db.query2('a').include('bees').get())[0].bees[0].id, 2) +   ~~~~~~~ + +test/save/blockHash.ts:50:44 - error TS2339: Property 'checksum' does not exist on type '{ id: number; title: string; body: string; }[]'. + +50 (await client.query2('article').get()).checksum, +   ~~~~~~~~ + +test/save/blockHash.ts:51:42 - error TS2339: Property 'checksum' does not exist on type '{ id: number; title: string; body: string; }[]'. + +51 (await client.query2('story').get()).checksum, +   ~~~~~~~~ + +test/save/save.ts:197:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly group: { props: { readonly name: { readonly type: "string"; }; readonly users: Omit<...> & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; },...'. + +197 const users1 = await client.query2('user').include('group').get() +   ~~~~~~~ + +test/save/save.ts:198:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly group: { props: { readonly name: { readonly type: "string"; }; readonly users: Omit<...> & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; },...'. + +198 const users2 = await client2.query2('user').include('group').get() +   ~~~~~~~ + +test/save/save.ts:258:37 - error TS2345: Argument of type '{ readonly locales: { readonly en: {}; readonly fi: { readonly fallback: readonly ["en"]; }; }; readonly types: { readonly article: { readonly props: { readonly title: { readonly type: "text"; }; readonly body: { readonly type: "text"; }; }; }; }; }' is not assignable to parameter of type 'StrictSchema'. + Type '{ readonly locales: { readonly en: {}; readonly fi: { readonly fallback: readonly ["en"]; }; }; readonly types: { readonly article: { readonly props: { readonly title: { readonly type: "text"; }; readonly body: { readonly type: "text"; }; }; }; }; }' is not assignable to type '{ version?: string | undefined; types: SchemaTypes; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; locales?: Partial<...> | undefined; } & { ...; } & Omit<...> & { ...; }'. + Type '{ readonly locales: { readonly en: {}; readonly fi: { readonly fallback: readonly ["en"]; }; }; readonly types: { readonly article: { readonly props: { readonly title: { readonly type: "text"; }; readonly body: { readonly type: "text"; }; }; }; }; }' is not assignable to type '{ version?: string | undefined; types: SchemaTypes; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; locales?: Partial<...> | undefined; }'. + The types of 'locales.en' are incompatible between these types. + Property 'fallback' is missing in type '{}' but required in type '{ fallback: ("id" | "none" | "aa" | "ab" | "af" | "ak" | "sq" | "am" | "ar" | "an" | "hy" | "as" | "av" | "ae" | "ay" | "az" | "eu" | "be" | "bn" | "bi" | "bs" | "br" | "bg" | "my" | "ca" | ... 121 more ... | "cnr")[]; }'. + +258 const client = await db.setSchema(schema) +   ~~~~~~ + + src/schema/schema/locales.ts:7:5 + 7 fallback: LangName[] +    ~~~~~~~~ + 'fallback' is declared here. + +test/save/save.ts:290:32 - error TS2344: Type '{ readonly locales: { readonly en: {}; readonly fi: { readonly fallback: readonly ["en"]; }; }; readonly types: { readonly article: { readonly props: { readonly title: { readonly type: "text"; }; readonly body: { readonly type: "text"; }; }; }; }; }' does not satisfy the constraint 'SchemaIn'. + Type '{ readonly locales: { readonly en: {}; readonly fi: { readonly fallback: readonly ["en"]; }; }; readonly types: { readonly article: { readonly props: { readonly title: { readonly type: "text"; }; readonly body: { readonly type: "text"; }; }; }; }; }' is not assignable to type 'Schema'. + Type '{ readonly locales: { readonly en: {}; readonly fi: { readonly fallback: readonly ["en"]; }; }; readonly types: { readonly article: { readonly props: { readonly title: { readonly type: "text"; }; readonly body: { readonly type: "text"; }; }; }; }; }' is not assignable to type '{ version?: string | undefined; types: SchemaTypes; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; locales?: Partial<...> | undefined; }'. + The types of 'locales.en' are incompatible between these types. + Property 'fallback' is missing in type '{}' but required in type '{ fallback: ("id" | "none" | "aa" | "ab" | "af" | "ak" | "sq" | "am" | "ar" | "an" | "hy" | "as" | "av" | "ae" | "ay" | "az" | "eu" | "be" | "bn" | "bi" | "bs" | "br" | "bg" | "my" | "ca" | ... 121 more ... | "cnr")[]; }'. + +290 const client2 = new DbClient({ +   ~~~~~~~~~~~~~ + + src/schema/schema/locales.ts:7:5 + 7 fallback: LangName[] +    ~~~~~~~~ + 'fallback' is declared here. + +test/save/save.ts:366:15 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +366 await db2.query2('person').include('name', 'books').get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/save/save.ts:367:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +367 await db.query2('person').include('name', 'books').get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/save/save.ts:644:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +644 .filter('alias', 'includes', 'slim') +   ~~~~~~ + +test/save/save.ts:652:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +652 .filter('alias', 'includes', 'slim') +   ~~~~~~ + +test/save/save.ts:662:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +662 .filter('alias', 'includes', 'slick') +   ~~~~~~ + +test/save/save.ts:670:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +670 .filter('alias', 'includes', 'slick') +   ~~~~~~ + +test/save/save.ts:680:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +680 .filter('alias', 'includes', 'boss') +   ~~~~~~ + +test/save/save.ts:688:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +688 .filter('alias', 'includes', 'boss') +   ~~~~~~ + +test/save/save.ts:696:36 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +696 await client2.query2('person').include('name', 'alias', 'books').get(), +   ~~~~~~~ + +test/save/save.ts:697:35 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly book: { props: { readonly name: { readonly type: "string"; readonly max: 16; }; readonly isbn: { ...; }; readonly owner: { ...; } & { ...; }; }; }; readonly per...'. + +697 await client.query2('person').include('name', 'alias', 'books').get(), +   ~~~~~~~ + +test/save/save.ts:799:26 - error TS2345: Argument of type 'BasedDb' is not assignable to parameter of type 'DbServer'. + Type 'BasedDb' is missing the following properties from type 'DbServer': dbCtxExternal, migrating, saveInProgress, activeReaders, and 17 more. + +799 t.after(() => t.backup(db2)) +   ~~~ + +test/save/saveEdge.ts:43:48 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bestFriend: { ...; } & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +43 deepEqual(await client.query2('user', user2).include('**').get(), { +   ~~~~~~~ + +test/save/saveRange.ts:63:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; readonly story: { ...; }; }; }; }; locales: Part...'. + +63 deepEqual(await client.query2('user').include('age').range(0, 1).get(), [ +   ~~~~~~~ + +test/save/saveRange.ts:112:42 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; readonly story: { ...; }; }; }; }; locales: Part...'. + +112 deepEqual(await client2.query2('user').include('age').range(0, 1).get(), [ +   ~~~~~~~ + +test/save/saveRange.ts:121:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; readonly story: { ...; }; }; }; }; locales: Part...'. + +121 .include('age') +   ~~~~~~~ + +test/save/saveRange.ts:132:42 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; readonly story: { ...; }; }; }; }; locales: Part...'. + +132 deepEqual(await client2.query2('user').include('name').range(0, 2).get(), [ +   ~~~~~~~ + +test/save/saveRange.ts:146:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; readonly story: { ...; }; }; }; }; locales: Part...'. + +146 .include('name') +   ~~~~~~~ + +test/save/saveRange.ts:387:9 - error TS2353: Object literal may only specify known properties, and 'id' does not exist in type '{ y?: NumInc | null | undefined; }'. + +387 id: i, +   ~~ + +test/scenarios/e-commerce.ts:1:24 - error TS2307: Cannot find module '../../src/db-client/_modify/error.js' or its corresponding type declarations. + +1 import { errors } from '../../src/db-client/_modify/error.js' +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/scenarios/e-commerce.ts:334:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly maxBytes: 50; }; ... 4 more ...; readonly reviews: Omit<...> & { ...; }; }; }; readonly cate...'. + +334 await client.query2('user').sort('lastLogin', 'asc').get(), +   ~~~~ + +test/scenarios/e-commerce.ts:343:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +343 .query2('product') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/e-commerce.ts:356:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +356 .query2('user', userId) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/e-commerce.ts:370:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +370 .query2('review') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/e-commerce.ts:392:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +392 .query2('product') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/e-commerce.ts:486:36 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; readonly maxBytes: 50; }; ... 4 more ...; readonly reviews: Omit<...> & { ...; }; }; }; readonly cate...'. + +486 await client.query2('product').range(0, 10_000_000).get() +   ~~~~~ + +test/scenarios/northwind.ts:12:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +12 const r1 = await db.query2('region').include('*').get() +   ~~~~~~~ + +test/scenarios/northwind.ts:33:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +33 const r2 = await db.query2('employees').include('firstName', 'lastName').get() +   ~~~~~~~ + +test/scenarios/northwind.ts:50:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +50 .include('firstName', 'lastName') +   ~~~~~~~ + +test/scenarios/northwind.ts:70:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +70 .include('orderDate', 'shippedDate', 'customer.id', 'freight') +   ~~~~~~~ + +test/scenarios/northwind.ts:101:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +101 .include('title', 'firstName', 'lastName') +   ~~~~~~~ + +test/scenarios/northwind.ts:150:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +150 .include('firstName', 'lastName', 'region') +   ~~~~~~~ + +test/scenarios/northwind.ts:168:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +168 .include('firstName', 'lastName', 'region') +   ~~~~~~~ + +test/scenarios/northwind.ts:197:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +197 .include('firstName', 'lastName', 'title', 'city', 'region') +   ~~~~~~~ + +test/scenarios/northwind.ts:238:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +238 .include('companyName', 'contactTitle', 'city', 'country') +   ~~~~~~~ + +test/scenarios/northwind.ts:307:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +307 .filter('product', '=', 3) +   ~~~~~~ + +test/scenarios/northwind.ts:317:44 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +317 const r13 = await db.query2('employees').groupBy('city').count().get() +   ~~~~~~~ + +test/scenarios/northwind.ts:337:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +337 .include('orderDate', 'customer.companyName') +   ~~~~~~~ + +test/scenarios/northwind.ts:377:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +377 .include( +   ~~~~~~~ + +test/scenarios/northwind.ts:429:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +429 .filter('country', '=', 'Mexico') +   ~~~~~~ + +test/scenarios/northwind.ts:511:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +511 .sort('unitPrice', 'desc') +   ~~~~ + +test/scenarios/northwind.ts:564:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +564 .sort('unitPrice', 'desc') +   ~~~~ + +test/scenarios/northwind.ts:607:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +607 .filter('country', '=', ['Germany', 'France', 'UK']) +   ~~~~~~ + +test/scenarios/northwind.ts:662:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +662 .filter('unitPrice', '..', [10, 20]) +   ~~~~~~ + +test/scenarios/northwind.ts:923:45 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +923 const r24 = (await db.query2('customers').include('companyName').get()).map( +   ~~~~~~~ + +test/scenarios/northwind.ts:1032:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1032 .include('contactName', 'city', 'country') +   ~~~~~~~ + +test/scenarios/northwind.ts:1038:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1038 .include('contactName', 'city', 'country') +   ~~~~~~~ + +test/scenarios/northwind.ts:1090:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1090 .include('city', 'country') +   ~~~~~~~ + +test/scenarios/northwind.ts:1096:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1096 .include('city', 'country') +   ~~~~~~~ + +test/scenarios/northwind.ts:1135:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1135 .include('*') +   ~~~~~~~ + +test/scenarios/northwind.ts:1166:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1166 .include('*') +   ~~~~~~~ + +test/scenarios/northwind.ts:1193:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1193 .include('id') +   ~~~~~~~ + +test/scenarios/northwind.ts:1202:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1202 .include('*') +   ~~~~~~~ + +test/scenarios/northwind.ts:1218:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1218 .include('customer.companyName', 'orderDate') +   ~~~~~~~ + +test/scenarios/northwind.ts:1288:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1288 .include('companyName', (q) => q('orders').include('id')) +   ~~~~~~~ + +test/scenarios/northwind.ts:1376:5 - error TS2531: Object is possibly 'null'. + +1376 (await db.query2('customers', { customerId: 'WELLI' }).get()).id!, +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/scenarios/northwind.ts:1383:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1383 .filter('customer', '=', wandk) +   ~~~~~~ + +test/scenarios/northwind.ts:1395:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1395 const orders = await db.query2('orders').include('customer.id').get() +   ~~~~~~~ + +test/scenarios/northwind.ts:1407:33 - error TS2339: Property 'companyName' does not exist on type '{ id: any; }'. + +1407 companyName: customer.companyName, +   ~~~~~~~~~~~ + +test/scenarios/northwind.ts:1413:31 - error TS2339: Property 'companyName' does not exist on type '{ id: any; }'. + +1413 companyName: customer.companyName, +   ~~~~~~~~~~~ + +test/scenarios/northwind.ts:1457:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1457 .include('customerId', 'companyName', 'city') +   ~~~~~~~ + +test/scenarios/northwind.ts:1487:41 - error TS2339: Property 'min' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1487 deepEqual(await db.query2('products').min('unitPrice').get(), { +   ~~~ + +test/scenarios/northwind.ts:1496:33 - error TS2339: Property 'min' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1496 await db.query2('products').min('unitPrice').groupBy('category').get(), +   ~~~ + +test/scenarios/northwind.ts:1512:41 - error TS2339: Property 'max' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1512 deepEqual(await db.query2('products').max('unitPrice').get(), { +   ~~~ + +test/scenarios/northwind.ts:1519:41 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1519 deepEqual(await db.query2('products').count().get(), { count: 77 }) +   ~~~~~ + +test/scenarios/northwind.ts:1525:41 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1525 deepEqual(await db.query2('products').count().groupBy('category').get(), { +   ~~~~~ + +test/scenarios/northwind.ts:1539:45 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1539 deepEqual(await db.query2('orderDetails').sum('quantity').get(), { +   ~~~ + +test/scenarios/northwind.ts:1550:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1550 .sum('quantity') +   ~~~ + +test/scenarios/northwind.ts:1563:8 - error TS2339: Property 'sum' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1563 .sum('quantity') +   ~~~ + +test/scenarios/northwind.ts:1584:41 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1584 deepEqual(await db.query2('products').avg('unitPrice').get(), { +   ~~~ + +test/scenarios/northwind.ts:1595:8 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1595 .avg('unitPrice') +   ~~~ + +test/scenarios/northwind.ts:1606:33 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1606 await db.query2('products').avg('unitPrice').groupBy('category').get(), +   ~~~ + +test/scenarios/northwind.ts:1638:45 - error TS2339: Property 'avg' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1638 deepEqual(await db.query2('orderDetails').avg('discountAmount').get(), { +   ~~~ + +test/scenarios/nycTaxi.ts:429:7 - error TS2322: Type 'string | number' is not assignable to type 'string | null | undefined'. + Type 'number' is not assignable to type 'string'. + +429 borough: taxiZoneLookup[i + 1], +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:430:7 - error TS2322: Type 'string | number' is not assignable to type 'string | null | undefined'. + Type 'number' is not assignable to type 'string'. + +430 zone: taxiZoneLookup[i + 2], +   ~~~~ + +test/scenarios/nycTaxi.ts:431:7 - error TS2322: Type 'string | number' is not assignable to type 'string | null | undefined'. + Type 'number' is not assignable to type 'string'. + +431 serviceZone: taxiZoneLookup[i + 3], +   ~~~~~~~~~~~ + +test/scenarios/nycTaxi.ts:438:7 - error TS2322: Type 'string | number' is not assignable to type 'string | null | undefined'. + Type 'number' is not assignable to type 'string'. + +438 name: rates[i + 1], +   ~~~~ + +test/scenarios/nycTaxi.ts:471:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +471 .include('id') +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:476:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +476 .include('id') +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:481:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +481 .include('id') +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:486:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +486 .include('id') +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:541:27 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +541 await db.query2('zone').include('borough').get().inspect() +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:551:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +551 .include( +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:560:27 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +560 await db.query2('trip').count().groupBy('dropoffLoc').get().inspect() +   ~~~~~ + +test/scenarios/nycTaxi.ts:561:27 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +561 await db.query2('trip').count().groupBy('paymentType').get().inspect() +   ~~~~~ + +test/scenarios/nycTaxi.ts:563:27 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +563 await db.query2('trip').count().get().inspect() +   ~~~~~ + +test/scenarios/nycTaxi.ts:596:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +596 .filter('pickupYear', '>=', 2022) +   ~~~~~~ + +test/scenarios/nycTaxi.ts:607:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +607 .include('name', (select) => { +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:621:6 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +621 .groupBy('pickup', { step: 'day', timeZone: 'America/New_York' }) +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:633:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +633 .filter('pickupHour', '>=', 7) +   ~~~~~~ + +test/scenarios/nycTaxi.ts:642:6 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +642 .groupBy('pickup', { step: 'dow', timeZone: 'America/New_York' }) +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:659:6 - error TS2339: Property 'groupBy' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +659 .groupBy('pickupDropoffLocs') +   ~~~~~~~ + +test/scenarios/nycTaxi.ts:670:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly zone: { props: { readonly locationId: { ...; }; ... 4 more ...; readonly dropoffs: { ...; }; }; }; readonly rate: { ...; }; readonly vendor: { ...; }; readonly ...'. + +670 .filter('pickupHour', '>=', 7) +   ~~~~~~ + +test/scenarios/vote.ts:155:10 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +155 .query2('round', final) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/vote.ts:174:10 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +174 .query2('round', final) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/vote.ts:303:10 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +303 .query2('payment', realIds) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/vote.ts:392:15 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +392 (await db.query2('vote').range(0, 1e6).get()).length, +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/voteEdges.ts:132:12 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +132 await db.query2('vote').get().inspect(1) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/voteEdges.ts:133:12 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +133 await db.query2('round', final).include('*', '**').get().inspect(1) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/voteLargeAmounts.perf.ts:138:26 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +138 const cnt = await db.query2('vote').count().get() +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/voteLargeAmounts.perf.ts:163:8 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +163 .query2('vote') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/scenarios/voteLargeAmounts.perf.ts:185:7 - error TS2345: Argument of type 'BasedDb' is not assignable to parameter of type 'DbServer'. + Type 'BasedDb' is missing the following properties from type 'DbServer': dbCtxExternal, migrating, saveInProgress, activeReaders, and 17 more. + +185 db, +   ~~ + +test/scenarios/voteLargeAmounts.perf.ts:199:16 - error TS2339: Property 'flushTime' does not exist on type 'DbClient'. + +199 client.flushTime = 10 +   ~~~~~~~~~ + +test/scenarios/voteStorage.ts:131:46 - error TS2345: Argument of type '{ fingerprint: string; status: "WebhookSuccess"; round: number; }' is not assignable to parameter of type '{ [x: string]: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; ... 11 more ...; createdAt?: s...'. + Property 'fingerprint' is incompatible with index signature. + Type 'string' is not assignable to type '{ add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { id: number | BasedModify<...>; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined'. + +131 const payment = client.create('payment', { +   ~ +132 fingerprint: `blablabla-${i}`, +  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +... +134 round: final, +  ~~~~~~~~~~~~~~~~~~~ +135 }) +  ~~~~~ + +test/scenarios/voteStorage.ts:136:40 - error TS2345: Argument of type '{ fingerprint: string; payment: BasedCreatePromise; round: number; }' is not assignable to parameter of type '{ [x: string]: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; ... 4 more ...; round?: number...'. + Property 'countries' is missing in type '{ fingerprint: string; payment: BasedCreatePromise; round: number; }' but required in type '{ [x: string]: { add?: (number | BasedModify | { id: number | BasedModify; })[] | undefined; update?: (number | BasedModify | { ...; })[] | undefined; delete?: (number | BasedModify<...>)[] | undefined; } | (number | ... 1 more ... | { ...; })[] | null | undefined; ... 4 more ...; round?: number...'. + +136 const vote = client.create('vote', { +   ~ +137 fingerprint: `blablabla-vote-${i}`, +  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +... +139 round: final, +  ~~~~~~~~~~~~~~~~~~~ +140 }) +  ~~~~~ + + test/scenarios/voteStorage.ts:125:9 + 125 countries: voteCountrySchema, +    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + 'countries' is declared here. + +test/scenarios/voteStorage.ts:186:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly round: { props: { readonly votes: Omit<{ ...; }, "items"> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +186 (await client.query2('round', final).include('votes').get()).votes.length, +   ~~~~~~~ + +test/scenarios/voteStorage.ts:195:47 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly round: { props: { readonly votes: Omit<{ ...; }, "items"> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +195 await client.query2('vote', randomId).include('round').get(), +   ~~~~~~~ + +test/scenarios/voteStorage.ts:201:47 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly round: { props: { readonly votes: Omit<{ ...; }, "items"> & { ...; }; }; }; readonly vote: { ...; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +201 const votes = await client.query2('vote').range(0, 1e6).include('id').get() +   ~~~~~ + +test/schema/parse/edges.ts:36:17 - error TS2322: Type '{ enum: string[]; }' is not assignable to type 'SchemaString | SchemaBinary | SchemaBoolean | SchemaCardinality | SchemaJson | SchemaNumber | ... 6 more ... | SchemaReference<...>'. + Type '{ enum: string[]; }' is not assignable to type 'SchemaEnum'. + Property 'type' is missing in type '{ enum: string[]; }' but required in type '{ type: "enum"; }'. + +36 $option: { enum: ['cool', 'dazzling'] }, +   ~~~~~~~ + + dist/schema/schema/enum.d.ts:5:5 + 5 type: 'enum'; +    ~~~~ + 'type' is declared here. + +test/schema/parse/edges.ts:40:15 - error TS2741: Property 'type' is missing in type '{ ref: string; prop: string; $nerds: { type: "boolean"; }; }' but required in type 'Omit, keyof Base>'. + +40 items: { +   ~~~~~ + + dist/schema/schema/reference.d.ts:18:5 + 18 type: 'reference'; +    ~~~~ + 'type' is declared here. + +test/schema/parse/edges.ts:54:15 - error TS2322: Type '{ enum: string[]; }' is not assignable to type 'SchemaString | SchemaBinary | SchemaBoolean | SchemaCardinality | SchemaJson | SchemaNumber | ... 6 more ... | SchemaReference<...>'. + Type '{ enum: string[]; }' is not assignable to type 'SchemaEnum'. + Property 'type' is missing in type '{ enum: string[]; }' but required in type '{ type: "enum"; }'. + +54 $option: { enum: ['cool', 'dazzling'] }, +   ~~~~~~~ + + dist/schema/schema/enum.d.ts:5:5 + 5 type: 'enum'; +    ~~~~ + 'type' is declared here. + +test/schema/parse/edges.ts:57:15 - error TS2741: Property 'type' is missing in type '{ ref: string; prop: string; $nerds: { type: "boolean"; }; }' but required in type 'Omit, keyof Base>'. + +57 items: { +   ~~~~~ + + dist/schema/schema/reference.d.ts:18:5 + 18 type: 'reference'; +    ~~~~ + 'type' is declared here. + +test/schema/parse/infer.ts:1:36 - error TS2307: Cannot find module '../../src/schema.js' or its corresponding type declarations. + +1 import type { Infer, Schema } from '../../src/schema.js' +   ~~~~~~~~~~~~~~~~~~~~~ + +test/schema/parse/references.ts:211:17 - error TS2578: Unused '@ts-expect-error' directive. + +211 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/schema/parse/schema.ts:12:7 - error TS2578: Unused '@ts-expect-error' directive. + +12 // @ts-expect-error +   ~~~~~~~~~~~~~~~~~~~ + +test/schema/props/write.ts:20:8 - error TS2307: Cannot find module '../../../src/schema/defs/props/separate.js' or its corresponding type declarations. + +20 } from '../../../src/schema/defs/props/separate.js' +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/schema/serialize/serialize.ts:46:13 - error TS2353: Object literal may only specify known properties, and 'required' does not exist in type '{ fallback: ("id" | "none" | "aa" | "ab" | "af" | "ak" | "sq" | "am" | "ar" | "an" | "hy" | "as" | "av" | "ae" | "ay" | "az" | "eu" | "be" | "bn" | "bi" | "bs" | "br" | "bg" | "my" | "ca" | ... 121 more ... | "cnr")[]; }'. + +46 en: { required: true }, +   ~~~~~~~~ + +test/schema/serialize/serialize.ts:47:7 - error TS2741: Property 'fallback' is missing in type '{}' but required in type '{ fallback: ("id" | "none" | "aa" | "ab" | "af" | "ak" | "sq" | "am" | "ar" | "an" | "hy" | "as" | "av" | "ae" | "ay" | "az" | "eu" | "be" | "bn" | "bi" | "bs" | "br" | "bg" | "my" | "ca" | ... 121 more ... | "cnr")[]; }'. + +47 nl: {}, +   ~~ + + dist/schema/schema/locales.d.ts:5:5 + 5 fallback: LangName[]; +    ~~~~~~~~ + 'fallback' is declared here. + +test/schema/serialize/serialize.ts:153:13 - error TS2353: Object literal may only specify known properties, and 'required' does not exist in type '{ fallback: ("id" | "none" | "aa" | "ab" | "af" | "ak" | "sq" | "am" | "ar" | "an" | "hy" | "as" | "av" | "ae" | "ay" | "az" | "eu" | "be" | "bn" | "bi" | "bs" | "br" | "bg" | "my" | "ca" | ... 121 more ... | "cnr")[]; }'. + +153 en: { required: true }, +   ~~~~~~~~ + +test/schema/serialize/serialize.ts:154:7 - error TS2741: Property 'fallback' is missing in type '{}' but required in type '{ fallback: ("id" | "none" | "aa" | "ab" | "af" | "ak" | "sq" | "am" | "ar" | "an" | "hy" | "as" | "av" | "ae" | "ay" | "az" | "eu" | "be" | "bn" | "bi" | "bs" | "br" | "bg" | "my" | "ca" | ... 121 more ... | "cnr")[]; }'. + +154 nl: {}, +   ~~ + + dist/schema/schema/locales.d.ts:5:5 + 5 fallback: LangName[]; +    ~~~~~~~~ + 'fallback' is declared here. + +test/search.ts:29:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +29 .filter('body', 'like', 'article') +   ~~~~~~ + +test/search.ts:41:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +41 .filter('body', 'like', 'snurfelpants') +   ~~~~~~ + +test/search.ts:53:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +53 .filter('body', 'like', ['snurfelpants', 'article']) +   ~~~~~~ + +test/search.ts:65:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +65 .filter('body', 'like', 'kxngdom') +   ~~~~~~ + +test/search.ts:78:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +78 .filter('body', 'like', 'derperp') +   ~~~~~~ + +test/search.ts:91:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +91 .filter('body', 'like', 'kxngdom', { score: 0 }) +   ~~~~~~ + +test/search.ts:128:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +128 .search('Netherlands', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:140:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +140 .search('giraffe', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:152:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +152 .search('kingdom', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:165:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +165 .search('Netherlands', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:178:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +178 .search('giraffe', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:191:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +191 .search('derp', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:204:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +204 .search('giraffe first', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:216:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +216 .search('first', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:229:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +229 .search('second', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:242:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +242 .search('giraffe first', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:255:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +255 .search('italy netherlands', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:268:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +268 .search('italy netherlands', 'body', 'title') +   ~~~~~~ + +test/search.ts:281:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +281 .search('italy netherlands', 'body', 'title') +   ~~~~~~ + +test/search.ts:314:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +314 .search('giraffe first', 'body') +   ~~~~~~ + +test/search.ts:327:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +327 .search('derp derp', 'body', 'title') +   ~~~~~~ + +test/search.ts:374:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +374 .search('first', 'body') +   ~~~~~~ + +test/search.ts:386:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +386 .search('first', 'body') +   ~~~~~~ + +test/search.ts:420:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +420 .filter('body', 'like', 'mihailovitsin') +   ~~~~~~ + +test/search.ts:431:10 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly body: { readonly type: "string"; readonly compression: "none"; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +431 .filter('body', 'like', 'mihailovitšin') +   ~~~~~~ + +test/search.ts:461:8 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly date: { readonly type: "uint32"; }; readonly title: { ...; }; readonly body: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 mor...'. + +461 .search('giraffe first', { body: 0, title: 1 }) +   ~~~~~~ + +test/search.ts:486:40 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly name: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +486 deepEqual(await db.query2('article').search('Kavel').get(), [ +   ~~~~~~ + +test/serializeQueryDef.ts:33:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +33 deepEqual(await db.query2('user').filter('isNice', '=', true).get(), [ +   ~~~~~~ + +test/serializeQueryDef.ts:37:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +37 deepEqual(await db.query2('user').filter('isNice').get(), [ +   ~~~~~~ + +test/serializeQueryDef.ts:41:37 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly isNice: { type: "boolean"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +41 deepEqual(await db.query2('user').filter('isNice', false).get(), [ +   ~~~~~~ + +test/shared/test.ts:89:45 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +89 let x = await client.query2(type).include('*', '**').get() +   ~~~~~~~ + +test/shared/test.ts:92:50 - error TS2339: Property 'count' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +92 counts.push((await client.query2(type).count().get()).count) +   ~~~~~ + +test/simpleQuery.ts:36:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly age: { readonly type: "uint32"; }; readonly name: { ...; }; readonly countryCode: { ...; }; readonly location: Omit<...> & { ...; }; }...'. + +36 deepEqual(await db.query2('user').include('id').get(), [{ id: 1 }], 'Id only') +   ~~~~~~~ + +test/simpleQuery.ts:39:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly age: { readonly type: "uint32"; }; readonly name: { ...; }; readonly countryCode: { ...; }; readonly location: Omit<...> & { ...; }; }...'. + +39 await db.query2('user').filter('age', '<', 20).include('id', 'age').get(), +   ~~~~~~ + +test/simpleQuery.ts:44:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly age: { readonly type: "uint32"; }; readonly name: { ...; }; readonly countryCode: { ...; }; readonly location: Omit<...> & { ...; }; }...'. + +44 await db.query2('user').include('*').get(), +   ~~~~~~~ + +test/singleRef.ts:100:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bla: { readonly type: "uint32"; }; readonly simple: { ...; } & { ...; }; readonly name: { ...; }; }; }; readonly simple: { ...; }; }; ...'. + +100 deepEqual(await client.query2('simple').include('user.name').get(), [ +   ~~~~~~~ + +test/singleRef.ts:114:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly bla: { readonly type: "uint32"; }; readonly simple: { ...; } & { ...; }; readonly name: { ...; }; }; }; readonly simple: { ...; }; }; ...'. + +114 deepEqual(await client.query2('simple').include('user.name').get(), [ +   ~~~~~~~ + +test/singleRef.ts:197:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: { ...;...'. + +197 deepEqual(await db.query2('blup').include('flap').get(), [ +   ~~~~~~~ + +test/singleRef.ts:204:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: { ...;...'. + +204 const result1 = await db.query2('user').include('myBlup.flap').get() +   ~~~~~~~ + +test/singleRef.ts:210:44 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: { ...;...'. + +210 const result = await db.query2('simple').include('user.myBlup.flap').get() +   ~~~~~~~ + +test/singleRef.ts:216:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: { ...;...'. + +216 deepEqual(await db.query2('user').include('simple').get(), [ +   ~~~~~~~ + +test/singleRef.ts:229:39 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: { ...;...'. + +229 deepEqual(await db.query2('simple').include('user').get(), [ +   ~~~~~~~ + +test/singleRef.ts:236:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: { ...;...'. + +236 deepEqual(await db.query2('user').include('simple').get(), [ +   ~~~~~~~ + +test/singleRef.ts:304:39 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; readonly admin: { ...; } & ...'. + +304 deepEqual(await db.query2('simple').include('admin.user').get(), [ +   ~~~~~~~ + +test/singleRef.ts:406:39 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +406 deepEqual(await db.query2('simple').include('id').range(0, 1).get(), [ +   ~~~~~~~ + +test/singleRef.ts:411:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +411 await db.query2('simple').include('user').range(0, 1).get(), +   ~~~~~~~ + +test/singleRef.ts:422:31 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +422 await db.query2('simple', lastRes).include('user.location').get(), +   ~~~~~~~ + + +test/singleRef.ts:422:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +422 await db.query2('simple', lastRes).include('user.location').get(), +   ~~~~~~~ + +test/singleRef.ts:434:31 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +434 await db.query2('simple', lastRes).include('user').get(), +   ~~~~~~~ + + +test/singleRef.ts:434:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +434 await db.query2('simple', lastRes).include('user').get(), +   ~~~~~~~ + +test/singleRef.ts:454:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +454 .include('user.myBlup') +   ~~~~~~~ + +test/singleRef.ts:467:31 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +467 await db.query2('simple', lastRes).include('user.myBlup').get(), +   ~~~~~~~ + + +test/singleRef.ts:467:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +467 await db.query2('simple', lastRes).include('user.myBlup').get(), +   ~~~~~~~ + +test/singleRef.ts:476:31 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +476 await db.query2('simple', lastRes).include('user.myBlup', 'lilBlup').get(), +   ~~~~~~~ + + +test/singleRef.ts:476:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +476 await db.query2('simple', lastRes).include('user.myBlup', 'lilBlup').get(), +   ~~~~~~~ + +test/singleRef.ts:486:32 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +486 (await db.query2('simple', lastRes).include('user.myBlup').get()).node() +   ~~~~~~~ + + +test/singleRef.ts:486:41 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +486 (await db.query2('simple', lastRes).include('user.myBlup').get()).node() +   ~~~~~~~ + +test/singleRef.ts:495:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +495 .include('user.myBlup', 'lilBlup', 'user.name') +   ~~~~~~~ + +test/singleRef.ts:513:31 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +513 await db.query2('simple', lastRes).include('user.location.label').get(), +   ~~~~~~~ + + +test/singleRef.ts:513:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +513 await db.query2('simple', lastRes).include('user.location.label').get(), +   ~~~~~~~ + +test/singleRef.ts:517:39 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +517 deepEqual(await db.query2('simple', lastRes).include('user.location').get(), { +   ~~~~~~~ + + +test/singleRef.ts:517:48 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +517 deepEqual(await db.query2('simple', lastRes).include('user.location').get(), { +   ~~~~~~~ + +test/singleRef.ts:525:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +525 .include('user.myBlup', 'lilBlup') +   ~~~~~~~ + +test/singleRef.ts:551:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +551 .include('user', 'user.myBlup') +   ~~~~~~~ + +test/singleRef.ts:575:25 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "simple", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number[] | undefined'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "simple", id: number | (Partial; }, "simple">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise | undefined' is not assignable to parameter of type 'number | (Partial; }, "simple">> & { ...; })'. + Type 'undefined' is not assignable to type 'number | (Partial; }, "simple">> & { ...; })'. + +575 .query2('simple', lastRes) +   ~~~~~~~ + + +test/singleRef.ts:576:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +576 .include('user', 'user.myBlup', 'lilBlup') +   ~~~~~~~ + +test/singleRef.ts:658:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly myBlup: { ...; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: {...'. + +658 .include('user', 'user.myBlup', 'lilBlup') +   ~~~~~~~ + +test/singleRef.ts:673:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly myBlup: { ...; } & { ...; }; readonly simple: { ...; } & { ...; }; }; }; readonly blup: {...'. + +673 .filter('age', '=', 5) +   ~~~~~~ + +test/singleRefQuery.ts:108:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; readonly name: { ...; }; };...'. + +108 .filter('user.myBlup.age', '=', 10) +   ~~~~~~ + +test/singleRefQuery.ts:123:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; readonly simple: { ...; } & { ...; }; readonly name: { ...; }; };...'. + +123 .filter('lilBlup.age', '=', 20) +   ~~~~~~ + +test/sort/sort.perf.ts:27:9 - error TS2365: Operator '<' cannot be applied to types 'void' and 'number'. + +27 equal(dbTime < 1000, true, 'db modify should not take longer then 1s') +   ~~~~~~~~~~~~~ + +test/sort/sort.perf.ts:39:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +39 .include('age', 'name', 'email') +   ~~~~~~~ + +test/sort/sort.ts:56:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +56 await db.query2('user').sort('age', 'desc').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:68:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +68 await db.query2('user').sort('age', 'asc').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:82:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +82 await db.query2('user').sort('email', 'asc').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:94:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +94 await db.query2('user').sort('email', 'desc').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:114:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +114 await db.query2('user').sort('email').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:127:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +127 await db.query2('user').sort('age').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:146:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +146 await db.query2('user').sort('email').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:165:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +165 await db.query2('user').sort('age').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:184:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +184 await db.query2('user').sort('age').include('email', 'age').get(), +   ~~~~ + +test/sort/sort.ts:196:37 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +196 deepEqual(await db.query2('user').sort('age').include('email', 'age').get(), [ +   ~~~~ + +test/sort/sort.ts:218:34 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +218 await db.query2('user', ids).include('name', 'age').sort('age').get(), +   ~~~~~~~ + +test/sort/sort.ts:237:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +237 .include('name', 'age') +   ~~~~~~~ + +test/sort/sort.ts:263:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +263 .include('name', 'age') +   ~~~~~~~ + +test/sort/sort.ts:293:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +293 .include('name', 'age') +   ~~~~~~~ + +test/sort/sort.ts:322:36 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +322 (await db.query2('user', ids2).include('name', 'age', 'email').get()) +   ~~~~~~~ + +test/sort/sort.ts:332:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +332 .include('name', 'age', 'email') +   ~~~~~~~ + +test/sort/sort.ts:344:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +344 .include('name', 'age', 'email') +   ~~~~~~~ + +test/sort/sort.ts:360:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +360 .include('name', 'age', 'email') +   ~~~~~~~ + +test/sort/sort.ts:378:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +378 .include('name', 'age', 'email') +   ~~~~~~~ + +test/sort/sort.ts:394:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; readonly email: { ...; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +394 .include('name', 'age', 'email') +   ~~~~~~~ + +test/sort/sort.ts:444:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly gender: { readonly type: "uint32"; }; readonly age: { ...; }; readonly name: { ...; }; readonly email: { ...; }; }; }; }; locales: Par...'. + +444 await client.query2('user').include('name').sort('age').range(0, 2).get(), +   ~~~~~~~ + +test/sort/sort.ts:452:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly gender: { readonly type: "uint32"; }; readonly age: { ...; }; readonly name: { ...; }; readonly email: { ...; }; }; }; }; locales: Par...'. + +452 await client.query2('user').include('name').sort('age').range(0, 2).get(), +   ~~~~~~~ + +test/sort/sort.ts:460:33 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly gender: { readonly type: "uint32"; }; readonly age: { ...; }; readonly name: { ...; }; readonly email: { ...; }; }; }; }; locales: Par...'. + +460 await client.query2('user').include('name').sort('name').range(0, 2).get(), +   ~~~~~~~ + +test/sort/sort.ts:485:34 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly gender: { readonly type: "uint32"; }; readonly age: { ...; }; readonly name: { ...; }; readonly email: { ...; }; }; }; }; locales: Par...'. + +485 await client2.query2('user').include('name').sort('name').range(0, 2).get(), +   ~~~~~~~ + +test/sort/sort.ts:512:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +512 await db.query2('dialog').sort('fun', 'desc').get() +   ~~~~ + +test/sort/sort.ts:531:31 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +531 await db.query2('dialog').sort('fun', 'desc').get(), +   ~~~~ + +test/sort/sort.ts:557:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +557 deepEqual(await db.query2('dialog').sort('fun', 'desc').get(), [ +   ~~~~ + +test/sort/sort.ts:569:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +569 deepEqual(await db.query2('dialog').sort('fun', 'desc').get(), [ +   ~~~~ + +test/sort/sort.ts:595:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "string"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +595 deepEqual(await db.query2('dialog').sort('fun', 'desc').get(), [ +   ~~~~ + +test/sort/sortAlias.ts:28:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +28 await db.query2('article').sort('email', 'desc').get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/sort/sortAlias.ts:44:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +44 await db.query2('article').sort('email', 'desc').get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/sort/sortAlias.ts:57:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +57 await db.query2('article').sort('email', 'desc').get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/sort/sortAlias.ts:73:14 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +73 await db.query2('article').sort('email', 'desc').get(), +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/sort/sortBinary.ts:38:39 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly binary: { props: { readonly data: { readonly type: "binary"; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +38 deepEqual(await db.query2('binary').include('name', 'data').get(), [ +   ~~~~~~~ + +test/sort/sortBinary.ts:45:31 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly binary: { props: { readonly data: { readonly type: "binary"; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +45 await db.query2('binary').sort('data').include('name', 'data').get(), +   ~~~~ + +test/sort/sortBinary.ts:57:8 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly binary: { props: { readonly data: { readonly type: "binary"; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +57 .sort('data', 'desc') +   ~~~~ + +test/sort/sortById.ts:33:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +33 await db.query2('user').include('name').sort('id', 'asc').get(), +   ~~~~~~~ + +test/sort/sortById.ts:39:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +39 await db.query2('user').include('name').sort('id', 'desc').get(), +   ~~~~~~~ + +test/sort/sortById.ts:51:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly derp: { ...; }; readonly friends: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 m...'. + +51 await db.query2('user').include('name', 'friends.name').range(0, 1).get(), +   ~~~~~~~ + +test/sort/sortEnum.ts:37:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly status: { type: "enum"; enum: string[]; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +37 .filter('status', '=', ['a', 'b', 'c']) +   ~~~~~~ + +test/sort/sortEnum.ts:43:23 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly status: { type: "enum"; enum: string[]; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +43 db.query2('user').filter('status', '=', ['d']).range(0, 600).get(), +   ~~~~~~ + +test/sort/sortHll.ts:55:8 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly count: { ...; }; readonly brazilians: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ....'. + +55 .sort('brazilians', 'desc') +   ~~~~ + +test/sort/sortHll.ts:74:32 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly count: { ...; }; readonly brazilians: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ....'. + +74 await db.query2('article').sort('count', 'asc').include('derp').get(), +   ~~~~ + +test/sort/sortHll.ts:97:8 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly count: { ...; }; readonly brazilians: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ....'. + +97 .sort('count', 'asc') +   ~~~~ + +test/sort/sortHll.ts:144:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly count: { ...; }; readonly brazilians: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ....'. + +144 .filter('id', '=', testRecordId) +   ~~~~~~ + +test/sort/sortHll.ts:170:32 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly count: { ...; }; readonly brazilians: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ....'. + +170 await db.query2('article').sort('count', 'desc').include('count').get(), +   ~~~~ + +test/sort/sortHll.ts:191:8 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly count: { ...; }; readonly brazilians: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ....'. + +191 .sort('brazilians', 'desc') +   ~~~~ + +test/sort/sortHll.ts:205:5 - error TS2322: Type 'Uint8Array' is not assignable to type 'string | string[] | null | undefined'. + Type 'Uint8Array' is missing the following properties from type 'string[]': pop, push, concat, shift, and 6 more. + +205 count: xxHash64(ENCODER.encode('name1')), +   ~~~~~ + +test/sort/sortHll.ts:227:32 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly derp: { type: "number"; }; readonly count: { ...; }; readonly brazilians: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ....'. + +227 await db.query2('article').sort('count', 'desc').include('count').get(), +   ~~~~ + +test/sort/sortIds.ts:31:9 - error TS2322: Type 'number' is not assignable to type '0 | 1 | 2 | 3 | 4 | 5 | null | undefined'. + +31 bla: ~~(Math.random() * 5), +   ~~~ + +test/sort/sortIds.ts:39:41 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly mep: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +39 isSorted(await db.query2('user', ids).sort('age').get(), 'age') +   ~~~~ + +test/sort/sortIds.ts:40:41 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly mep: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +40 isSorted(await db.query2('user', ids).sort('name').get(), 'name') +   ~~~~ + +test/sort/sortIds.ts:41:41 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly mep: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +41 isSorted(await db.query2('user', ids).sort('flap').get(), 'flap') +   ~~~~ + +test/sort/sortIds.ts:42:41 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly mep: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +42 isSorted(await db.query2('user', ids).sort('blurf').get(), 'blurf') +   ~~~~ + +test/sort/sortIds.ts:43:41 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly mep: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +43 isSorted(await db.query2('user', ids).sort('bla').get(), 'bla') +   ~~~~ + +test/sort/sortIds.ts:44:41 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; ... 5 more ...; readonly mep: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +44 isSorted(await db.query2('user', ids).sort('mep').get(), 'mep') +   ~~~~ + +test/sort/sortIds.ts:96:10 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly flap: { readonly type: "uint32"; }; readonly name: { ...; }; readonly articles: Omit<...> & { ...; }; }; }; readonly article: { ...; }...'. + +96 .include((s) => s('contributors').sort('flap')) +   ~~~~~~~ + +test/sort/sortNodeId.ts:27:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly person: { props: { readonly name: { readonly type: "string"; }; readonly age: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +27 .include('name') +   ~~~~~~~ + +test/sort/sortNumber.ts:52:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +52 isSorted(await db.query2('example').sort('u32').include('u32').get(), 'u32') +   ~~~~ + +test/sort/sortNumber.ts:54:32 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +54 await db.query2('example').sort('boolean').include('boolean').get(), +   ~~~~ + +test/sort/sortNumber.ts:57:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +57 isSorted(await db.query2('example').sort('u8').include('u8').get(), 'u8') +   ~~~~ + +test/sort/sortNumber.ts:58:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +58 isSorted(await db.query2('example').sort('i8').include('i8').get(), 'i8') +   ~~~~ + +test/sort/sortNumber.ts:59:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +59 isSorted(await db.query2('example').sort('i16').include('i16').get(), 'i16') +   ~~~~ + +test/sort/sortNumber.ts:60:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +60 isSorted(await db.query2('example').sort('i32').include('i32').get(), 'i32') +   ~~~~ + +test/sort/sortNumber.ts:62:32 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +62 await db.query2('example').sort('number').include('number').get(), +   ~~~~ + +test/sort/sortNumber.ts:66:32 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +66 await db.query2('example').sort('timestamp').include('timestamp').get(), +   ~~~~ + +test/sort/sortNumber.ts:73:8 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +73 .sort('enum') +   ~~~~ + +test/sort/sortNumber.ts:80:39 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +80 isSorted(await db.query2('example').sort('u32').include('u32').get(), 'u32') +   ~~~~ + +test/sort/sortNumber.ts:84:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly example: { props: { readonly enum: { type: "enum"; enum: string[]; }; ... 8 more ...; readonly i32: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ...,...'. + +84 .include('enum') +   ~~~~~~~ + +test/sort/sortString.ts:70:11 - error TS2365: Operator '<' cannot be applied to types 'void' and 'number'. + +70 equal(dbTime < 1000, true, 'db modify should not take longer then 1s') +   ~~~~~~~~~~~~~ + +test/sort/sortString.ts:80:10 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +80 .query2('article') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/sort/sortString.ts:91:10 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +91 .query2('article') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/sort/sortString.ts:149:32 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly article: { props: { readonly name: { readonly type: "string"; readonly maxBytes: 20; }; readonly nr: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ......'. + +149 await db.query2('article').include('name', 'nr').sort('name', 'desc').get(), +   ~~~~~~~ + +test/sort/sortTimestamp.ts:43:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly flap: { type: "number"; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +43 .sort('startTime', 'asc') +   ~~~~ + +test/sort/sortTimestamp.ts:56:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly flap: { type: "number"; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +56 .sort('startTime', 'asc') +   ~~~~ + +test/sort/sortTimestamp.ts:72:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly flap: { type: "number"; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +72 .sort('startTime', 'asc') +   ~~~~ + +test/sort/sortTimestamp.ts:85:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly flap: { type: "number"; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +85 .sort('startTime', 'desc') +   ~~~~ + +test/sort/sortTimestamp.ts:101:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly flap: { type: "number"; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +101 .sort('startTime', 'asc') +   ~~~~ + +test/sort/sortTimestamp.ts:114:6 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly flap: { type: "number"; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., ...'. + +114 .sort('startTime', 'desc') +   ~~~~ + +test/sort/sortTimestamp.ts:152:30 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly derp: { type: "boolean"; }; readonly flap: { ...; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial...'. + +152 await db.query2('event').sort('startTime', 'asc').get(), +   ~~~~ + +test/sort/sortTimestamp.ts:162:36 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly event: { props: { readonly derp: { type: "boolean"; }; readonly flap: { ...; }; readonly startTime: { ...; }; readonly name: { ...; }; }; }; }; locales: Partial...'. + +162 await db.query2('event').sort('startTime', 'asc').get(), +   ~~~~ + +test/string.ts:42:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 5 more ...; readonly location: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 ...'. + +42 deepEqual(await db.query2('user').include('name', 'snurp').get(), [ +   ~~~~~~~ + +test/string.ts:50:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 5 more ...; readonly location: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 ...'. + +50 deepEqual(await db.query2('user').include('name', 'snurp', 'age').get(), [ +   ~~~~~~~ + +test/string.ts:62:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 5 more ...; readonly location: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 ...'. + +62 .include( +   ~~~~~~~ + +test/string.ts:87:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 5 more ...; readonly location: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 ...'. + +87 deepEqual(await db.query2('user').include('location.label').get(), [ +   ~~~~~~~ + +test/string.ts:96:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 5 more ...; readonly location: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 ...'. + +96 deepEqual(await db.query2('user').include('location').get(), [ +   ~~~~~~~ + +test/string.ts:107:37 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 5 more ...; readonly location: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 ...'. + +107 deepEqual(await db.query2('user').include('location', 'burp').get(), [ +   ~~~~~~~ + +test/string.ts:122:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { readonly type: "string"; }; ... 5 more ...; readonly location: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 ...'. + +122 .include( +   ~~~~~~~ + +test/string.ts:258:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +258 .include('user.name', 'user.myBlup.name') +   ~~~~~~~ + +test/string.ts:283:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly myBlup: { readonly ref: "blup"; readonly prop: "user"; } & { ...; }; ... 7 more ...; readonly location: Omit<...> & { ...; }; }; }; re...'. + +283 await db.query2('simple').include('user.name', 'user.myBlup.name').get(), +   ~~~~~~~ + +test/string.ts:322:30 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "file", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "file", id: number | (Partial; }, "file">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "file">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial; }, "file">> & { ...; }'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +322 (await db.query2('file', file).get()).node().contents, +   ~~~~ + + test/string.ts:322:30 + 322 (await db.query2('file', file).get()).node().contents, +    ~~~~ + Did you forget to use 'await'? + +test/string.ts:322:43 - error TS2339: Property 'node' does not exist on type '{ id: number; contents: string; }[]'. + +322 (await db.query2('file', file).get()).node().contents, +   ~~~~ + +test/string.ts:333:41 - error TS2339: Property 'size' does not exist on type '{ id: number; contents: string; }[]'. + +333 equal((await db.query2('file').get()).size > 1000 * 1e3, true) +   ~~~~ + +test/string.ts:357:7 - error TS2322: Type 'Buffer' is not assignable to type 'string'. + +357 contents: x, +   ~~~~~~~~ + +test/string.ts:398:30 - error TS2769: No overload matches this call. + Overload 1 of 2, '(type: "file", id?: number[] | undefined): BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { ...; }; locales: Partial<...>; }, ... 5 more ..., undefined>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number[]'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is missing the following properties from type 'number[]': length, pop, push, concat, and 35 more. + Overload 2 of 2, '(type: "file", id: number | (Partial; }, "file">> & { ...; })): BasedQuery2<...>', gave the following error. + Argument of type 'BasedCreatePromise' is not assignable to parameter of type 'number | (Partial; }, "file">> & { ...; })'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type 'Partial; }, "f...'. + Type 'BasedModify<(schema: SchemaOut, type: string, payload: Record, buf: AutoSizedUint8Array, lang: LangCodeEnum) => void>' is not assignable to type '{ [Symbol.toStringTag]?: undefined; }'. + Types of property '[Symbol.toStringTag]' are incompatible. + Type '"BasedModify"' is not assignable to type 'undefined'. + +398 (await db.query2('file', file).get()).node().contents, +   ~~~~ + + test/string.ts:398:30 + 398 (await db.query2('file', file).get()).node().contents, +    ~~~~ + Did you forget to use 'await'? + +test/string.ts:398:43 - error TS2339: Property 'node' does not exist on type '{ id: number; name: string; contents: string; }[]'. + +398 (await db.query2('file', file).get()).node().contents, +   ~~~~ + +test/string.ts:451:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly file: { props: { readonly contentsUncompressed: { ...; }; readonly contentsCompressed: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +451 .include('contentsUncompressed') +   ~~~~~~~ + +test/string.ts:457:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly file: { props: { readonly contentsUncompressed: { ...; }; readonly contentsCompressed: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +457 .include('contentsCompressed') +   ~~~~~~~ + +test/subscription/subscription.perf.ts:36:16 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +36 const q = db.query2('user', 1) +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/subscription/subscription.perf.ts:49:9 - error TS2345: Argument of type 'BasedDb' is not assignable to parameter of type 'DbServer'. + Type 'BasedDb' is missing the following properties from type 'DbServer': dbCtxExternal, migrating, saveInProgress, activeReaders, and 17 more. + +49 db, +   ~~ + +test/subscription/subscription.perf.ts:90:16 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +90 const q = db.query2('user', 1).include('flap') +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/subscription/subscription.perf.ts:104:12 - error TS2339: Property 'addIdSubscription' does not exist on type '{ externalFromInt(address: BigInt): any; intFromExternal(external: any): BigInt; query: (q: Uint8Array, dbCtx: any) => ArrayBuffer | null; ... 14 more ...; selvaLangAll: () => string; }'. + +104 native.addIdSubscription(db.server.dbCtxExternal, q.subscriptionBuffer!) +   ~~~~~~~~~~~~~~~~~ + +test/subscription/subscription.ts:16:40 - error TS2554: Expected 1 arguments, but got 2. + +16 hooks: getDefaultHooks(server, subTime), +   ~~~~~~~ + +test/subscription/subscription.ts:46:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +46 .include('derp') +   ~~~~~~~ + +test/subscription/subscription.ts:81:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +81 .include('lang') +   ~~~~~~~ + +test/subscription/subscription.ts:114:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +114 .include('location') +   ~~~~~~~ + +test/subscription/subscription.ts:149:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +149 .include('derp') +   ~~~~~~~ + +test/subscription/subscription.ts:194:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +194 .include('items') +   ~~~~~~~ + +test/subscription/subscriptionId.ts:15:40 - error TS2554: Expected 1 arguments, but got 2. + +15 hooks: getDefaultHooks(server, time), +   ~~~~ + +test/subscription/subscriptionId.ts:45:47 - error TS2339: Property 'subscribe' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +45 const close = clients[0].query2('user', id).subscribe((d) => { +   ~~~~~~~~~ + +test/subscription/subscriptionId.ts:51:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +51 .include('name') +   ~~~~~~~ + +test/subscription/subscriptionId.ts:107:47 - error TS2339: Property 'subscribe' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +107 const close = clients[0].query2('user', id).subscribe((d) => { +   ~~~~~~~~~ + +test/subscription/subscriptionId.ts:112:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +112 .include('name') +   ~~~~~~~ + +test/subscription/subscriptionId.ts:127:16 - error TS2339: Property 'subscriptions' does not exist on type 'DbServer'. + +127 equal(server.subscriptions.active, 0, 'remove all subs') +   ~~~~~~~~~~~~~ + +test/subscription/subscriptionIdPartial.ts:15:40 - error TS2554: Expected 1 arguments, but got 2. + +15 hooks: getDefaultHooks(server, 1), +   ~ + +test/subscription/subscriptionIdPartial.ts:49:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +49 .filter('x', '>', 5) +   ~~~~~~ + +test/subscription/subscriptionIdPartial.ts:97:47 - error TS2339: Property 'subscribe' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +97 const close = clients[0].query2('user', id).subscribe((d) => { +   ~~~~~~~~~ + +test/subscription/subscriptionIdPartial.ts:103:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +103 .include('x', 'gurk', 'rurp', 'flap') +   ~~~~~~~ + +test/subscription/subscriptionIdRemove.ts:15:40 - error TS2554: Expected 1 arguments, but got 2. + +15 hooks: getDefaultHooks(server, 1), +   ~ + +test/subscription/subscriptionIdRemove.ts:71:58 - error TS2339: Property 'subscribe' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +71 const subscription = clients[0].query2('user', id).subscribe(() => { +   ~~~~~~~~~ + +test/subscription/subscriptionIdRemove.ts:242:16 - error TS2339: Property 'subscriptions' does not exist on type 'DbServer'. + +242 equal(server.subscriptions.active, 0) +   ~~~~~~~~~~~~~ + +test/subscription/subscriptionMulti.perf.ts:15:40 - error TS2554: Expected 1 arguments, but got 2. + +15 hooks: getDefaultHooks(server, subTime), +   ~~~~~~~ + +test/subscription/subscriptionMulti.perf.ts:47:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +47 .filter('derp', '>', 1e6 - 10) +   ~~~~~~ + +test/subscription/subscriptionNow.ts:15:40 - error TS2554: Expected 1 arguments, but got 2. + +15 hooks: getDefaultHooks(server, 100), +   ~~~ + +test/subscription/subscriptionNow.ts:70:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +70 .filter('date', '<', 'now - 2s') +   ~~~~~~ + +test/subscription/subscriptionNow.ts:78:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +78 .filter('date', '<', 'now - 2s') +   ~~~~~~ + +test/subscription/subscriptionNow.ts:93:16 - error TS2339: Property 'subscriptions' does not exist on type 'DbServer'. + +93 equal(server.subscriptions.active, 0, 'Removed all active subs') +   ~~~~~~~~~~~~~ + +test/subscription/subscriptionNow.ts:94:16 - error TS2339: Property 'subscriptions' does not exist on type 'DbServer'. + +94 equal(server.subscriptions.now.listeners.size, 0, 'Remove all now listeners') +   ~~~~~~~~~~~~~ + +test/subscription/subscriptionNow.ts:117:6 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +117 .locale('en') +   ~~~~~~ + +test/subscription/subscriptionNow.ts:143:16 - error TS2339: Property 'subscriptions' does not exist on type 'DbServer'. + +143 equal(server.subscriptions.active, 0, 'Removed all active subs') +   ~~~~~~~~~~~~~ + +test/subscription/subscriptionNow.ts:144:16 - error TS2339: Property 'subscriptions' does not exist on type 'DbServer'. + +144 equal(server.subscriptions.now.listeners.size, 0, 'Remove all now listeners') +   ~~~~~~~~~~~~~ + +test/subscription/subscriptionSchemaChanges.ts:16:40 - error TS2554: Expected 1 arguments, but got 2. + +16 hooks: getDefaultHooks(server, 10), +   ~~ + +test/subscription/subscriptionSchemaChanges.ts:52:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +52 .include('derp', 'lang') +   ~~~~~~~ + +test/subscription/subscriptionSchemaChanges.ts:136:6 - error TS2551: Property 'query2' does not exist on type 'BasedDb'. Did you mean 'query'? + +136 db.query2('user').subscribe((res) => { +   ~~~~~~ + + src/index.ts:101:3 + 101 query: DbClient['query'] = function (this: BasedDb) { +    ~~~~~ + 'query' is declared here. + +test/subscription/subscriptionWorkers.perf.ts:64:7 - error TS2345: Argument of type 'BasedDb' is not assignable to parameter of type 'DbServer'. + Type 'BasedDb' is missing the following properties from type 'DbServer': dbCtxExternal, migrating, saveInProgress, activeReaders, and 17 more. + +64 db, +   ~~ + +test/subscription/subscriptionWorkers.perf.ts:71:14 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2'. + +71 .filter('fromCountry', '=', ['AE', 'NL']) +   ~~~~~~ + +test/subscription/subscriptionWorkers.perf.ts:76:16 - error TS2339: Property 'flushTime' does not exist on type 'DbClient'. + +76 client.flushTime = 0 +   ~~~~~~~~~ + +test/text/text.ts:32:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +32 await db.query2('dialog').include('id', 'fun').get(), +   ~~~~~~~ + +test/text/text.ts:47:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +47 await db.query2('dialog').include('id').get(), +   ~~~~~~~ + +test/text/text.ts:57:31 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +57 await db.query2('dialog').locale('it').include('id', 'fun').get(), +   ~~~~~~ + +test/text/text.ts:70:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +70 .locale('it') +   ~~~~~~ + +test/text/text.ts:81:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +81 .include('id', 'fun') +   ~~~~~~~ + +test/text/text.ts:100:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +100 .locale('it') +   ~~~~~~ + +test/text/text.ts:116:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +116 .include('id', 'fun') +   ~~~~~~~ + +test/text/text.ts:126:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +126 .include('id', 'fun') +   ~~~~~~~ + +test/text/text.ts:145:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +145 .locale('en') +   ~~~~~~ + +test/text/text.ts:167:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +167 await db.query2('dialog').include('id', 'fun').locale('fi').get(), +   ~~~~~~~ + +test/text/text.ts:191:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +191 await db.query2('dialog').include('id', 'fun').locale('fi').get(), +   ~~~~~~~ + +test/text/text.ts:236:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +236 .locale('fi') +   ~~~~~~ + +test/text/text.ts:252:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +252 .locale('fi') +   ~~~~~~ + +test/text/text.ts:272:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +272 .include('fun.en') +   ~~~~~~~ + +test/text/text.ts:315:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +315 .include('id', 'fun') +   ~~~~~~~ + +test/text/text.ts:334:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +334 .include('id', 'fun') +   ~~~~~~~ + +test/text/text.ts:351:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +351 await db.query2('dialog').include('id', 'fun').search('snurp', 'fun').get(), +   ~~~~~~~ + +test/text/text.ts:366:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +366 await db.query2('dialog').include('id', 'fun').search('derp', 'fun').get(), +   ~~~~~~~ + +test/text/text.ts:383:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +383 .locale('fi') +   ~~~~~~ + +test/text/text.ts:394:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +394 .locale('en') +   ~~~~~~ + +test/text/text.ts:411:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly fun: { readonly type: "text"; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +411 .include('id', 'fun') +   ~~~~~~~ + +test/text/text.ts:431:13 - error TS2322: Type '{ en: { required: true; }; fr: { required: true; }; }' is not assignable to type '((Partial>> | ("id" | ... 145 more ... | "cnr")[]) & (Partial<...> | ... 1 more ... | (...'. + Types of property 'en' are incompatible. + Object literal may only specify known properties, and 'required' does not exist in type '{ fallback: ("id" | "none" | "aa" | "ab" | "af" | "ak" | "sq" | "am" | "ar" | "an" | "hy" | "as" | "av" | "ae" | "ay" | "az" | "eu" | "be" | "bn" | "bi" | "bs" | "br" | "bg" | "my" | "ca" | ... 121 more ... | "cnr")[]; } | Partial<...> | ({ ...; } & Partial<...>) | (Partial<...> & { ...; })'. + +431 en: { required: true }, +   ~~~~~~~~ + + src/schema/schema/schema.ts:31:3 + 31 locales?: SchemaLocales +    ~~~~~~~ + The expected type comes from property 'locales' which is declared here on type 'StrictSchema' + +test/text/text.ts:453:40 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +453 deepEqual(await db.query2('country').include('*').get(), [ +   ~~~~~~~ + +test/text/text.ts:464:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +464 deepEqual(await db.query2('contestant').include('*', 'country').get(), [ +   ~~~~~~~ + +test/text/text.ts:497:29 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +497 await db.query2('dialog').locale('fi').sort('fun', 'desc').get() +   ~~~~~~ + +test/text/text.ts:524:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +524 .include('fun') +   ~~~~~~~ + +test/text/text.ts:554:31 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +554 await db.query2('dialog').include('fun').sort('fun.fi', 'desc').get(), +   ~~~~~~~ + +test/text/text.ts:567:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +567 .locale('en') +   ~~~~~~ + +test/text/text.ts:587:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +587 .locale('fi') +   ~~~~~~ + +test/text/text.ts:620:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +620 .locale('fi') +   ~~~~~~ + +test/text/text.ts:644:39 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +644 deepEqual(await db.query2('dialog').locale('fi').sort('snurf', 'desc').get(), [ +   ~~~~~~ + +test/text/text.ts:663:39 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +663 deepEqual(await db.query2('dialog').locale('fi').sort('snurf', 'desc').get(), [ +   ~~~~~~ + +test/text/text.ts:670:39 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +670 deepEqual(await db.query2('dialog').locale('fi').sort('fun').get(), [ +   ~~~~~~ + +test/text/text.ts:683:39 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +683 deepEqual(await db.query2('dialog').locale('fi').sort('fun').get(), [ +   ~~~~~~ + +test/text/text.ts:700:39 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +700 deepEqual(await db.query2('dialog').locale('fi').sort('fun').get(), [ +   ~~~~~~ + +test/text/text.ts:718:31 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +718 await db.query2('dialog').locale('fi').sort('fun').get(), +   ~~~~~~ + +test/text/text.ts:736:31 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +736 await db.query2('dialog').locale('fi').sort('fun').get(), +   ~~~~~~ + +test/text/text.ts:755:7 - error TS2322: Type 'null' is not assignable to type 'string | undefined'. + +755 fi: null, +   ~~ + +test/text/text.ts:761:31 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly dialog: { props: { readonly snurf: { type: "string"; }; readonly fun: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +761 await db.query2('dialog').locale('fi').sort('fun').get(), +   ~~~~~~ + +test/text/text.ts:932:20 - error TS2531: Object is possibly 'null'. + +932 const checksum = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/text/text.ts:932:59 - error TS2339: Property 'checksum' does not exist on type '{ id: number; article: { en: string; it: string; }; }'. + +932 const checksum = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~ + +test/text/text.ts:941:21 - error TS2531: Object is possibly 'null'. + +941 const checksum2 = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +test/text/text.ts:941:60 - error TS2339: Property 'checksum' does not exist on type '{ id: number; article: { en: string; it: string; }; }'. + +941 const checksum2 = (await db.query2('user', user1).get()).checksum +   ~~~~~~~~ + +test/text/textFallback.ts:72:8 - error TS2339: Property 'locale' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly project: { props: { readonly createdAt: { ...; }; readonly title: { ...; }; readonly description: { ...; }; readonly abstract: { ...; }; }; }; }; locales: Parti...'. + +72 .locale('nl') +   ~~~~~~ + +test/text/textFilter.ts:89:32 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly project: { props: { readonly createdAt: { ...; }; readonly title: { ...; }; readonly description: { ...; }; readonly abstract: { ...; }; }; }; }; locales: Parti...'. + +89 await db.query2('project').search(term, 'title', 'abstract').get() +   ~~~~~~ + +test/text/textFilter.ts:103:36 - error TS2339: Property 'search' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly project: { props: { readonly createdAt: { ...; }; readonly title: { ...; }; readonly description: { ...; }; readonly abstract: { ...; }; }; }; }; locales: Parti...'. + +103 await db.query2('project').search(term, 'title', 'abstract').get() +   ~~~~~~ + +test/text/textFilter.ts:190:30 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly function: { props: { readonly name: { type: "alias"; }; ... 10 more ...; readonly "measurements.history": { ...; }; }; }; readonly measurement: { ...; }; readon...'. + +190 await db.query2('event').filter('msg', 'includes', 'derp').get(), +   ~~~~~~ + +test/type-gen/examples/helloWorld/index.ts:1:31 - error TS2307: Cannot find module '@based/functions' or its corresponding type declarations. + +1 import { BasedFunction } from '@based/functions' +   ~~~~~~~~~~~~~~~~~~ + +test/type-gen/examples/query/index.ts:1:36 - error TS2307: Cannot find module '@based/functions' or its corresponding type declarations. + +1 import { BasedQueryFunction } from '@based/functions' +   ~~~~~~~~~~~~~~~~~~ + +test/update.ts:204:40 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly mep: { props: { readonly a: { readonly type: "uint32"; }; readonly countryCode: { ...; }; readonly b: { ...; }; readonly c: { ...; }; }; }; readonly snurp: { .....'. + +204 equal((await db.query2('snurp', ids).range(0, 100).get()).length, 100) +   ~~~~~ + +test/update.ts:206:40 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly mep: { props: { readonly a: { readonly type: "uint32"; }; readonly countryCode: { ...; }; readonly b: { ...; }; readonly c: { ...; }; }; }; readonly snurp: { .....'. + +206 equal((await db.query2('snurp', ids).range(10, 110).get()).length, 100) +   ~~~~~ + +test/update.ts:211:8 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly mep: { props: { readonly a: { readonly type: "uint32"; }; readonly countryCode: { ...; }; readonly b: { ...; }; readonly c: { ...; }; }; }; readonly snurp: { .....'. + +211 .range(1e5, 1e5 + 2) +   ~~~~~ + +test/update.ts:241:43 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly mep: { props: { readonly a: { readonly type: "uint32"; }; readonly countryCode: { ...; }; readonly b: { ...; }; readonly c: { ...; }; }; }; readonly snurp: { .....'. + +241 promises.push(db.query2('snurp', i).include('a').get()) +   ~~~~~~~ + +test/upsert.ts:86:45 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { [x: string]: { partial?: boolean | undefined; ... 4 more ...; props: any; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +86 deepEqual(await client1.query2('article').include('*', '**').get(), [ +   ~~~~~~~ + +test/validation/validation.ts:162:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly json: { type: "json"; }; ... 17 more ...; readonly connections: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +162 .include('cardinality') +   ~~~~~~~ + +test/validation/validation.ts:286:29 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly json: { type: "json"; }; ... 17 more ...; readonly connections: Omit<...> & { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more .....'. + +286 await db.query2('user').include('name', 'friend').get(), +   ~~~~~~~ + +test/validation/validation.ts:605:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +605 .filter('friend.description.en', '=', 'nice') +   ~~~~~~ + +test/validation/validation.ts:663:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +663 () => db.query2('user').filter('rating', 'includes', 1).get(), +   ~~~~~~ + +test/validation/validation.ts:680:29 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +680 () => db.query2('user').filter('friend', 'includes', 1).get(), +   ~~~~~~ + +test/validation/validation.ts:708:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +708 .filter('name', 'includes', '') +   ~~~~~~ + +test/validation/validation.ts:718:8 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +718 .filter('name', 'includes', '') +   ~~~~~~ + +test/validation/validation.ts:776:27 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +776 await db.query2('user').sort('drip', 'desc').get() +   ~~~~ + +test/validation/validation.ts:780:31 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +780 await db.query2('user').sort('flurp').get() +   ~~~~ + +test/validation/validation.ts:792:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +792 await db.query2('user').sort('connections').get() +   ~~~~ + +test/validation/validation.ts:796:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +796 await db.query2('user').sort('friend').get() +   ~~~~ + +test/validation/validation.ts:800:32 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +800 await db.query2('user', 1).sort('drip').get() +   ~~~~ + +test/validation/validation.ts:803:31 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +803 await db.query2('user', []).sort('drip').get() +   ~~~~ + +test/validation/validation.ts:805:38 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +805 await db.query2('user', [1, 2, 3]).sort('drip').get() +   ~~~~ + +test/validation/validation.ts:808:29 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly todo: { props: { readonly done: { type: "boolean"; }; ... 4 more ...; readonly body: { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5...'. + +808 await db.query2('user').sort('drip').range(0, -10).get() +   ~~~~ + +test/validation/validation.ts:929:27 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +929 await db.query2('user').range(0, 5).get() +   ~~~~~ + +test/validation/validation.ts:930:27 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +930 await db.query2('user').range(1, 10).get() +   ~~~~~ + +test/validation/validation.ts:931:27 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +931 await db.query2('user').range(0, 1).get() +   ~~~~~ + +test/validation/validation.ts:932:27 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +932 await db.query2('user').range(100, 101).get() +   ~~~~~ + +test/validation/validation.ts:933:27 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +933 await db.query2('user').range(1000, 1001).get() +   ~~~~~ + +test/validation/validation.ts:934:27 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +934 await db.query2('user').range(0, undefined).get() +   ~~~~~ + +test/validation/validation.ts:937:29 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +937 await db.query2('user').range(0, 0).get() +   ~~~~~ + +test/validation/validation.ts:941:29 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +941 await db.query2('user').range(5, 5).get() +   ~~~~~ + +test/validation/validation.ts:945:29 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +945 await db.query2('user').range(4294967295, 4294967295).get() +   ~~~~~ + +test/validation/validation.ts:1024:29 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1024 await db.query2('user').range(5, 3).get() +   ~~~~~ + +test/validation/validation.ts:1027:27 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1027 await db.query2('user').filter('rating', '>', 0).range(0, 5).get() +   ~~~~~~ + +test/validation/validation.ts:1028:27 - error TS2339: Property 'sort' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1028 await db.query2('user').sort('rating').range(0, 5).get() +   ~~~~ + +test/validation/validation.ts:1029:27 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1029 await db.query2('user').include('name').range(0, 5).get() +   ~~~~~~~ + +test/validation/validation.ts:1033:6 - error TS2339: Property 'range' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly rating: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1033 .range(0, 5) +   ~~~~~ + +test/validation/validation.ts:1079:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly binaryData: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1079 .filter('binaryData', '=', Buffer.from([1, 2, 3, 4])) +   ~~~~~~ + +test/validation/validation.ts:1084:6 - error TS2339: Property 'filter' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly user: { props: { readonly name: { type: "string"; }; readonly binaryData: { ...; }; }; }; }; locales: Partial<...>; }, ... 5 more ..., undefined>'. + +1084 .filter('binaryData', '=', new Uint8Array([5, 6, 7, 8])) +   ~~~~~~ + +test/validation/validationReferences.ts:68:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly flap: { props: { readonly x: { readonly ref: "user"; readonly prop: "y"; } & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +68 .include('name', 'connections.id') +   ~~~~~~~ + +test/validation/validationReferences.ts:177:46 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly flap: { props: { readonly x: { readonly ref: "user"; readonly prop: "y"; } & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +177 await db.query2('user', userWithFriends).include('name', 'friends').get(), +   ~~~~~~~ + +test/validation/validationReferences.ts:198:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly flap: { props: { readonly x: { readonly ref: "user"; readonly prop: "y"; } & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +198 .include( +   ~~~~~~~ + +test/validation/validationReferences.ts:244:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2<{ version?: string | undefined; defaultTimezone?: string | undefined; migrations?: SchemaMigrations | undefined; hash: number; types: { readonly flap: { props: { readonly x: { readonly ref: "user"; readonly prop: "y"; } & { ...; }; }; }; readonly user: { ...; }; }; locales: Partial<...>; }, ... 5 more .....'. + +244 .include( +   ~~~~~~~ + +test/vector.ts:50:39 - error TS2339: Property 'include' does not exist on type 'BasedQuery2'. + +50 const res = await db.query2('data').include('name', 'a').get() +   ~~~~~~~ + +test/vector.ts:63:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2'. + +63 .include('name') +   ~~~~~~~ + +test/vector.ts:70:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2'. + +70 .include('name') +   ~~~~~~~ + +test/vector.ts:83:6 - error TS2339: Property 'include' does not exist on type 'BasedQuery2'. + +83 .include('name') +   ~~~~~~~ + +test/vector.ts:105:8 - error TS2339: Property 'include' does not exist on type 'BasedQuery2'. + +105 .include('name') +   ~~~~~~~ + + +Found 1233 errors in 187 files. + +Errors Files + 1 scripts/repl.ts:6 + 2 scripts/test_push_exports.ts:13 + 48 src/db-client/query2/index.ts:73 + 3 src/db-client/query2/types.ts:347 + 26 test/aggregate/basic.ts:55 + 14 test/aggregate/deep.ts:57 + 2 test/aggregate/experimental.ts:90 + 13 test/aggregate/groupBY.ts:52 + 6 test/aggregate/multiple.ts:77 + 10 test/aggregate/overall.perf.ts:41 + 19 test/aggregate/temporal.ts:32 + 5 test/aggregate/validation.ts:29 + 8 test/alias/alias.ts:43 + 1 test/alias/filter.ts:33 + 1 test/alignModify.ts:45 + 1 test/based-client/addSpecs.ts:57 + 4 test/based-client/authorize.ts:139 + 1 test/based-client/authorizeOnSpec.ts:94 + 2 test/based-client/browser/index.ts:78 + 4 test/based-client/dbQuery.ts:32 + 2 test/based-client/error.ts:140 + 1 test/based-client/functions.ts:3 + 1 test/based-client/functionsPerf.ts:2 + 20 test/based-client/get.ts:2 + 6 test/based-client/hooks.ts:2 + 1 test/based-client/http.ts:2 + 1 test/based-client/httpGet.ts:2 + 1 test/based-client/installFunctions.ts:2 + 2 test/based-client/lazyConnect.ts:2 + 1 test/based-client/memLeaks.ts:2 + 2 test/based-client/messages.ts:2 + 1 test/based-client/nestedChannelSimple.ts:2 + 13 test/based-client/nestedFunctions.ts:3 + 3 test/based-client/nestedFunctionsError.ts:2 + 4 test/based-client/nestedQuerySimple.ts:2 + 5 test/based-client/null.ts:2 + 2 test/based-client/payloadPerf.ts:85 + 5 test/based-client/persist.ts:2 + 16 test/based-client/protocolContentType.ts:3 + 3 test/based-client/query.ts:2 + 3 test/based-client/queryCache.ts:2 + 18 test/based-client/queryCtxBound.ts:2 + 2 test/based-client/queryDiff.ts:2 + 10 test/based-client/queryErrorHandling.ts:2 + 2 test/based-client/queryInstancePerf.ts:2 + 3 test/based-client/queryReusedDiff.ts:2 + 4 test/based-client/queryUint8Payload.ts:2 + 1 test/based-client/rateLimit.ts:2 + 2 test/based-client/reEvaluateAuthState.ts.ts:2 + 3 test/based-client/relay.ts:2 + 1 test/based-client/reload.ts:2 + 4 test/based-client/ssr.ts:2 + 1 test/based-client/stream.ts:2 + 1 test/based-client/streamChunks.ts:4 + 1 test/based-client/streamHttp.ts:2 + 1 test/based-client/streamNested.ts:2 + 2 test/based-client/throttle.ts:2 + 2 test/based-client/verifyAuthState.ts:2 + 5 test/bench.perf.ts:54 + 1 test/benchmarks/utils.ts:1 + 10 test/bigNode.perf.ts:37 + 6 test/binary.ts:21 + 3 test/boolean.ts:34 + 1 test/capped.ts:131 + 16 test/cardinality.ts:45 + 2 test/clientServer.perf.ts:43 + 5 test/clientServer.ts:27 + 3 test/colvec.ts:63 + 2 test/concurrency.perf.ts:46 + 4 test/copy.ts:63 + 2 test/crc32c.ts:115 + 4 test/db-schema/schemaUpdates.ts:45 + 10 test/default.ts:46 + 3 test/delete.perf.ts:58 + 7 test/delete.ts:39 + 9 test/dependent.ts:66 + 2 test/edges/edgeFilterNested.ts:45 + 1 test/edges/edgeNumbers.ts:41 + 18 test/edges/edges.ts:113 + 12 test/edges/edgesMain.ts:60 + 3 test/edges/edgesReference.ts:58 + 18 test/edges/edgesReferences.ts:100 + 5 test/edges/edgeText.ts:39 + 1 test/edges/edgeType.ts:92 + 8 test/enum.ts:34 + 2 test/errors.ts:23 + 10 test/exists.ts:32 + 4 test/expire.ts:40 + 4 test/filter/api.ts:33 + 2 test/filter/edges.ts:55 + 42 test/filter/filter.ts:44 + 4 test/filter/or.ts:21 + 2 test/filter/references.ts:51 + 6 test/filter/referencesField.ts:32 + 34 test/filter/string.ts:18 + 1 test/HLLunion.ts:86 + 14 test/hooks.ts:67 + 5 test/include/include.ts:26 + 12 test/include/includeMeta.ts:39 + 2 test/include/includeNested.ts:26 + 7 test/include/includeSlice.ts:58 + 3 test/include/referencesField.ts:34 + 6 test/include/thread.perf.ts:79 + 1 test/isModified.perf.ts:23 + 4 test/json.ts:82 + 2 test/locales.ts:39 + 2 test/mem.ts:57 + 2 test/migration.ts:227 + 2 test/modify/props/binary.ts:63 + 9 test/modify/props/boolean.ts:94 + 2 test/modify/props/cardinality.ts:74 + 3 test/modify/props/default.ts:102 + 2 test/modify/props/enum.ts:65 + 2 test/modify/props/json.ts:66 + 2 test/modify/props/mixed.ts:41 + 1 test/modify/props/numbers.ts:241 + 4 test/modify/props/object.ts:56 + 8 test/modify/props/references.ts:26 + 6 test/modify/props/string.ts:111 + 3 test/modify/props/text.ts:71 + 2 test/modify/props/timestamp.ts:100 + 6 test/modify/props/vector.ts:108 + 2 test/protocol/schema.ts:59 + 1 test/query-ast/validate.perf.ts:1 + 7 test/query/db.ts:66 + 23 test/query/types.ts:110 + 8 test/queryResponse.ts:25 + 2 test/range.ts:59 + 2 test/raw.ts:21 + 34 test/references/references.ts:61 + 14 test/references/referencesIndex.ts:37 + 6 test/references/referencesModify.ts:49 + 2 test/save/blockHash.ts:50 + 15 test/save/save.ts:197 + 1 test/save/saveEdge.ts:43 + 6 test/save/saveRange.ts:63 + 7 test/scenarios/e-commerce.ts:1 + 47 test/scenarios/northwind.ts:12 + 20 test/scenarios/nycTaxi.ts:429 + 4 test/scenarios/vote.ts:155 + 2 test/scenarios/voteEdges.ts:132 + 4 test/scenarios/voteLargeAmounts.perf.ts:138 + 5 test/scenarios/voteStorage.ts:131 + 4 test/schema/parse/edges.ts:36 + 1 test/schema/parse/infer.ts:1 + 1 test/schema/parse/references.ts:211 + 1 test/schema/parse/schema.ts:12 + 1 test/schema/props/write.ts:20 + 4 test/schema/serialize/serialize.ts:46 + 27 test/search.ts:29 + 3 test/serializeQueryDef.ts:33 + 2 test/shared/test.ts:89 + 3 test/simpleQuery.ts:36 + 33 test/singleRef.ts:100 + 2 test/singleRefQuery.ts:108 + 2 test/sort/sort.perf.ts:27 + 29 test/sort/sort.ts:56 + 4 test/sort/sortAlias.ts:28 + 3 test/sort/sortBinary.ts:38 + 3 test/sort/sortById.ts:33 + 2 test/sort/sortEnum.ts:37 + 8 test/sort/sortHll.ts:55 + 8 test/sort/sortIds.ts:31 + 1 test/sort/sortNodeId.ts:27 + 11 test/sort/sortNumber.ts:52 + 4 test/sort/sortString.ts:70 + 8 test/sort/sortTimestamp.ts:43 + 17 test/string.ts:42 + 4 test/subscription/subscription.perf.ts:36 + 6 test/subscription/subscription.ts:16 + 6 test/subscription/subscriptionId.ts:15 + 4 test/subscription/subscriptionIdPartial.ts:15 + 3 test/subscription/subscriptionIdRemove.ts:15 + 2 test/subscription/subscriptionMulti.perf.ts:15 + 8 test/subscription/subscriptionNow.ts:15 + 3 test/subscription/subscriptionSchemaChanges.ts:16 + 3 test/subscription/subscriptionWorkers.perf.ts:64 + 43 test/text/text.ts:32 + 1 test/text/textFallback.ts:72 + 3 test/text/textFilter.ts:89 + 1 test/type-gen/examples/helloWorld/index.ts:1 + 1 test/type-gen/examples/query/index.ts:1 + 4 test/update.ts:204 + 1 test/upsert.ts:86 + 31 test/validation/validation.ts:162 + 4 test/validation/validationReferences.ts:68 + 5 test/vector.ts:50 diff --git a/native/db/lifeTime.zig b/native/db/lifeTime.zig index 4405e20b97..b44172a812 100644 --- a/native/db/lifeTime.zig +++ b/native/db/lifeTime.zig @@ -3,6 +3,7 @@ const std = @import("std"); const napi = @import("../napi.zig"); const dump = @import("../selva/dump.zig"); const selva = @import("../selva/selva.zig").c; +const jemalloc = @import("../jemalloc.zig"); const dbCtx = @import("ctx.zig"); pub fn start(env: napi.Env, info: napi.Info) callconv(.c) napi.Value { @@ -20,12 +21,19 @@ pub fn stop(napi_env: napi.Env, info: napi.Info) callconv(.c) napi.Value { fn startInternal(env: napi.Env, info: napi.Info) !napi.Value { // does this make double things with valgrind? Ask marco dbCtx.init(); - const args = try napi.getArgs(3, env, info); + const args = try napi.getArgs(4, env, info); const fsPath = try napi.get([]u8, env, args[1]); const nrThreads = try napi.get(u16, env, args[2]); const ctx = try dbCtx.createDbCtx(env, args[0], fsPath, nrThreads); - ctx.selva = selva.selva_db_create(); + const selvaSchema = try napi.get([]u8, env, args[3]); + + ctx.selva = selva.selva_db_create(selvaSchema.len, selvaSchema.ptr); + if (ctx.selva == null) { + return errors.jsThrow(env, "Failed to create a db"); + } _ = selva.selva_db_chdir(ctx.selva, fsPath.ptr, fsPath.len); // TODO Handle error? + ctx.ids = jemalloc.alloc(u32, selva.selva_get_max_type(ctx.selva)); + var externalNapi: napi.Value = undefined; ctx.initialized = true; _ = napi.c.napi_create_external(env, ctx, null, null, &externalNapi); diff --git a/native/errors.zig b/native/errors.zig index 9019a4c2da..4877f6534a 100644 --- a/native/errors.zig +++ b/native/errors.zig @@ -75,8 +75,3 @@ pub const DbError = error{ pub const DbIncludeError = error{ EDGE_FROM_WEAKREF, }; - -pub const ClientError = enum(u8) { - null = 0, - nx = 1, -}; diff --git a/native/modify/common.zig b/native/modify/common.zig index 462798277e..10ed065764 100644 --- a/native/modify/common.zig +++ b/native/modify/common.zig @@ -22,7 +22,6 @@ pub const ModifyCtx = struct { node: ?Node.Node, fieldType: t.PropType, db: *DbCtx, - // dirtyRanges: std.AutoArrayHashMap(u64, f64), subTypes: ?*Subscription.TypeSubscriptionCtx, // prob want to add subs here idSubs: ?[]*Subscription.Sub, batch: []u8, diff --git a/native/modify/create.zig b/native/modify/create.zig deleted file mode 100644 index e1af71d574..0000000000 --- a/native/modify/create.zig +++ /dev/null @@ -1,106 +0,0 @@ -const Modify = @import("common.zig"); -const selva = @import("../selva/selva.zig"); -const Node = @import("../selva/node.zig"); -const Fields = @import("../selva/fields.zig"); -const utils = @import("../utils.zig"); -const sort = @import("../sort/sort.zig"); -const errors = @import("../errors.zig"); -const references = @import("references.zig"); -const reference = @import("reference.zig"); -const std = @import("std"); -const lib = @import("../lib.zig"); -const subs = @import("subscription.zig"); -const t = @import("../types.zig"); - -const read = utils.read; - -const ModifyCtx = Modify.ModifyCtx; -const getOrCreateShard = Modify.getOrCreateShard; -const getSortIndex = Modify.getSortIndex; - -pub fn createField(ctx: *ModifyCtx, data: []u8) !usize { - // subs.stage(ctx, subs.Op.create); - - switch (ctx.fieldType) { - t.PropType.references => { - return references.writeReferences(ctx, data); - }, - t.PropType.reference => { - return reference.updateReference(ctx, data); - }, - t.PropType.vector => { - const len = read(u32, data, 0); - const padding = data[4]; - const slice = data[8 - padding .. len + 4]; - try Fields.setMicroBuffer(ctx.node.?, ctx.fieldSchema.?, slice); - return len + 4; - }, - t.PropType.colVec => { - const len = read(u32, data, 0); - const padding = data[4]; - const slice = data[8 - padding .. len + 4]; - Fields.setColvec(ctx.typeEntry.?, ctx.id, ctx.fieldSchema.?, slice); - return len + 4; - }, - t.PropType.cardinality => { - const hllMode = if (data[0] == 0) true else false; - const hllPrecision = data[1]; - const offset = 2; - const len = read(u32, data, offset); - const hll = try Fields.ensurePropTypeString(ctx, ctx.fieldSchema.?); - selva.c.hll_init(hll, hllPrecision, hllMode); - var i: usize = 4 + offset; - while (i < (len * 8) + offset) { - const hash = read(u64, data, i); - selva.c.hll_add(hll, hash); - i += 8; - } - const newCount = selva.c.hll_count(hll); - addSortIndexOnCreation(ctx, newCount[0..4]) catch null; - return len * 8 + 6; - }, - else => { - const len = read(u32, data, 0); - const slice = data[4 .. len + 4]; - addSortIndexOnCreation(ctx, slice) catch null; - if (ctx.fieldType == t.PropType.alias) { - if (slice.len > 0) { - const old = try Fields.setAlias(ctx.typeEntry.?, ctx.id, ctx.field, slice); - if (old > 0) { - if (ctx.currentSortIndex != null) { - sort.remove(ctx.thread.decompressor, ctx.currentSortIndex.?, slice, Node.getNode(ctx.typeEntry.?, old).?); - } - selva.markDirty(ctx, ctx.typeId, old); - } - } - } else { - try Fields.set(ctx.node.?, ctx.fieldSchema.?, slice); - } - return len + 4; - }, - } -} - -pub fn addSortIndexOnCreation(ctx: *ModifyCtx, slice: []u8) !void { - if (ctx.field == 0) { - if (ctx.typeSortIndex != null) { - var it = ctx.typeSortIndex.?.main.iterator(); - while (it.next()) |entry| { - const sI = entry.value_ptr.*; - sort.insert(ctx.thread.decompressor, sI, slice, ctx.node.?); - } - } - } else if (ctx.currentSortIndex != null) { - sort.insert(ctx.thread.decompressor, ctx.currentSortIndex.?, slice, ctx.node.?); - } else if (ctx.typeSortIndex != null and ctx.fieldType == t.PropType.text) { - const sIndex = sort.getSortIndex( - ctx.db.sortIndexes.get(ctx.typeId), - ctx.field, - 0, - @enumFromInt(slice[0]), - ); - if (sIndex) |s| { - sort.insert(ctx.thread.decompressor, s, slice, ctx.node.?); - } - } -} diff --git a/native/modify/delete.zig b/native/modify/delete.zig index 5cb8c4de8e..9afe3d7e82 100644 --- a/native/modify/delete.zig +++ b/native/modify/delete.zig @@ -85,14 +85,6 @@ pub fn deleteField(ctx: *ModifyCtx) !usize { if (e != error.SELVA_ENOENT) return e; }; } else { - if (ctx.fieldType == t.PropType.reference) { - const fs = ctx.fieldSchema.?; - const dstType = try Node.getRefDstType(ctx.db, fs); - const oldRefDst = Node.getNodeFromReference(dstType, References.getReference(ctx.node.?, fs)); - if (oldRefDst) |dstNode| { - Selva.markDirty(ctx, Node.getNodeTypeId(dstNode), Node.getNodeId(dstNode)); - } - } try Fields.deleteField(ctx, ctx.node.?, ctx.fieldSchema.?); } return 0; diff --git a/native/modify/edges.zig b/native/modify/edges.zig deleted file mode 100644 index b804ae2e8e..0000000000 --- a/native/modify/edges.zig +++ /dev/null @@ -1,123 +0,0 @@ -const Modify = @import("common.zig"); -const selva = @import("../selva/selva.zig").c; -const Schema = @import("../selva/schema.zig"); -const Node = @import("../selva/node.zig"); -const Fields = @import("../selva/fields.zig"); -const References = @import("../selva/references.zig"); -const utils = @import("../utils.zig"); -const errors = @import("../errors.zig"); -const update = @import("update.zig"); -const std = @import("std"); -const t = @import("../types.zig"); - -const read = utils.read; -const copy = utils.copy; -const ModifyCtx = Modify.ModifyCtx; - -fn isMainEmpty(val: []u8) bool { - var b = false; - for (val) |byte| { - b = b or byte != 0; - } - return b == false; -} - -pub fn writeEdges( - ctx: *ModifyCtx, - ref: References.ReferenceLarge, - data: []u8, -) !void { - var i: usize = 0; - const edgeConstraint = Schema.getEdgeFieldConstraint(ctx.fieldSchema.?); - const edgeNode = try Node.ensureRefEdgeNode(ctx, ctx.node.?, edgeConstraint, ref); - const edgeId = ref.*.edge; - const edgeTypeId = edgeConstraint.*.edge_node_type; - if (edgeId > ctx.db.ids[edgeTypeId - 1]) { - ctx.db.ids[edgeTypeId - 1] = edgeId; - } - - while (i < data.len) { - const op: t.ModOp = @enumFromInt(data[i]); - const prop = data[i + 1]; - const propType: t.PropType = @enumFromInt(data[i + 2]); - i += 3; - - const edgeFieldSchema = Schema.getEdgeFieldSchema(ctx.db, edgeConstraint, prop) catch { - std.log.err("Edge field schema not found\n", .{}); - return; - }; - - var len: u32 = undefined; - var offset: u32 = 0; - - if (op == t.ModOp.updatePartial) { - len = read(u32, data, i); - const totalMainBufferLen = read(u16, data, i + 4); - offset = 6; - const mainBufferOffset = len - totalMainBufferLen; - const val = Fields.get(null, edgeNode, edgeFieldSchema, propType); - if (!isMainEmpty(val)) { - const edgeData = data[i + offset + mainBufferOffset .. i + len + offset]; - var j: usize = offset + i; - while (j < mainBufferOffset + offset + i) : (j += 6) { - const start = read(u16, data, j); - const l = read(u16, data, j + 2); - const fieldOp: t.ModOp = @enumFromInt(data[j + 4]); - if (fieldOp == t.ModOp.increment or fieldOp == t.ModOp.decrement) { - _ = update.incrementBuffer(op, @enumFromInt(data[j + 5]), val, edgeData); - } else { - copy(u8, val, edgeData[start .. start + l], start); - } - } - } else { - const edgeData = data[i + offset + mainBufferOffset .. i + len + offset]; - try Fields.set(edgeNode, edgeFieldSchema, edgeData); - } - } else switch (propType) { - t.PropType.reference => { - len = 4; - offset = 0; - const dstId = read(u32, data, i + offset); - if (Node.getNode(try Node.getRefDstType(ctx.db, edgeFieldSchema), dstId)) |dstNode| { - _ = try References.writeReference(ctx, edgeNode, edgeFieldSchema, dstNode); - } else { - return errors.SelvaError.SELVA_ENOENT; - } - }, - t.PropType.references => { - len = read(u32, data, i); - offset = 4; - const edgeData = data[i + offset .. i + offset + len]; - // fix start - const address = @intFromPtr(edgeData.ptr); - const delta: u8 = @truncate(address % 4); - const d = if (delta == 0) 0 else 4 - delta; - const aligned = edgeData[d .. edgeData.len - 3 + d]; - if (d != 0) { - utils.move(aligned, edgeData[0 .. edgeData.len - 3]); - } - const u32ids = read([]u32, aligned, 0); - try References.putReferences(ctx, edgeNode, edgeFieldSchema, u32ids); - }, - t.PropType.cardinality => { - len = read(u32, data, i); - offset = 4; - const hll = try Fields.ensureEdgePropTypeString(ctx, ctx.node.?, edgeConstraint, ref, edgeFieldSchema); - selva.hll_init(hll, 8, true); // TBD: to get optionals from buffer - var it: usize = i + offset; - while (it < len) { - const hash = read(u64, data, it); - selva.hll_add(hll, hash); - it += 8; - } - }, - else => { - len = read(u32, data, i); - offset = 4; - const edgeData = data[i + offset .. i + offset + len]; - try Fields.set(edgeNode, edgeFieldSchema, edgeData); - }, - } - i += offset + len; - } -} diff --git a/native/modify/modify.zig b/native/modify/modify.zig index 0713ebffa8..8b61d8a287 100644 --- a/native/modify/modify.zig +++ b/native/modify/modify.zig @@ -5,33 +5,31 @@ const Schema = @import("../selva/schema.zig"); const Node = @import("../selva/node.zig"); const Fields = @import("../selva/fields.zig"); const References = @import("../selva/references.zig"); -const Modify = @import("common.zig"); -const createField = @import("create.zig").createField; -const deleteFieldSortIndex = @import("delete.zig").deleteFieldSortIndex; -const deleteField = @import("delete.zig").deleteField; -const deleteTextLang = @import("delete.zig").deleteTextLang; -const addEmptyToSortIndex = @import("sort.zig").addEmptyToSortIndex; -const addEmptyTextToSortIndex = @import("sort.zig").addEmptyTextToSortIndex; const utils = @import("../utils.zig"); -const Update = @import("update.zig"); -const dbSort = @import("../sort/sort.zig"); -const config = @import("config"); -const errors = @import("../errors.zig"); const Thread = @import("../thread/thread.zig"); const t = @import("../types.zig"); const DbCtx = @import("../db/ctx.zig").DbCtx; - -const updateField = Update.updateField; -const updatePartialField = Update.updatePartialField; -const increment = Update.increment; -const read = utils.read; -const write = utils.write; -const assert = std.debug.assert; -const ModifyCtx = Modify.ModifyCtx; - const subs = @import("subscription.zig"); pub const subscription = subs.suscription; +const resItemSize = utils.sizeOf(t.ModifyResultItem); +inline fn applyInc(comptime T: type, current: []u8, value: []u8, start: u16, incrementPositive: bool) void { + const curr = utils.read(T, current, start); + const inc = utils.read(T, value, 0); + if (incrementPositive) { + const res = if (@typeInfo(T) == .float) curr + inc else curr +% inc; + utils.write(value, res, 0); + } else { + const res = if (@typeInfo(T) == .float) curr - inc else curr -% inc; + utils.write(value, res, 0); + } +} + +inline fn writeResult(res: *t.ModifyResultItem, id: u32, err: t.ModifyError) void { + const ptr = @as([*]u8, @ptrCast(res)); + utils.write(ptr[0..4], id, 0); + ptr[4] = @intFromEnum(err); +} // ----------NAPI------------- pub fn modifyThread(env: napi.Env, info: napi.Info) callconv(.c) napi.Value { @@ -41,309 +39,366 @@ pub fn modifyThread(env: napi.Env, info: napi.Info) callconv(.c) napi.Value { ) catch undefined; return null; } + fn modifyInternalThread(env: napi.Env, info: napi.Info) !void { const args = try napi.getArgs(2, env, info); const batch = try napi.get([]u8, env, args[0]); const dbCtx = try napi.get(*DbCtx, env, args[1]); try dbCtx.threads.modify(batch); } -// ----------------------- -fn switchType(ctx: *ModifyCtx, typeId: u16) !void { - ctx.typeId = typeId; - ctx.typeEntry = try Node.getType(ctx.db, ctx.typeId); - ctx.typeSortIndex = dbSort.getTypeSortIndexes(ctx.db, ctx.typeId); - // ctx.subTypes = ctx.thread.subscriptions.types.get(ctx.typeId); - // if (ctx.subTypes) |st| { - // st.typeModified = true; - // } - ctx.node = null; -} +pub fn modifyProps(db: *DbCtx, typeEntry: Node.Type, node: Node.Node, data: []u8, items: []u8) !void { + selva.markDirty(db, typeEntry, Node.getNodeId(node)); -fn writeoutPrevNodeId(ctx: *ModifyCtx, resultLen: *u32, prevNodeId: u32, result: []u8) void { - if (prevNodeId != 0) { - utils.write(result, prevNodeId, resultLen.*); - utils.writeAs(u8, result, ctx.err, resultLen.* + 4); - ctx.err = errors.ClientError.null; - resultLen.* += 5; - } -} - -fn newNode(ctx: *ModifyCtx) !void { - const id = ctx.db.ids[ctx.typeId - 1] + 1; - ctx.node = try Node.upsertNode(ctx, ctx.typeEntry.?, id); - ctx.id = id; - ctx.db.ids[ctx.typeId - 1] = id; - selva.markDirty(ctx, ctx.typeId, id); -} + var j: usize = 0; + while (j < data.len) { + const propId = data[j]; + const propSchema = try Schema.getFieldSchema(typeEntry, propId); -fn newNodeRing(ctx: *ModifyCtx, maxId: u32) !void { - const nextId = ctx.db.ids[ctx.typeId - 1] % maxId + 1; - ctx.node = Node.getNode(ctx.typeEntry.?, nextId); + if (propId == 0) { + // main handling + const main = utils.readNext(t.ModifyMainHeader, data, &j); + const current = Fields.get(typeEntry, node, propSchema, t.PropType.microBuffer); + const value = data[j .. j + main.size]; - if (ctx.node) |oldNode| { - Node.flushNode(ctx, ctx.typeEntry.?, oldNode); - } else { - ctx.node = try Node.upsertNode(ctx, ctx.typeEntry.?, nextId); - } + if (main.increment) { + switch (main.type) { + .number => applyInc(f64, current, value, main.start, main.incrementPositive), + .timestamp => applyInc(i64, current, value, main.start, main.incrementPositive), + .int8, .uint8 => applyInc(u8, current, value, main.start, main.incrementPositive), + .int16, .uint16 => applyInc(u16, current, value, main.start, main.incrementPositive), + .int32, .uint32 => applyInc(u32, current, value, main.start, main.incrementPositive), + else => {}, + } + } + if (main.expire and main.size == 8) { + const typeId = Node.getNodeTypeId(node); + const id = Node.getNodeId(node); + Node.expireNode(db, typeId, id, @divTrunc(utils.read(i64, value, 0), 1000)); + } + utils.copy(u8, current, value, main.start); + j += main.size; + } else { + // separate handling + const prop = utils.readNext(t.ModifyPropHeader, data, &j); + const value = data[j .. j + prop.size]; + switch (prop.type) { + .text => { + if (prop.size == 0) { + try Fields.deleteField(db, node, propSchema); + continue; + } + var k: usize = 0; + while (k < value.len) { + const textSize = utils.read(u32, value, k); + k += 4; + const textValue = value[k .. k + textSize]; + k += textSize; + try Fields.setText(node, propSchema, textValue); + } + }, + .alias => { + const id = Node.getNodeId(node); + if (prop.size == 0) { + try Fields.delAlias(typeEntry, id, prop.id); + continue; + } + const prev = try Fields.setAlias(typeEntry, id, prop.id, value); + if (prev > 0) { + // TODO sort for everything + // if (ctx.currentSortIndex != null) { + // sort.remove(ctx.thread.decompressor, ctx.currentSortIndex.?, slice, Node.getNode(ctx.typeEntry.?, prev).?); + // } + } + }, + .cardinality => { + if (prop.size == 0) { + try Fields.deleteField(db, node, propSchema); + continue; + } + var k: usize = 0; + const cardinality = utils.readNext(t.ModifyCardinalityHeader, value, &k); + var hll = selva.c.selva_fields_get_selva_string(node, propSchema); + if (hll == null) { // TODO check if this is null after delete! + hll = try Fields.ensurePropTypeString(node, propSchema); + selva.c.hll_init(hll, cardinality.precision, cardinality.sparse); + } + while (k < value.len) { + const hash = utils.read(u64, value, k); + selva.c.hll_add(hll, hash); + k += 8; + } + }, + .reference => { + if (prop.size == 0) { + try Fields.deleteField(db, node, propSchema); + continue; + } + const refTypeId = Schema.getRefTypeIdFromFieldSchema(propSchema); + const refTypeEntry = try Node.getType(db, refTypeId); + var k: usize = 0; + const meta = utils.readNext(t.ModifyReferenceMetaHeader, value, &k); + var refId = meta.id; + if (meta.isTmp) refId = utils.read(u32, items, refId * resItemSize); + if (Node.getNode(refTypeEntry, refId)) |dst| { + const ref = try References.writeReference(db, node, propSchema, dst); + if (meta.size != 0) { + if (ref) |r| { + const edgeProps = value[k .. k + meta.size]; + const edgeConstraint = Schema.getEdgeFieldConstraint(propSchema); + const edgeType = try Node.getType(db, edgeConstraint.edge_node_type); + if (Node.getEdgeNode(db, edgeConstraint, r)) |edgeNode| { + try modifyProps(db, edgeType, edgeNode, edgeProps, items); + } // TODO else error? + } + } + } + }, + .references => { + if (prop.size == 0) { + try Fields.deleteField(db, node, propSchema); + continue; + } + var k: usize = 0; + if (@as(t.ModifyReferences, @enumFromInt(value[0])) == t.ModifyReferences.clear) { + References.clearReferences(db, node, propSchema); + k += 1; + } + while (k < value.len) { + const references = utils.readNext(t.ModifyReferencesHeader, value, &k); + const refs = value[k .. k + references.size]; + switch (references.op) { + .ids => { + const offset = utils.alignLeft(u32, refs); + const u32Ids = utils.read([]u32, refs[4 - offset .. refs.len - offset], 0); + try References.putReferences(db, node, propSchema, u32Ids); + }, + .tmpIds => { + const offset = utils.alignLeft(u32, refs); + const u32Ids = utils.read([]u32, refs[4 - offset .. refs.len - offset], 0); + for (u32Ids) |*id| id.* = utils.read(u32, items, id.* * resItemSize); + try References.putReferences(db, node, propSchema, u32Ids); + }, + .idsWithMeta => { + const refTypeId = Schema.getRefTypeIdFromFieldSchema(propSchema); + const refTypeEntry = try Node.getType(db, refTypeId); + const edgeConstraint = Schema.getEdgeFieldConstraint(propSchema); + const count = utils.read(u32, refs, 0); + var x: usize = 4; + References.preallocReferences2(db, node, propSchema, count); + while (x < refs.len) { + const meta = utils.readNext(t.ModifyReferencesMetaHeader, refs, &x); + var refId = meta.id; + if (meta.isTmp) refId = utils.read(u32, items, refId * resItemSize); + if (Node.getNode(refTypeEntry, refId)) |dst| { + const ref = try References.insertReference(db, node, propSchema, dst, meta.index, meta.withIndex); + if (meta.size != 0) { + const edgeProps = refs[x .. x + meta.size]; + if (Node.getEdgeNode(db, edgeConstraint, ref.p.large)) |edgeNode| { + const edgeType = try Node.getType(db, edgeConstraint.edge_node_type); + try modifyProps(db, edgeType, edgeNode, edgeProps, items); + } // TODO else err? + } + } - ctx.id = nextId; - ctx.db.ids[ctx.typeId - 1] = nextId; - selva.markDirty(ctx, ctx.typeId, nextId); -} + x += meta.size; + } + }, + .delIds => { + const offset = utils.alignLeft(u32, refs); + const u32Ids = utils.read([]u32, refs[4 - offset .. refs.len - offset], 0); + for (u32Ids) |id| try References.deleteReference(db, node, propSchema, id); + }, + .delTmpIds => { + const offset = utils.alignLeft(u32, refs); + const u32Ids = utils.read([]u32, refs[4 - offset .. refs.len - offset], 0); + for (u32Ids) |*id| { + const realId = utils.read(u32, items, id.* * resItemSize); + try References.deleteReference(db, node, propSchema, realId); + } + }, + else => {}, + } -fn getLargeRef(db: *DbCtx, node: Node.Node, fs: Schema.FieldSchema, dstId: u32) ?References.ReferenceLarge { - if (dstId == 0) { // assume reference - return References.getReference(node, fs); - } else { // references - if (References.getReferences(false, true, db, node, fs)) |iterator| { - const refs = iterator.refs; - const any = References.referencesGet(refs, dstId); - if (any.type == selva.c.SELVA_NODE_REFERENCE_LARGE) { - return any.p.large; + k += references.size; + } + }, + .colVec => { + if (prop.size == 0) { + Fields.clearColvec(typeEntry, node, propSchema); + continue; + } + Fields.setColvec(typeEntry, node, propSchema, value); + }, + else => { + if (prop.size == 0) { + try Fields.deleteField(db, node, propSchema); + continue; + } + try Fields.set(node, propSchema, value); + }, } + + j += prop.size; } } - return null; } -fn switchEdgeId(ctx: *ModifyCtx, srcId: u32, dstId: u32, refField: u8) !u32 { - var prevNodeId: u32 = 0; - - if (srcId == 0 or ctx.node == null) { - return 0; - } - - const fs = Schema.getFieldSchema(ctx.typeEntry, refField) catch { - return 0; - }; - ctx.fieldSchema = fs; - - if (getLargeRef(ctx.db, ctx.node.?, fs, dstId)) |ref| { - const efc = Schema.getEdgeFieldConstraint(fs); - switchType(ctx, efc.edge_node_type) catch { - return 0; - }; - const edgeNode = Node.ensureRefEdgeNode(ctx, ctx.node.?, efc, ref) catch { - return 0; - }; - const edgeId = ref.*.edge; +const UpsertResult = struct { + node: Node.Node, + created: bool, +}; - // if its zero then we don't want to switch (for upsert) - prevNodeId = ctx.id; - ctx.id = edgeId; - ctx.node = edgeNode; - if (ctx.node == null) { - ctx.err = errors.ClientError.nx; - } else { - // try subs.checkId(ctx); - // It would be even better if we'd mark it dirty only in the case - // something was actually changed. - selva.markDirty(ctx, ctx.typeId, ctx.id); +inline fn upsertTarget(db: *DbCtx, typeId: u16, typeEntry: Node.Type, data: []u8) !UpsertResult { + var j: usize = 0; + while (j < data.len) { + const prop = utils.readNext(t.ModifyPropHeader, data, &j); + const value = data[j .. j + prop.size]; + if (prop.type == t.PropType.alias) { + if (Fields.getAliasByName(typeEntry, prop.id, value)) |node| { + return .{ .node = node, .created = false }; + } } + j += prop.size; } - - return prevNodeId; + const id = db.ids[typeId - 1] + 1; + const node = try Node.upsertNode(typeEntry, id); + db.ids[typeId - 1] = id; + return .{ .node = node, .created = true }; } -pub fn writeData(ctx: *ModifyCtx, buf: []u8) !usize { +pub fn modify( + thread: *Thread.Thread, + buf: []u8, + db: *DbCtx, +) !void { var i: usize = 0; + var j: u32 = 4; + const header = utils.readNext(t.ModifyHeader, buf, &i); + const size = header.count * resItemSize; + const result = try thread.modify.result(j + size, header.opId, header.opType); + const items = result[j..]; while (i < buf.len) { - const op: t.ModOp = @enumFromInt(buf[i]); - const data: []u8 = buf[i + 1 ..]; + const op: t.Modify = @enumFromInt(buf[i]); switch (op) { - .padding => { - i += 1; + .create => { + const create = utils.read(t.ModifyCreateHeader, buf, i); + i += utils.sizeOf(t.ModifyCreateHeader); + const typeEntry = try Node.getType(db, create.type); + const data: []u8 = buf[i .. i + create.size]; + const id = db.ids[create.type - 1] + 1; + const node = try Node.upsertNode(typeEntry, id); + modifyProps(db, typeEntry, node, data, items) catch { + // handle errors + }; + db.ids[create.type - 1] = id; + utils.write(result, id, j); + utils.write(result, t.ModifyError.null, j + 4); + i += create.size; }, - .switchProp => { - ctx.field = data[0]; - i += 3; - ctx.fieldSchema = try Schema.getFieldSchema(ctx.typeEntry.?, ctx.field); - ctx.fieldType = @enumFromInt(data[1]); - if (ctx.field != 0) { - ctx.currentSortIndex = dbSort.getSortIndex( - ctx.typeSortIndex, - ctx.field, - 0, - t.LangCode.none, - ); + .createRing => { + const create = utils.read(t.ModifyCreateRingHeader, buf, i); + i += utils.sizeOf(t.ModifyCreateRingHeader); + const typeEntry = try Node.getType(db, create.type); + const data: []u8 = buf[i .. i + create.size]; + const nextId = db.ids[create.type - 1] % create.maxNodeId + 1; + var node = Node.getNode(typeEntry, nextId); + if (node) |oldNode| { + Node.flushNode(db, typeEntry, oldNode); } else { - ctx.currentSortIndex = null; - } - }, - .deleteNode => { - if (ctx.node) |node| { - // subs.stage(ctx, subs.Op.deleteNode); - Node.deleteNode(ctx, ctx.typeEntry.?, node) catch {}; - ctx.node = null; + node = try Node.upsertNode(typeEntry, nextId); } - i += 1; + modifyProps(db, typeEntry, node.?, data, items) catch { + // handle errors + }; + db.ids[create.type - 1] = nextId; + utils.write(result, nextId, j); + utils.write(result, t.ModifyError.null, j + 4); + i += create.size; }, - .deleteTextField => { - const lang: t.LangCode = @enumFromInt(data[0]); - deleteTextLang(ctx, lang); - i += 2; - }, - .switchIdCreate => { - writeoutPrevNodeId(ctx, &ctx.resultLen, ctx.id, ctx.result); - try newNode(ctx); - i += 1; - }, - .switchIdCreateRing => { - writeoutPrevNodeId(ctx, &ctx.resultLen, ctx.id, ctx.result); - const maxNodeId = read(u32, data, 0); - try newNodeRing(ctx, maxNodeId); - i += 5; - }, - .switchIdCreateUnsafe => { - writeoutPrevNodeId(ctx, &ctx.resultLen, ctx.id, ctx.result); - ctx.id = read(u32, data, 0); - if (ctx.id > ctx.db.ids[ctx.typeId - 1]) { - ctx.db.ids[ctx.typeId - 1] = ctx.id; - } - ctx.node = try Node.upsertNode(ctx, ctx.typeEntry.?, ctx.id); - selva.markDirty(ctx, ctx.typeId, ctx.id); - i += 5; - }, - .switchIdUpdate => { - const id = read(u32, data, 0); - if (id != 0) { - writeoutPrevNodeId(ctx, &ctx.resultLen, ctx.id, ctx.result); - // if its zero then we don't want to switch (for upsert) - ctx.id = id; - ctx.node = Node.getNode(ctx.typeEntry.?, ctx.id); - if (ctx.node == null) { - ctx.err = errors.ClientError.nx; - } else { - // try subs.checkId(ctx); - // It would be even better if we'd mark it dirty only in the case - // something was actually changed. - selva.markDirty(ctx, ctx.typeId, ctx.id); - } + .update => { + const update = utils.read(t.ModifyUpdateHeader, buf, i); + i += utils.sizeOf(t.ModifyUpdateHeader); + const typeEntry = try Node.getType(db, update.type); + var id = update.id; + if (update.isTmp) id = utils.read(u32, items, id * resItemSize); + if (Node.getNode(typeEntry, id)) |node| { + const data: []u8 = buf[i .. i + update.size]; + modifyProps(db, typeEntry, node, data, items) catch { + // handle errors + }; + utils.write(result, id, j); + utils.write(result, t.ModifyError.null, j + 4); + } else { + utils.write(result, id, j); + utils.write(result, t.ModifyError.nx, j + 4); } - i += 5; + i += update.size; }, - // .switchEdgeId => { - // const srcId = read(u32, data, 0); - // const dstId = read(u32, data, 4); - // const refField = read(u8, data, 8); - // const prevNodeId = try switchEdgeId(ctx, srcId, dstId, refField); - // writeoutPrevNodeId(ctx, &ctx.resultLen, prevNodeId, ctx.result); - // i += 10; - // }, .upsert => { - const writeIndex = read(u32, data, 0); - const updateIndex = read(u32, data, 4); - var nextIndex: u32 = writeIndex; - var j: u32 = 8; - while (j < writeIndex) { - const prop = read(u8, data, j); - const len = read(u32, data, j + 1); - const val = data[j + 5 .. j + 5 + len]; - if (Fields.getAliasByName(ctx.typeEntry.?, prop, val)) |node| { - write(data, Node.getNodeId(node), updateIndex + 1); - nextIndex = updateIndex; - break; - } - j = j + 5 + len; + const upsert = utils.read(t.ModifyCreateHeader, buf, i); + i += utils.sizeOf(t.ModifyCreateHeader); + const target = buf[i .. i + upsert.size]; + i += upsert.size; + const typeEntry = try Node.getType(db, upsert.type); + const upsertRes = try upsertTarget(db, upsert.type, typeEntry, target); + if (upsertRes.created) { + try modifyProps(db, typeEntry, upsertRes.node, target, items); } - i += nextIndex + 1; + const dataSize = utils.read(u32, buf, i); + i += 4; + const data = buf[i .. i + dataSize]; + modifyProps(db, typeEntry, upsertRes.node, data, items) catch { + // handle errors + }; + const id = Node.getNodeId(upsertRes.node); + utils.write(result, id, j); + utils.write(result, t.ModifyError.null, j + 4); + i += dataSize; }, .insert => { - const writeIndex = read(u32, data, 0); - const endIndex = read(u32, data, 4); - var nextIndex: u32 = writeIndex; - var j: u32 = 8; - while (j < writeIndex) { - const prop = read(u8, data, j); - const len = read(u32, data, j + 1); - const val = data[j + 5 .. j + 5 + len]; - if (Fields.getAliasByName(ctx.typeEntry.?, prop, val)) |node| { - const id = Node.getNodeId(node); - write(buf, id, ctx.resultLen); - write(buf, errors.ClientError.null, ctx.resultLen + 4); - ctx.resultLen += 5; - nextIndex = endIndex; - break; - } - j = j + 5 + len; + const insert = utils.read(t.ModifyCreateHeader, buf, i); + i += utils.sizeOf(t.ModifyCreateHeader); + const target = buf[i .. i + insert.size]; + i += insert.size; + const typeEntry = try Node.getType(db, insert.type); + const upsertRes = try upsertTarget(db, insert.type, typeEntry, target); + const dataSize = utils.read(u32, buf, i); + const id = Node.getNodeId(upsertRes.node); + i += 4; + if (upsertRes.created) { + try modifyProps(db, typeEntry, upsertRes.node, target, items); + const data = buf[i .. i + dataSize]; + modifyProps(db, typeEntry, upsertRes.node, data, items) catch { + // handle errors + }; } - i += nextIndex + 1; - }, - .switchType => { - try switchType(ctx, read(u16, data, 0)); - i += 3; - }, - .addEmptySort => { - i += try addEmptyToSortIndex(ctx, data) + 1; - }, - .addEmptySortText => { - i += try addEmptyTextToSortIndex(ctx, data) + 1; + utils.write(result, id, j); + utils.write(result, t.ModifyError.null, j + 4); + i += dataSize; }, .delete => { - i += try deleteField(ctx) + 1; - }, - .deleteSortIndex => { - i += try deleteFieldSortIndex(ctx) + 1; - }, - .createProp => { - i += try createField(ctx, data) + 1; - }, - .updateProp => { - i += try updateField(ctx, data) + 1; - }, - .updatePartial => { - i += try updatePartialField(ctx, data) + 1; - }, - .increment, .decrement => { - i += try increment(ctx, data, op) + 1; - }, - .expire => { - Node.expireNode(ctx, ctx.typeId, ctx.id, std.time.timestamp() + read(u32, data, 0)); - i += 5; + const delete = utils.read(t.ModifyDeleteHeader, buf, i); + i += utils.sizeOf(t.ModifyDeleteHeader); + const typeEntry = try Node.getType(db, delete.type); + var id = delete.id; + if (delete.isTmp) id = utils.read(u32, items, id * resItemSize); + utils.write(result, id, j); + utils.write(result, t.ModifyError.null, j + 4); + if (Node.getNode(typeEntry, id)) |node| { + Node.deleteNode(db, typeEntry, node) catch { + // handle errors + }; + } }, } + j += resItemSize; } - return i; -} - -pub fn modify( - thread: *Thread.Thread, - batch: []u8, - dbCtx: *DbCtx, - opType: t.OpType, -) !void { - const modifyId = read(u32, batch, 0); - const nodeCount = read(u32, batch, 13); - const expectedLen = 4 + nodeCount * 5; - var ctx: ModifyCtx = .{ - .result = try thread.modify.result(expectedLen, modifyId, opType), - .resultLen = 4, - .field = undefined, - .typeId = 0, - .id = 0, - .currentSortIndex = null, - .typeSortIndex = null, - .node = null, - .typeEntry = null, - .fieldSchema = null, - .fieldType = t.PropType.null, - .db = dbCtx, - .batch = batch, - .err = errors.ClientError.null, - .idSubs = null, - .subTypes = null, - .thread = thread, - }; - - _ = try writeData(&ctx, batch[13 + 4 ..]); - - Node.expire(&ctx); - writeoutPrevNodeId(&ctx, &ctx.resultLen, ctx.id, ctx.result); - write(ctx.result, ctx.resultLen, 0); - - if (ctx.resultLen < expectedLen) { - @memset(ctx.result[ctx.resultLen..expectedLen], 0); - } + // 1. expire will just be checked on query + // 2. subscription will handle timers + // 3. keep Node.expire(db) for internal cleanup + Node.expire(db); + utils.write(result, j, 0); + if (j < size) @memset(result[j..size], 0); } diff --git a/native/modify/reference.zig b/native/modify/reference.zig deleted file mode 100644 index d28d1eaf24..0000000000 --- a/native/modify/reference.zig +++ /dev/null @@ -1,62 +0,0 @@ -const Schema = @import("../selva/schema.zig"); -const Node = @import("../selva/node.zig"); -const References = @import("../selva/references.zig"); -const read = @import("../utils.zig").read; -const Modify = @import("common.zig"); -const errors = @import("../errors.zig"); -const std = @import("std"); -const ModifyCtx = Modify.ModifyCtx; -const edge = @import("edges.zig"); -const RefEdgeOp = @import("../types.zig").RefEdgeOp; - -pub fn updateReference(ctx: *ModifyCtx, data: []u8) !usize { - const op: RefEdgeOp = @enumFromInt(data[0]); - const hasEdges = RefEdgeOp.hasEdges(op); - const isTmpId = RefEdgeOp.isTmpId(op); - const refTypeId = Schema.getRefTypeIdFromFieldSchema(ctx.fieldSchema.?); - const refTypeEntry = try Node.getType(ctx.db, refTypeId); - var id = read(u32, data, 1); - - if (isTmpId) { - id = Modify.resolveTmpId(ctx, id); - } - - if (ctx.id == id and ctx.typeId == refTypeId) { - // don't ref yourself - return 5; - } - - var ref: ?References.ReferenceLarge = null; - - const oldRefDst = References.getReference(ctx.node.?, ctx.fieldSchema.?); - const dstType = try Node.getRefDstType(ctx.db, ctx.fieldSchema.?); - const dstNode = Node.getNodeFromReference(dstType, oldRefDst); - - if (dstNode) |d| { - if (Node.getNodeId(d) == id) { - ref = oldRefDst; - } - } - - if (ref == null) { - if (Node.getNode(refTypeEntry, id)) |dst| { - ref = try References.writeReference(ctx, ctx.node.?, ctx.fieldSchema.?, dst); - } else { - return 5; //TODO WARN errors.SelvaError.SELVA_ENOENT - } - } - - if (hasEdges) { - const totalEdgesLen = read(u32, data, 5); - const len = 5 + totalEdgesLen; - if (ref) |r| { - const edges = data[9..len]; - try edge.writeEdges(ctx, r, edges); - } else { - std.log.err("EDGE MODIFY / Cannot find select ref to {d} \n", .{id}); - } - return len; - } - - return 5; -} diff --git a/native/modify/references.zig b/native/modify/references.zig deleted file mode 100644 index 9d98ed4696..0000000000 --- a/native/modify/references.zig +++ /dev/null @@ -1,186 +0,0 @@ -const assert = std.debug.assert; -const selva = @import("../selva/selva.zig").c; -const Schema = @import("../selva/schema.zig"); -const Node = @import("../selva/node.zig"); -const References = @import("../selva/references.zig"); -const read = @import("../utils.zig").read; -const Modify = @import("common.zig"); -const errors = @import("../errors.zig"); -const std = @import("std"); -const edge = @import("edges.zig"); -const t = @import("../types.zig"); -const RefEdgeOp = t.RefEdgeOp; -const move = @import("../utils.zig").move; - -const ModifyCtx = Modify.ModifyCtx; - -pub fn writeReferences(ctx: *ModifyCtx, buf: []u8) !usize { - var i: usize = 0; - while (i < buf.len) { - const op: t.RefOp = @enumFromInt(buf[i]); - const data: []u8 = buf[i + 1 ..]; - i += 1; - switch (op) { - t.RefOp.set => { - const len: usize = read(u32, data, 0); - const u8IdsUnaligned = data[4 .. 4 + len]; - const address = @intFromPtr(u8IdsUnaligned.ptr); - const offset: u8 = @truncate(address % 4); - const u8IdsAligned = data[4 - offset .. 4 + len - offset]; - if (offset != 0) move(u8IdsAligned, u8IdsUnaligned); - const u32Ids = read([]u32, u8IdsAligned, 0); - try References.putReferences( - ctx, - ctx.node.?, - ctx.fieldSchema.?, - u32Ids, - ); - i += 4 + len; - }, - t.RefOp.setIndex => { - // i += 1; - }, - t.RefOp.setTmp => { - // i += 1; - }, - t.RefOp.setEdge => { - // i += 1; - }, - t.RefOp.setIndexTmp => { - // i += 1; - }, - t.RefOp.setEdgeIndex => { - // i += 1; - }, - t.RefOp.setEdgeIndexTmp => { - // i += 1;/ - }, - t.RefOp.setEdgeTmp => { - // i += 1; - }, - t.RefOp.clear => { - References.clearReferences(ctx, ctx.node.?, ctx.fieldSchema.?); - // i += 1; - }, - t.RefOp.del => { - // i += 1; - }, - t.RefOp.end => break, - } - } - - return i; -} - -pub fn updateReferences(ctx: *ModifyCtx, data: []u8) !usize { - const len: usize = read(u32, data, 0); - if (ctx.node == null) { - std.log.err("References update id: {d} node does not exist \n", .{ctx.id}); - return len; - } - - const refTypeId = Schema.getRefTypeIdFromFieldSchema(ctx.fieldSchema.?); - const refTypeEntry = try Node.getType(ctx.db, refTypeId); - const refsLen: usize = read(u32, data, 5); - var i: usize = 9; - - References.preallocReferences(ctx, refsLen); - - while (i < len) : (i += 5) { - const op: RefEdgeOp = @enumFromInt(data[i]); - const hasEdgeData = RefEdgeOp.hasEdges(op); - const hasIndex = RefEdgeOp.hasIndex(op); - const isTmpId = RefEdgeOp.isTmpId(op); - var id = read(u32, data, i + 1); - if (isTmpId) { - id = Modify.resolveTmpId(ctx, id); - } - - if (ctx.id == id and ctx.typeId == refTypeId) { - // don't ref yourself - if (hasEdgeData) { - const sizepos = if (hasIndex) i + 9 else i + 5; - const edgelen = read(u32, data, sizepos); - const edgepos = sizepos + 4; - const edges = data[edgepos .. edgepos + edgelen]; - i += edges.len + 4; - } - i += 4; - continue; - } - - const index: i32 = if (hasIndex) read(i32, data, i + 5) else -1; - - var ref: References.ReferenceAny = undefined; - if (Node.getNode(refTypeEntry, id)) |dstNode| { - ref = try References.insertReference(ctx, ctx.node.?, ctx.fieldSchema.?, dstNode, index, hasIndex); - } else { - if (hasEdgeData) { - const sizepos = if (hasIndex) i + 9 else i + 5; - const edgelen = read(u32, data, sizepos); - i += edgelen; - } - i += 4; - // TODO WARN errors.SelvaError.SELVA_ENOENT - continue; - } - - if (hasEdgeData) { - const sizepos = if (hasIndex) i + 9 else i + 5; - const edgelen = read(u32, data, sizepos); - const edgepos = sizepos + 4; - const edges = data[edgepos .. edgepos + edgelen]; - if (ref.type == selva.SELVA_NODE_REFERENCE_LARGE) { - try edge.writeEdges(ctx, ref.p.large, edges); - } - i += edgelen + 4; - } - if (hasIndex) { - i += 4; - } - } - - return len; -} - -pub fn deleteReferences(ctx: *ModifyCtx, data: []u8) !usize { - const len: usize = read(u32, data, 0); - if (ctx.node == null) { - std.log.err("References delete id: {d} node does not exist \n", .{ctx.id}); - return len; - } - var i: usize = 1; - while (i < len) : (i += 4) { - const id = read(u32, data, i + 4); - try References.deleteReference( - ctx, - ctx.node.?, - ctx.fieldSchema.?, - id, - ); - } - return len; -} - -pub fn putReferences(ctx: *ModifyCtx, data: []u8) !usize { - const len: usize = read(u32, data, 0); - if (ctx.node == null) { - std.log.err("References delete id: {d} node does not exist \n", .{ctx.id}); - return len; - } - const idsUnAligned = data[5 .. len + 4]; - const address = @intFromPtr(idsUnAligned.ptr); - const offset: u8 = @truncate(address % 4); - const aligned = data[5 - offset .. len - offset + 4]; - if (offset != 0) { - move(aligned, idsUnAligned); - } - const u32ids = read([]u32, aligned, 0); - try References.putReferences( - ctx, - ctx.node.?, - ctx.fieldSchema.?, - u32ids, - ); - return len; -} diff --git a/native/modify/subscription.zig b/native/modify/subscription.zig index 6e3ff3fa1c..dae41f51d0 100644 --- a/native/modify/subscription.zig +++ b/native/modify/subscription.zig @@ -191,6 +191,10 @@ pub fn suscription(thread: *Thread.Thread, batch: []u8) !void { .expire => { i += 5; }, + .end => { + i += 1; + break; + }, } } } diff --git a/native/modify/update.zig b/native/modify/update.zig index 38ceea1771..9d6b53eefe 100644 --- a/native/modify/update.zig +++ b/native/modify/update.zig @@ -83,11 +83,7 @@ pub fn updateField(ctx: *ModifyCtx, data: []u8) !usize { const hllPrecision = data[1]; const offset = 2; const len = read(u32, data, offset); - var currentData = selva.c.selva_fields_get_selva_string(ctx.node.?, ctx.fieldSchema.?); - if (currentData == null) { - currentData = try Fields.ensurePropTypeString(ctx, ctx.fieldSchema.?); - selva.c.hll_init(currentData, hllPrecision, hllMode); - } + const currentData = Fields.ensureCardinality(ctx.node.?, ctx.fieldSchema.?, hllPrecision, hllMode); var i: usize = 4 + offset; const currentCount = if (ctx.currentSortIndex != null) selva.c.hll_count(currentData) else undefined; while (i < (len * 8) + offset) { diff --git a/native/query/aggregates/aggregates.zig b/native/query/aggregates/aggregates.zig index 06677bde8a..fcbd1aa320 100644 --- a/native/query/aggregates/aggregates.zig +++ b/native/query/aggregates/aggregates.zig @@ -12,94 +12,100 @@ const t = @import("../../types.zig"); const resultHeaderOffset = @import("../../thread/results.zig").resultHeaderOffset; const filter = @import("../filter/filter.zig").filter; +pub const AggCtx = struct { + queryCtx: *Query.QueryCtx, + typeEntry: Node.Type, + limit: u32 = 0, + isSamplingSet: bool = false, + hllAccumulator: ?*Selva.c.struct_selva_string = null, + accumulatorSize: usize = 0, + resultsSize: usize = 0, + hadAccumulated: bool = false, + totalResultsSize: usize = 0, +}; + pub fn iterator( - ctx: *Query.QueryCtx, + aggCtx: *AggCtx, it: anytype, - limit: u32, - comptime hasFilter: bool, + hasFilter: bool, filterBuf: []u8, aggDefs: []u8, accumulatorProp: []u8, - typeEntry: Node.Type, - hllAccumulator: anytype, ) !u32 { var count: u32 = 0; - var hadAccumulated: bool = false; + aggCtx.hadAccumulated = false; while (it.next()) |node| { if (hasFilter) { - if (!try filter(node, ctx, filterBuf)) { + if (!try filter(node, aggCtx.queryCtx, filterBuf)) { continue; } } - aggregateProps(node, typeEntry, aggDefs, accumulatorProp, hllAccumulator, &hadAccumulated); + aggregateProps(node, aggDefs, accumulatorProp, aggCtx); count += 1; - if (count >= limit) break; + if (count >= aggCtx.limit) break; } return count; } pub inline fn aggregateProps( node: Node.Node, - typeEntry: Node.Type, aggDefs: []u8, accumulatorProp: []u8, - hllAccumulator: anytype, - hadAccumulated: *bool, + aggCtx: *AggCtx, ) void { if (aggDefs.len == 0) return; var i: usize = 0; while (i < aggDefs.len) { const currentAggDef = utils.readNext(t.AggProp, aggDefs, &i); - utils.debugPrint("currentAggDef: {any}\n", .{currentAggDef}); - utils.debugPrint("😸 propId: {d}, node {d}\n", .{ currentAggDef.propId, Node.getNodeId(node) }); + // utils.debugPrint("currentAggDef: {any}\n", .{currentAggDef}); + // utils.debugPrint("😸 propId: {d}, node {d}\n", .{ currentAggDef.propId, Node.getNodeId(node) }); var value: []u8 = undefined; if (currentAggDef.aggFunction == t.AggFunction.count) { - accumulate(currentAggDef, accumulatorProp, value, hadAccumulated, null, null); + accumulate(currentAggDef, accumulatorProp, value, aggCtx, null); } else { if (currentAggDef.propId != t.MAIN_PROP and currentAggDef.aggFunction != t.AggFunction.cardinality) { - i += @sizeOf(t.AggProp); + i += utils.sizeOf(t.AggProp); continue; } - const propSchema = Schema.getFieldSchema(typeEntry, currentAggDef.propId) catch { - i += @sizeOf(t.AggProp); + const propSchema = Schema.getFieldSchema(aggCtx.typeEntry, currentAggDef.propId) catch { + i += utils.sizeOf(t.AggProp); continue; }; - if (currentAggDef.aggFunction == t.AggFunction.cardinality) { + if (currentAggDef.aggFunction == .cardinality) { const hllValue = Selva.c.selva_fields_get_selva_string(node, propSchema) orelse null; if (hllValue == null) { - i += @sizeOf(t.AggProp); + i += utils.sizeOf(t.AggProp); continue; } - if (!hadAccumulated.*) { - _ = Selva.c.selva_string_replace(hllAccumulator, null, Selva.c.HLL_INIT_SIZE); - Selva.c.hll_init_like(hllAccumulator, hllValue); + if (!aggCtx.hadAccumulated) { + _ = Selva.c.selva_string_replace(aggCtx.hllAccumulator, null, Selva.c.HLL_INIT_SIZE); + Selva.c.hll_init_like(aggCtx.hllAccumulator, hllValue); } - accumulate(currentAggDef, accumulatorProp, value, hadAccumulated, hllAccumulator, hllValue); + accumulate(currentAggDef, accumulatorProp, value, aggCtx, hllValue); } else { value = Fields.get( - typeEntry, + aggCtx.typeEntry, node, propSchema, currentAggDef.propType, ); if (value.len > 0) { - accumulate(currentAggDef, accumulatorProp, value, hadAccumulated, null, null); + accumulate(currentAggDef, accumulatorProp, value, aggCtx, null); } } } } - hadAccumulated.* = true; + aggCtx.hadAccumulated = true; } pub inline fn accumulate( currentAggDef: t.AggProp, accumulatorProp: []u8, value: []u8, - hadAccumulated: *bool, - hllAccumulator: anytype, + aggCtx: *AggCtx, hllValue: anytype, ) void { const propType = currentAggDef.propType; @@ -112,7 +118,7 @@ pub inline fn accumulate( switch (aggFunction) { .sum => { writeAs(f64, accumulatorProp, read(f64, accumulatorProp, accumulatorPos) + microbufferToF64(propTypeTag, value, start), accumulatorPos); - utils.debugPrint("❤️ v: {d}\n", .{read(f64, accumulatorProp, accumulatorPos)}); + // utils.debugPrint("❤️ v: {d}\n", .{read(f64, accumulatorProp, accumulatorPos)}); }, .avg => { const val = microbufferToF64(propTypeTag, value, start); @@ -126,16 +132,14 @@ pub inline fn accumulate( writeAs(f64, accumulatorProp, sum, accumulatorPos + 8); }, .min => { - // utils.debugPrint("hadAccumulated: {any} {d} {d}\n", .{ hadAccumulated.*, accumulatorPos, microbufferToF64(propTypeTag, value, start) }); - if (!hadAccumulated.*) { + if (!aggCtx.hadAccumulated) { writeAs(f64, accumulatorProp, microbufferToF64(propTypeTag, value, start), accumulatorPos); } else { writeAs(f64, accumulatorProp, @min(read(f64, accumulatorProp, accumulatorPos), microbufferToF64(propTypeTag, value, start)), accumulatorPos); } - // utils.debugPrint("ficou: {d}\n", .{read(f64, accumulatorProp, accumulatorPos)}); }, .max => { - if (!hadAccumulated.*) { + if (!aggCtx.hadAccumulated) { writeAs(f64, accumulatorProp, microbufferToF64(propTypeTag, value, start), accumulatorPos); } else { writeAs(f64, accumulatorProp, @max(read(f64, accumulatorProp, accumulatorPos), microbufferToF64(propTypeTag, value, start)), accumulatorPos); @@ -189,8 +193,8 @@ pub inline fn accumulate( writeAs(u32, accumulatorProp, read(u32, accumulatorProp, accumulatorPos) + 1, accumulatorPos); }, .cardinality => { - Selva.c.hll_union(hllAccumulator, hllValue); - writeAs(u32, accumulatorProp, read(u32, Selva.c.hll_count(hllAccumulator)[0..4], 0), accumulatorPos); + Selva.c.hll_union(aggCtx.hllAccumulator, hllValue); + writeAs(u32, accumulatorProp, read(u32, Selva.c.hll_count(aggCtx.hllAccumulator)[0..4], 0), accumulatorPos); }, else => {}, } @@ -199,15 +203,14 @@ pub inline fn accumulate( } pub inline fn finalizeResults( - ctx: *Query.QueryCtx, + aggCtx: *AggCtx, aggDefs: []u8, accumulatorProp: []u8, - isSamplingSet: bool, initialAggDefOffset: usize, ) !void { var i: usize = initialAggDefOffset; - const initialResultOffset = ctx.thread.query.index; + const initialResultOffset = aggCtx.queryCtx.thread.query.index; while (i < aggDefs.len) { const currentAggDef = utils.readNext(t.AggProp, aggDefs, &i); const aggFunction = currentAggDef.aggFunction; @@ -216,28 +219,28 @@ pub inline fn finalizeResults( switch (aggFunction) { .sum => { - ctx.thread.query.reserveAndWrite(read(f64, accumulatorProp, accumulatorPos), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(read(f64, accumulatorProp, accumulatorPos), resultPos); }, .max => { - ctx.thread.query.reserveAndWrite(read(f64, accumulatorProp, accumulatorPos), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(read(f64, accumulatorProp, accumulatorPos), resultPos); }, .min => { - ctx.thread.query.reserveAndWrite(read(f64, accumulatorProp, accumulatorPos), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(read(f64, accumulatorProp, accumulatorPos), resultPos); }, .avg => { const count = read(u64, accumulatorProp, accumulatorPos); const sum = read(f64, accumulatorProp, accumulatorPos + 8); const mean = sum / @as(f64, @floatFromInt(count)); - ctx.thread.query.reserveAndWrite(@as(f64, @floatCast(mean)), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(@as(f64, @floatCast(mean)), resultPos); }, .hmean => { const count = read(u64, accumulatorProp, accumulatorPos); if (count != 0) { const isum = read(f64, accumulatorProp, accumulatorPos + 8); const mean = @as(f64, @floatFromInt(count)) / isum; - ctx.thread.query.reserveAndWrite(@as(f64, @floatCast(mean)), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(@as(f64, @floatCast(mean)), resultPos); } else { - ctx.thread.query.reserveAndWrite(@as(f64, @floatCast(0.0)), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(@as(f64, @floatCast(0.0)), resultPos); } }, .stddev => { @@ -248,14 +251,14 @@ pub inline fn finalizeResults( const mean = sum / @as(f64, @floatFromInt(count)); const numerator = sum_sq - (sum * sum) / @as(f64, @floatFromInt(count)); const denominator = @as(f64, @floatFromInt(count)) - 1.0; - const variance = if (isSamplingSet) + const variance = if (aggCtx.isSamplingSet) numerator / denominator else (sum_sq / @as(f64, @floatFromInt(count))) - (mean * mean); const stddev = if (variance < 0) 0 else @sqrt(variance); - ctx.thread.query.reserveAndWrite(@as(f64, @floatCast(stddev)), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(@as(f64, @floatCast(stddev)), resultPos); } else { - ctx.thread.query.reserveAndWrite(@as(f64, @floatCast(0.0)), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(@as(f64, @floatCast(0.0)), resultPos); } }, .variance => { @@ -266,25 +269,25 @@ pub inline fn finalizeResults( const mean = sum / @as(f64, @floatFromInt(count)); const numerator = sum_sq - (sum * sum) / @as(f64, @floatFromInt(count)); const denominator = @as(f64, @floatFromInt(count)) - 1.0; - var variance = if (isSamplingSet) + var variance = if (aggCtx.isSamplingSet) numerator / denominator else (sum_sq / @as(f64, @floatFromInt(count))) - (mean * mean); if (variance < 0) variance = 0; - ctx.thread.query.reserveAndWrite(@as(f64, @floatCast(variance)), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(@as(f64, @floatCast(variance)), resultPos); } else { - ctx.thread.query.reserveAndWrite(@as(f64, @floatCast(0.0)), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(@as(f64, @floatCast(0.0)), resultPos); } }, .count => { const count = read(u32, accumulatorProp, accumulatorPos); - ctx.thread.query.reserveAndWrite(count, resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(count, resultPos); }, .cardinality => { - ctx.thread.query.reserveAndWrite(read(u32, accumulatorProp, accumulatorPos), resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(read(u32, accumulatorProp, accumulatorPos), resultPos); }, else => { - ctx.thread.query.reserveAndWrite(0.0, resultPos); + aggCtx.queryCtx.thread.query.reserveAndWrite(0.0, resultPos); }, } } diff --git a/native/query/aggregates/group.zig b/native/query/aggregates/group.zig index 9e296a74e4..af9d4a342b 100644 --- a/native/query/aggregates/group.zig +++ b/native/query/aggregates/group.zig @@ -15,30 +15,29 @@ const GroupByHashMap = @import("hashMap.zig").GroupByHashMap; const filter = @import("../filter/filter.zig").filter; pub fn iterator( - ctx: *Query.QueryCtx, + aggCtx: *Aggregates.AggCtx, groupByHashMap: *GroupByHashMap, it: anytype, - limit: u32, - comptime hasFilter: bool, + hasFilter: bool, filterBuf: []u8, aggDefs: []u8, - accumulatorSize: usize, - typeEntry: Node.Type, - hllAccumulator: anytype, -) !u32 { +) usize { var count: u32 = 0; - var hadAccumulated: bool = false; + aggCtx.hadAccumulated = false; while (it.next()) |node| { if (hasFilter) { - if (!try filter(node, ctx, filterBuf)) { + if (!try filter(node, aggCtx.queryCtx, filterBuf)) { continue; } } - try aggregatePropsWithGroupBy(groupByHashMap, node, typeEntry, aggDefs, accumulatorSize, hllAccumulator, &hadAccumulated); + aggregatePropsWithGroupBy(groupByHashMap, node, aggDefs, aggCtx) catch { + return 0; + }; count += 1; - if (count >= limit) break; + if (count >= aggCtx.limit) break; } + // utils.debugPrint("count {d}, resultsSize {d}, sumOfDistinctKeyLens {d}\n", .{ count, resultsSize, sumOfDistinctKeyLens.* }); return count; } @@ -55,17 +54,13 @@ inline fn getGrouByKeyValue( if (keyValue.len == 0) return emptyKey; - const key = if (propType == t.PropType.string) - if (propId == 0) - keyValue.ptr[start + 1 .. start + 1 + keyValue[start]] - else - keyValue.ptr[2 + start .. start + keyValue.len - propType.crcLen()] - else if (propType == t.PropType.timestamp) - @constCast(utils.datePart(keyValue.ptr[start .. start + keyValue.len], @enumFromInt(stepType), timezone)) - else if (propType == t.PropType.reference) - Node.getReferenceNodeId(@ptrCast(@alignCast(keyValue.ptr))) - else - keyValue.ptr[start .. start + propType.size()]; + const key = switch (propType) { + .string => if (propId == 0) keyValue.ptr[start + 1 .. start + 1 + keyValue[start]] else keyValue.ptr[2 + start .. start + keyValue.len - propType.crcLen()], + .stringFixed => if (propId == 0) keyValue.ptr[start + 1 .. start + 1 + keyValue[start]] else keyValue.ptr[2 + start .. start + keyValue.len - propType.crcLen()], + .timestamp => @constCast(utils.datePart(keyValue.ptr[start .. start + keyValue.len], @enumFromInt(stepType), timezone)), + .reference => Node.getReferenceNodeId(@ptrCast(@alignCast(keyValue.ptr))), + else => keyValue.ptr[start .. start + propType.size()], + }; return key; } @@ -73,11 +68,8 @@ inline fn getGrouByKeyValue( inline fn aggregatePropsWithGroupBy( groupByHashMap: *GroupByHashMap, node: Node.Node, - typeEntry: Node.Type, aggDefs: []u8, - accumulatorSize: usize, - hllAccumulator: anytype, - hadAccumulated: *bool, + aggCtx: *Aggregates.AggCtx, ) !void { if (aggDefs.len == 0) return; // utils.debugPrint("\n\naggDefs: {any}\n", .{aggDefs}); @@ -89,13 +81,13 @@ inline fn aggregatePropsWithGroupBy( var keyValue: []u8 = undefined; - const propSchema = Schema.getFieldSchema(typeEntry, currentKeyPropDef.propId) catch { - i += @sizeOf(t.GroupByKeyProp); + const propSchema = Schema.getFieldSchema(aggCtx.typeEntry, currentKeyPropDef.propId) catch { + i += utils.sizeOf(t.GroupByKeyProp); return; }; keyValue = Fields.get( - typeEntry, + aggCtx.typeEntry, node, propSchema, currentKeyPropDef.propType, @@ -103,19 +95,23 @@ inline fn aggregatePropsWithGroupBy( const key = getGrouByKeyValue(keyValue, currentKeyPropDef); const hash_map_entry = if (currentKeyPropDef.propType == t.PropType.timestamp and currentKeyPropDef.stepRange != 0) - try groupByHashMap.getOrInsertWithRange(key, accumulatorSize, currentKeyPropDef.stepRange) + try groupByHashMap.getOrInsertWithRange(key, aggCtx.accumulatorSize, currentKeyPropDef.stepRange) else - try groupByHashMap.getOrInsert(key, accumulatorSize); + try groupByHashMap.getOrInsert(key, aggCtx.accumulatorSize); + const accumulatorProp = hash_map_entry.value; - hadAccumulated.* = !hash_map_entry.is_new; + aggCtx.hadAccumulated = !hash_map_entry.is_new; + if (hash_map_entry.is_new) { + aggCtx.totalResultsSize += 2 + key.len + aggCtx.resultsSize; + } + // utils.debugPrint("is_new?: {any}, key: {s} {d}, sumOfDistinctKeyLens: {d}\n", .{ hash_map_entry.is_new, key, key.len, sumOfDistinctKeyLens }); - Aggregates.aggregateProps(node, typeEntry, aggDefs[i..], accumulatorProp, hllAccumulator, hadAccumulated); + Aggregates.aggregateProps(node, aggDefs[i..], accumulatorProp, aggCtx); } pub inline fn finalizeGroupResults( - ctx: *Query.QueryCtx, + aggCtx: *Aggregates.AggCtx, groupByHashMap: *GroupByHashMap, - header: t.AggHeader, aggDefs: []u8, ) !void { var it = groupByHashMap.iterator(); @@ -124,12 +120,34 @@ pub inline fn finalizeGroupResults( const key = entry.key_ptr.*; const keyLen: u16 = @intCast(key.len); if (key.len > 0) { - try ctx.thread.query.append(keyLen); - try ctx.thread.query.append(key); + try aggCtx.queryCtx.thread.query.append(keyLen); + try aggCtx.queryCtx.thread.query.append(key); + } + + const accumulatorProp = entry.value_ptr.*; + + try Aggregates.finalizeResults(aggCtx, aggDefs, accumulatorProp, @bitSizeOf(t.GroupByKeyProp) / 8); + } +} + +pub inline fn finalizeRefsGroupResults( + aggCtx: *Aggregates.AggCtx, + groupByHashMap: *GroupByHashMap, + aggDefs: []u8, +) !void { + var it = groupByHashMap.iterator(); + + while (it.next()) |entry| { + const key = entry.key_ptr.*; + const keyLen: u16 = @intCast(key.len); + + if (key.len > 0) { + try aggCtx.queryCtx.thread.query.append(keyLen); + try aggCtx.queryCtx.thread.query.append(key); } const accumulatorProp = entry.value_ptr.*; - try Aggregates.finalizeResults(ctx, aggDefs, accumulatorProp, header.isSamplingSet, @bitSizeOf(t.GroupByKeyProp) / 8); + try Aggregates.finalizeResults(aggCtx, aggDefs, accumulatorProp, @bitSizeOf(t.GroupByKeyProp) / 8); } } diff --git a/native/query/aggregates/references.zig b/native/query/aggregates/references.zig index 511e6d199b..04d77bf98b 100644 --- a/native/query/aggregates/references.zig +++ b/native/query/aggregates/references.zig @@ -8,8 +8,12 @@ const utils = @import("../../utils.zig"); const t = @import("../../types.zig"); const Aggregates = @import("./aggregates.zig"); const References = @import("../../selva/references.zig"); +const GroupBy = @import("./group.zig"); +const GroupByHashMap = @import("./hashMap.zig").GroupByHashMap; const errors = @import("../../errors.zig"); const accumulate = Aggregates.accumulate; +const std = @import("std"); +const Filter = @import("../filter/filter.zig"); pub inline fn aggregateRefsProps( ctx: *Query.QueryCtx, @@ -18,21 +22,52 @@ pub inline fn aggregateRefsProps( fromType: Selva.Type, i: *usize, ) !void { - utils.debugPrint("i: {d}\n", .{i.*}); + var filter: []u8 = undefined; + const header = utils.readNext(t.AggRefsHeader, q, i); - // i.* += utils.sizeOf(t.AggRefsHeader); - utils.debugPrint("aggregateRefsProps header: {any}\n", .{header}); + // utils.debugPrint("aggregateRefsProps header: {any}\n", .{header}); const accumulatorProp = try ctx.db.allocator.alloc(u8, header.accumulatorSize); @memset(accumulatorProp, 0); defer ctx.db.allocator.free(accumulatorProp); var it = try References.iterator(false, false, ctx.db, from, header.targetProp, fromType); - _ = try Aggregates.iterator(ctx, &it, 1000, false, undefined, q[i.*..], accumulatorProp, it.dstType, undefined); // TODO: hllAcc - // should be wrong because changes with each aggFunc and targetProp - try ctx.thread.query.append(@intFromEnum(t.ReadOp.aggregation)); - try ctx.thread.query.append(header.targetProp); - try ctx.thread.query.append(@as(u32, @sizeOf(f64))); // falty count and cardinalty is u32 - try Aggregates.finalizeResults(ctx, q[i.*..], accumulatorProp, header.isSamplingSet, 0); + const hasFilter = header.filterSize > 0; + if (hasFilter) { + filter = utils.sliceNext(header.filterSize, q, i); + try Filter.prepare(filter, ctx, it.dstType); + } + + const hllAccumulator = Selva.c.selva_string_create(null, Selva.c.HLL_INIT_SIZE, Selva.c.SELVA_STRING_MUTABLE); + defer Selva.c.selva_string_free(hllAccumulator); + + var aggCtx = Aggregates.AggCtx{ + .queryCtx = ctx, + .typeEntry = it.dstType, + .limit = std.math.maxInt(u32), // unlimited in branched queries + .hllAccumulator = hllAccumulator, + .isSamplingSet = header.isSamplingSet, + .accumulatorSize = header.accumulatorSize, + .resultsSize = header.resultsSize, + }; + + if (header.hasGroupBy) { + var groupByHashMap = GroupByHashMap.init(ctx.db.allocator); + defer groupByHashMap.deinit(); + + _ = GroupBy.iterator(&aggCtx, &groupByHashMap, &it, hasFilter, filter, q[i.*..]); + + try ctx.thread.query.append(@intFromEnum(t.ReadOp.aggregation)); + try ctx.thread.query.append(header.targetProp); + try ctx.thread.query.append(@as(u32, @intCast(aggCtx.totalResultsSize))); + try GroupBy.finalizeRefsGroupResults(&aggCtx, &groupByHashMap, q[i.*..]); + } else { + _ = try Aggregates.iterator(&aggCtx, &it, hasFilter, filter, q[i.*..], accumulatorProp); + + try ctx.thread.query.append(@intFromEnum(t.ReadOp.aggregation)); + try ctx.thread.query.append(header.targetProp); + try ctx.thread.query.append(@as(u32, header.resultsSize)); + try Aggregates.finalizeResults(&aggCtx, q[i.*..], accumulatorProp, 0); + } } diff --git a/native/query/filter/compare.zig b/native/query/filter/compare.zig index 2f6c9be381..0a665bb05e 100644 --- a/native/query/filter/compare.zig +++ b/native/query/filter/compare.zig @@ -6,95 +6,74 @@ const Schema = @import("../../selva/schema.zig"); const Fields = @import("../../selva/fields.zig"); const t = @import("../../types.zig"); -// Fns here are non-inline to avoid a too long switch statement in filter - -pub const Op = enum(u8) { - eq = 1, - lt = 2, - gt = 3, - le = 4, - ge = 5, +pub const Function = enum(u8) { + eq, + lt, + gt, + le, + ge, + range, + eqBatch, + eqBatchSmall, + inc, }; -pub const Function = enum { Single, Range, Batch, BatchSmall }; - -pub fn batch(comptime op: Op, T: type, q: []u8, v: []u8, i: usize, c: *t.FilterCondition) bool { +pub fn eqBatch(T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { const size = utils.sizeOf(T); const vectorLen = 16 / size; const value = utils.readPtr(T, v, c.start).*; const values = utils.toSlice(T, q[i + size - c.offset .. c.size + @alignOf(T) - c.offset]); const len = values.len / size; var j: usize = 0; - switch (op) { - .eq => { - while (j <= (len)) : (j += vectorLen) { - const vec2: @Vector(vectorLen, T) = values[j..][0..vectorLen].*; - if (std.simd.countElementsWithValue(vec2, value) != 0) { - return true; - } - } - }, - else => { - return false; - }, + while (j <= (len)) : (j += vectorLen) { + const vec2: @Vector(vectorLen, T) = values[j..][0..vectorLen].*; + if (std.simd.countElementsWithValue(vec2, value) != 0) { + return true; + } } return false; } -pub fn batchSmall(comptime op: Op, T: type, q: []u8, v: []u8, i: usize, c: *t.FilterCondition) bool { +pub fn eqBatchSmall(T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { const size = utils.sizeOf(T); const vectorLen = 16 / size; const value = utils.readPtr(T, v, c.start).*; const values = utils.toSlice(T, q[i + size - c.offset .. c.size + @alignOf(T) - c.offset]); const vec: @Vector(vectorLen, T) = values[0..][0..vectorLen].*; - switch (op) { - .eq => { - return (std.simd.countElementsWithValue(vec, value) != 0); - }, - .lt => { - const valueSplat: @Vector(vectorLen, T) = @splat(value); - return @reduce(.Or, valueSplat > vec); - }, - .gt => { - const valueSplat: @Vector(vectorLen, T) = @splat(value); - return @reduce(.Or, valueSplat < vec); - }, - .le => { - const valueSplat: @Vector(vectorLen, T) = @splat(value); - return @reduce(.Or, valueSplat >= vec); - }, - .ge => { - const valueSplat: @Vector(vectorLen, T) = @splat(value); - return @reduce(.Or, valueSplat <= vec); - }, - } + return (std.simd.countElementsWithValue(vec, value) != 0); } -pub fn single(comptime op: Op, T: type, q: []u8, v: []u8, i: usize, c: *t.FilterCondition) bool { - @setEvalBranchQuota(10000); +pub fn eq(comptime T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { + const val = utils.readPtr(T, v, c.start).*; + const target = utils.readPtr(T, q, i + @alignOf(T) - c.offset).*; + return val == target; +} +pub fn lt(comptime T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { const val = utils.readPtr(T, v, c.start).*; const target = utils.readPtr(T, q, i + @alignOf(T) - c.offset).*; - switch (op) { - .eq => { - return val == target; - }, - .lt => { - return val < target; - }, - .gt => { - return val > target; - }, - .le => { - return val <= target; - }, - .ge => { - return val >= target; - }, - } + return val < target; +} + +pub fn gt(comptime T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { + const val = utils.readPtr(T, v, c.start).*; + const target = utils.readPtr(T, q, i + @alignOf(T) - c.offset).*; + return val > target; +} + +pub fn le(comptime T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { + const val = utils.readPtr(T, v, c.start).*; + const target = utils.readPtr(T, q, i + @alignOf(T) - c.offset).*; + return val <= target; } -pub fn range(T: type, q: []u8, v: []u8, i: usize, c: *t.FilterCondition) bool { +pub fn ge(comptime T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { + const val = utils.readPtr(T, v, c.start).*; + const target = utils.readPtr(T, q, i + @alignOf(T) - c.offset).*; + return val >= target; +} + +pub fn range(T: type, q: []u8, v: []const u8, i: usize, c: *t.FilterCondition) bool { const size = utils.sizeOf(T); if (T == f64) { // Floats do not support ignore overflow @@ -107,3 +86,90 @@ pub fn range(T: type, q: []u8, v: []u8, i: usize, c: *t.FilterCondition) bool { return (utils.readPtr(T, v, c.start).* -% utils.readPtr(T, q, i + @alignOf(T) - c.offset).*) <= utils.readPtr(T, q, i + (size * 2) - c.offset).*; } + +// put this in variableSize +// this with batching => [a,b,c] quite nice +const vectorLenU8 = std.simd.suggestVectorLength(u8).?; +const indexes = std.simd.iota(u8, vectorLenU8); +const nulls: @Vector(vectorLenU8, u8) = @splat(@as(u8, 255)); + +pub fn include(q: []u8, v: []const u8, qI: usize, c: *t.FilterCondition) bool { + const query: []u8 = q[qI .. c.size + qI]; + var value: []const u8 = undefined; + + // Make the has seperate we also need to use LIKE + // FIX COMPRESS + if (v[0] == 1) { + // compressed + value = v[0..3]; + } else { + value = v[2 .. v.len - 4]; + } + + var i: usize = 0; + const l = value.len; + const ql = query.len; + if (l < vectorLenU8) { + while (i < l) : (i += 1) { + if (value[i] == query[0]) { + if (i + ql - 1 > l) { + return false; + } + var j: usize = 1; + while (j < ql) : (j += 1) { + if (value[i + j] != query[j]) { + break; + } + } + if (j == ql) { + return true; + } + } + } + return false; + } + + const queryVector: @Vector(vectorLenU8, u8) = @splat(query[0]); + + while (i <= (l - vectorLenU8)) : (i += vectorLenU8) { + const h: @Vector(vectorLenU8, u8) = value[i..][0..vectorLenU8].*; + const matches = h == queryVector; + if (@reduce(.Or, matches)) { + if (l > 1) { + const result = @select(u8, matches, indexes, nulls); + const index = @reduce(.Min, result) + i; + if (index + ql - 1 > l) { + return false; + } + var j: usize = 1; + while (j < ql) : (j += 1) { + if (value[index + j] != query[j]) { + break; + } + } + if (j == ql) { + return true; + } + } + } + } + while (i < l and ql <= l - i) : (i += 1) { + const id2 = value[i]; + if (id2 == query[0]) { + if (i + ql - 1 > l) { + return false; + } + var j: usize = 1; + while (j < ql) : (j += 1) { + if (value[i + j] != query[j]) { + break; + } + } + if (j == ql) { + return true; + } + return true; + } + } + return false; +} diff --git a/native/query/filter/filter.zig b/native/query/filter/filter.zig index df490bbd13..1d8c03198e 100644 --- a/native/query/filter/filter.zig +++ b/native/query/filter/filter.zig @@ -4,9 +4,9 @@ const utils = @import("../../utils.zig"); const Node = @import("../../selva/node.zig"); const Schema = @import("../../selva/schema.zig"); const Fields = @import("../../selva/fields.zig"); +const Selva = @import("../../selva/selva.zig"); const t = @import("../../types.zig"); const Compare = @import("compare.zig"); -const Select = @import("select.zig"); const Instruction = @import("instruction.zig"); const COND_ALIGN_BYTES = @alignOf(t.FilterCondition); @@ -18,7 +18,7 @@ pub fn prepare( var i: usize = 0; while (i < q.len) { const headerSize = COND_ALIGN_BYTES + 1 + utils.sizeOf(t.FilterCondition); - var condition: *t.FilterCondition = undefined; + var c: *t.FilterCondition = undefined; // 255 means its unprepared - the condition new index will be set when aligned if (q[i] == 255) { @@ -26,27 +26,28 @@ pub fn prepare( const totalSize = headerSize + condSize; q[i] = COND_ALIGN_BYTES - utils.alignLeft(t.FilterCondition, q[i + 1 .. i + totalSize]) + 1; - condition = utils.readPtr(t.FilterCondition, q, q[i] + i); + c = utils.readPtr(t.FilterCondition, q, q[i] + i); - if (condition.op.compare != t.FilterOpCompare.nextOrIndex) { - condition.fieldSchema = try Schema.getFieldSchema(typeEntry, condition.prop); + if (c.op.compare != t.FilterOpCompare.nextOrIndex) { + c.fieldSchema = try Schema.getFieldSchema(typeEntry, c.prop); } const nextI = q[i] + i + utils.sizeOf(t.FilterCondition); - condition.offset = utils.alignLeftLen(condition.len, q[nextI .. totalSize + i]); + + c.offset = utils.alignLeftLen(c.len, q[nextI .. totalSize + i]); const end = totalSize + i; - switch (condition.op.compare) { + switch (c.op.compare) { .selectLargeRefEdge => { - // const select = utils.readPtr(t.FilterSelect, q, i + q[i] + utils.sizeOf(t.FilterCondition) + @alignOf(t.FilterSelect) - condition.offset); + // const select = utils.readPtr(t.FilterSelect, q, i + q[i] + utils.sizeOf(t.FilterCondition) + @alignOf(t.FilterSelect) - c.offset); // const edgeSelect = utils.readPtr(t.FilterSelect, q, i + q[i] + utils.sizeOf(t.FilterCondition) + @alignOf(t.FilterSelect) - condition.offset); // select.typeEntry = try Node.getType(ctx.db, select.typeId); // try prepare(q[end .. end + select.size], ctx, select.typeEntry); // i = end + select.size; - i = end; + // i = end; }, .selectRef => { - const select = utils.readPtr(t.FilterSelect, q, nextI + @alignOf(t.FilterSelect) - condition.offset); + const select = utils.readPtr(t.FilterSelect, q, nextI + @alignOf(t.FilterSelect) - c.offset); select.typeEntry = try Node.getType(ctx.db, select.typeId); try prepare(q[end .. end + select.size], ctx, select.typeEntry); i = end + select.size; @@ -56,8 +57,8 @@ pub fn prepare( }, } } else { - condition = utils.readPtr(t.FilterCondition, q, q[i] + i + 1); - const totalSize = headerSize + condition.size; + c = utils.readPtr(t.FilterCondition, q, q[i] + i); // + 1 + const totalSize = headerSize + c.size; const end = totalSize + i; i = end; } @@ -81,39 +82,62 @@ inline fn compare( T: type, comptime meta: Instruction.OpMeta, q: []u8, - v: []u8, + v: []const u8, index: usize, c: *t.FilterCondition, ) bool { const res = switch (meta.func) { - .Single => Compare.single(meta.cmp, T, q, v, index, c), - .Range => Compare.range(T, q, v, index, c), - .Batch => Compare.batch(meta.cmp, T, q, v, index, c), - .BatchSmall => Compare.batchSmall(meta.cmp, T, q, v, index, c), + .eq => Compare.eq(T, q, v, index, c), + .le => Compare.le(T, q, v, index, c), + .lt => Compare.lt(T, q, v, index, c), + .ge => Compare.ge(T, q, v, index, c), + .gt => Compare.gt(T, q, v, index, c), + .range => Compare.range(T, q, v, index, c), + .eqBatch => Compare.eqBatch(T, q, v, index, c), + .eqBatchSmall => Compare.eqBatchSmall(T, q, v, index, c), + .inc => Compare.include(q, v, index, c), }; return if (meta.invert) !res else res; } +// MAKE EDGE FILTER + // Check if this becomes better -pub inline fn filter(node: Node.Node, ctx: *Query.QueryCtx, q: []u8) !bool { +pub inline fn filter( + node: Node.Node, + ctx: *Query.QueryCtx, + q: []u8, + // comptime hasEdge: bool, + // edge: if (hasEdge) Node.Node else void, +) !bool { var i: usize = 0; var pass: bool = true; - var v: []u8 = undefined; + var v: []const u8 = undefined; var prop: u8 = 255; - var nextOrIndex: usize = q.len; - while (i < nextOrIndex) { + var end: usize = q.len; + + while (i < end) { const c = utils.readPtr(t.FilterCondition, q, i + q[i]); const index = i + q[i] + utils.sizeOf(t.FilterCondition); var nextIndex = COND_ALIGN_BYTES + 1 + utils.sizeOf(t.FilterCondition) + c.size + i; + if (prop != c.prop) { prop = c.prop; + // if (c.fieldSchema.type == Selva.c.SELVA_FIELD_TYPE_ALIAS) { + // v = try Fields.getAliasByNode(try Node.getType(ctx.db, node), node, c.fieldSchema.field); + // } else { v = Fields.getRaw(node, c.fieldSchema); + // } } + pass = switch (c.op.compare) { .nextOrIndex => blk: { - nextOrIndex = utils.readPtr(u64, q, index + @alignOf(u64) - c.offset).*; + end = utils.readPtr(u64, q, index + @alignOf(u64) - c.offset).*; break :blk true; }, + // .edge => blk: { + // break :blk true; + // }, .selectRef => blk: { const select = utils.readPtr(t.FilterSelect, q, index + @alignOf(t.FilterSelect) - c.offset); nextIndex += select.size; @@ -137,12 +161,14 @@ pub inline fn filter(node: Node.Node, ctx: *Query.QueryCtx, q: []u8) !bool { }; }, }; + if (!pass) { - i = nextOrIndex; - nextOrIndex = q.len; + i = end; + end = q.len; } else { i = nextIndex; } } + return pass; } diff --git a/native/query/filter/instruction.zig b/native/query/filter/instruction.zig index 3a5ecf07d3..ddaeebf8a1 100644 --- a/native/query/filter/instruction.zig +++ b/native/query/filter/instruction.zig @@ -2,37 +2,32 @@ const std = @import("std"); const t = @import("../../types.zig"); const Compare = @import("compare.zig"); -pub const OpMeta = struct { - invert: bool = false, - cmp: Compare.Op = .eq, - func: Compare.Function = .Single, -}; - -fn getCmp(comptime tag: t.FilterOpCompare) Compare.Op { - return switch (tag) { - .lt, .ltBatch, .ltBatchSmall => .lt, - .le, .leBatch, .leBatchSmall => .le, - .gt, .gtBatch, .gtBatchSmall => .gt, - .ge, .geBatch, .geBatchSmall => .ge, - else => .eq, - }; -} +pub const OpMeta = struct { invert: bool = false, func: Compare.Function }; fn getFunc(comptime tag: t.FilterOpCompare) Compare.Function { return switch (tag) { - .range, .nrange => .Range, - .eqBatch, .neqBatch, .ltBatch, .leBatch, .gtBatch, .geBatch => .Batch, - .eqBatchSmall, .neqBatchSmall, .ltBatchSmall, .leBatchSmall, .gtBatchSmall, .geBatchSmall => .BatchSmall, - else => .Single, + .range, .nrange => Compare.Function.range, + .eqBatch, + .neqBatch, + => Compare.Function.eqBatch, + .eqBatchSmall, + .neqBatchSmall, + => Compare.Function.eqBatchSmall, + .eq, .neq => Compare.Function.eq, + .le => Compare.Function.le, + .lt => Compare.Function.le, + .ge => Compare.Function.ge, + .gt => Compare.Function.gt, + .inc, .ninc => Compare.Function.inc, + else => Compare.Function.eq, }; } pub fn parseOp(comptime op: t.FilterOpCompare) OpMeta { return .{ - .cmp = getCmp(op), .func = getFunc(op), .invert = switch (op) { - .neq, .neqBatch, .neqBatchSmall, .nrange => true, + .neq, .neqBatch, .neqBatchSmall, .nrange, .ninc => true, else => false, }, }; diff --git a/native/query/filter/select.zig b/native/query/filter/select.zig deleted file mode 100644 index e1ae595b28..0000000000 --- a/native/query/filter/select.zig +++ /dev/null @@ -1,30 +0,0 @@ -const std = @import("std"); -const Query = @import("../common.zig"); -const utils = @import("../../utils.zig"); -const Node = @import("../../selva/node.zig"); -const Schema = @import("../../selva/schema.zig"); -const Fields = @import("../../selva/fields.zig"); -const t = @import("../../types.zig"); -const Filter = @import("filter.zig"); - -pub fn largeRef(ctx: *Query.QueryCtx, q: []u8, value: []u8, i: *usize) !bool { - const selectReference = utils.readNext(t.FilterSelect, q, i); - const nodeId = utils.read(u32, value, 0); - if (nodeId == 0) { - i.* += selectReference.size; - return false; - } - const refTypeEntry = try Node.getType(ctx.db, selectReference.typeId); - if (Node.getNode(refTypeEntry, nodeId)) |refNode| { - const refPass = try Filter.filter( - refNode, - ctx, - q[i.* .. i.* + selectReference.size], - refTypeEntry, - ); - i.* += selectReference.size; - return refPass; - } - i.* += selectReference.size; - return true; -} diff --git a/native/query/include/append.zig b/native/query/include/append.zig index 25e7469f86..f8941e29a8 100644 --- a/native/query/include/append.zig +++ b/native/query/include/append.zig @@ -1,7 +1,7 @@ const utils = @import("../../utils.zig"); const Thread = @import("../../thread/thread.zig"); const t = @import("../../types.zig"); - +const std = @import("std"); pub inline fn default(thread: *Thread.Thread, prop: u8, value: []u8) !void { if (value.len == 0) { return; diff --git a/native/query/include/include.zig b/native/query/include/include.zig index 48f4783a5e..b84b06ac2b 100644 --- a/native/query/include/include.zig +++ b/native/query/include/include.zig @@ -8,8 +8,8 @@ const Fields = @import("../../selva/fields.zig"); const opts = @import("opts.zig"); const append = @import("append.zig"); const t = @import("../../types.zig"); -const multiple = @import("../multiple.zig"); -const single = @import("../single.zig"); +const Multiple = @import("../multiple/references.zig"); +const Single = @import("../single.zig"); const References = @import("../../selva/references.zig"); const aggregateRefs = @import("../aggregates/references.zig"); @@ -49,17 +49,17 @@ pub fn include( var i: usize = 0; while (i < q.len) { const op: t.IncludeOp = @enumFromInt(q[i]); - + // std.debug.print("includeop: {any} - {any}\n", .{ op, q }); switch (op) { // add .referenceEdge? .reference => { - recursionErrorBoundary(single.reference, node, ctx, q, typeEntry, &i); + recursionErrorBoundary(Single.reference, node, ctx, q, typeEntry, &i); }, .referenceEdge => { - recursionErrorBoundary(single.referenceEdge, node, ctx, q, typeEntry, &i); + recursionErrorBoundary(Single.referenceEdge, node, ctx, q, typeEntry, &i); }, .references => { - recursionErrorBoundary(multiple.references, node, ctx, q, typeEntry, &i); + recursionErrorBoundary(Multiple.references, node, ctx, q, typeEntry, &i); }, .partial => { const header = utils.readNext(t.IncludePartialHeader, q, &i); @@ -120,22 +120,28 @@ pub fn include( .default => { const header = utils.readNext(t.IncludeHeader, q, &i); const value = try get(typeEntry, node, &header); + // std.debug.print("??? value {any} - {any}\n", .{ value, header }); switch (header.propType) { - t.PropType.text, + .text, => { var iter = Fields.textIterator(value); while (iter.next()) |textValue| { try append.stripCrc32(ctx.thread, header.prop, textValue); } }, - t.PropType.binary, t.PropType.string, t.PropType.json => { + .binary, .string, .json => { // utils.printString("derp", value); try append.stripCrc32(ctx.thread, header.prop, value); }, - t.PropType.microBuffer, t.PropType.vector, t.PropType.colVec => { + .microBuffer, .vector, .colVec => { // Fixed size try ctx.thread.query.append(header.prop); - try ctx.thread.query.append(value); + if (value.len == 0) { + const fs = try Schema.getFieldSchema(typeEntry, header.prop); + _ = try ctx.thread.query.reserve(fs.unnamed_0.smb.len); + } else { + try ctx.thread.query.append(value); + } }, else => { try append.default(ctx.thread, header.prop, value); diff --git a/native/query/multiple.zig b/native/query/multiple.zig deleted file mode 100644 index d1590417c9..0000000000 --- a/native/query/multiple.zig +++ /dev/null @@ -1,350 +0,0 @@ -const std = @import("std"); -const utils = @import("../utils.zig"); -const Query = @import("common.zig"); -const Include = @import("include/include.zig"); -const Filter = @import("filter/filter.zig"); -const Node = @import("../selva/node.zig"); -const References = @import("../selva/references.zig"); -const Selva = @import("../selva/selva.zig"); -const Thread = @import("../thread/thread.zig"); -const Schema = @import("../selva/schema.zig"); -const t = @import("../types.zig"); -const Sort = @import("../sort/sort.zig"); -const Aggregates = @import("aggregates/aggregates.zig"); -const GroupBy = @import("aggregates/group.zig"); -const GroupByHashMap = @import("aggregates/hashMap.zig").GroupByHashMap; -const String = @import("../string.zig"); -const writeAs = utils.writeAs; -const read = utils.read; - -fn iterator( - comptime It: t.QueryIteratorType, - ctx: *Query.QueryCtx, - q: []u8, - it: anytype, - header: *const t.QueryHeader, - typeEntry: Node.Type, - i: *usize, -) !u32 { - var offset: u32 = header.offset; - var nodeCnt: u32 = 0; - var filter: []u8 = undefined; - if (It == t.QueryIteratorType.filter) { - filter = utils.sliceNext(header.filterSize, q, i); - try Filter.prepare(filter, ctx, typeEntry); - } - utils.debugPrint("i.* .. i.* + header.includeSize: {d} .. {d}\n", .{ i.*, i.* + header.includeSize }); - const nestedQuery = q[i.* .. i.* + header.includeSize]; - while (offset > 0) { - const node = it.next() orelse return 0; - if (It == t.QueryIteratorType.filter) { - if (try Filter.filter(node, ctx, filter)) { - offset -= 1; - } - } else { - offset -= 1; - } - } - while (it.next()) |node| { - if (It == t.QueryIteratorType.filter) { - if (!try Filter.filter(node, ctx, filter)) { - continue; - } - } - try ctx.thread.query.append(t.ReadOp.id); - try ctx.thread.query.append(Node.getNodeId(node)); - try Include.include(node, ctx, nestedQuery, typeEntry); - nodeCnt += 1; - if (nodeCnt >= header.limit) { - break; - } - } - return nodeCnt; -} - -fn iteratorEdge( - comptime _: t.QueryIteratorType, - ctx: *Query.QueryCtx, - q: []u8, - it: anytype, - header: *const t.QueryHeader, - typeEntry: Node.Type, - i: *usize, -) !u32 { - var offset: u32 = header.offset; - var nodeCnt: u32 = 0; - const nestedQuery = q[i.* .. i.* + header.includeSize]; - const edgeTypeEntry = try Node.getType(ctx.db, header.edgeTypeId); - const edgeQuery = q[i.* + header.includeSize .. i.* + header.includeSize + header.edgeSize]; - while (offset > 0) { - _ = it.next() orelse return 0; - offset -= 1; - } - while (it.nextRef()) |ref| { - try ctx.thread.query.append(t.ReadOp.id); - try ctx.thread.query.append(Node.getNodeId(ref.node)); - try Include.include(ref.node, ctx, nestedQuery, typeEntry); - try ctx.thread.query.append(t.ReadOp.edge); - try Include.include(ref.edge, ctx, edgeQuery, edgeTypeEntry); - nodeCnt += 1; - if (nodeCnt >= header.limit) { - break; - } - } - i.* += header.edgeSize; - return nodeCnt; -} - -const IdsIterator = struct { - ids: []u32, - i: u32, - typeEntry: Node.Type, - pub fn next(self: *IdsIterator) ?Node.Node { - if (self.i == self.ids.len) { - return null; - } - const node = Node.getNode(self.typeEntry, self.ids[self.i]); - self.i += 1; - return node; - } -}; - -pub fn ids( - ctx: *Query.QueryCtx, - q: []u8, -) !void { - var i: usize = 0; - const header = utils.readNext(t.QueryHeader, q, &i); - const sizeIndex = try ctx.thread.query.reserve(4); - const size = header.size; - const typeEntry = try Node.getType(ctx.db, header.typeId); - var it = IdsIterator{ .i = 0, .ids = utils.read([]u32, q, size + 4), .typeEntry = typeEntry }; - var nodeCnt: u32 = 0; - switch (header.iteratorType) { - .default => { - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, &i); - }, - .desc => { - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, &i); - }, - .sort => { - const sortHeader = utils.readNext(t.SortHeader, q, &i); - var itSort = try Sort.fromIterator(false, false, ctx.db, ctx.thread, typeEntry, &sortHeader, &it); - nodeCnt = try iterator(.default, ctx, q, &itSort, &header, typeEntry, &i); - itSort.deinit(); - }, - .descSort => { - const sortHeader = utils.readNext(t.SortHeader, q, &i); - var itSort = try Sort.fromIterator(true, false, ctx.db, ctx.thread, typeEntry, &sortHeader, &it); - nodeCnt = try iterator(.default, ctx, q, &itSort, &header, typeEntry, &i); - itSort.deinit(); - }, - else => {}, - } - ctx.thread.query.write(nodeCnt, sizeIndex); -} - -pub fn default( - ctx: *Query.QueryCtx, - q: []u8, -) !void { - var i: usize = 0; - const header = utils.readNext(t.QueryHeader, q, &i); - const sizeIndex = try ctx.thread.query.reserve(4); - const typeEntry = try Node.getType(ctx.db, header.typeId); - var nodeCnt: u32 = 0; - switch (header.iteratorType) { - .default => { - var it = Node.iterator(false, typeEntry); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, &i); - }, - .filter => { - var it = Node.iterator(false, typeEntry); - nodeCnt = try iterator(.filter, ctx, q, &it, &header, typeEntry, &i); - }, - .desc => { - var it = Node.iterator(true, typeEntry); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, &i); - }, - .sort => { - const sortHeader = utils.readNext(t.SortHeader, q, &i); - var it = try Sort.iterator(false, ctx.db, ctx.thread, header.typeId, &sortHeader); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, &i); - }, - .descSort => { - const sortHeader = utils.readNext(t.SortHeader, q, &i); - var it = try Sort.iterator(true, ctx.db, ctx.thread, header.typeId, &sortHeader); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, &i); - }, - else => { - // not handled - }, - } - ctx.thread.query.write(nodeCnt, sizeIndex); -} - -inline fn referencesSort( - comptime desc: bool, - comptime edge: bool, - ctx: *Query.QueryCtx, - q: []u8, - from: Node.Node, - fromType: Selva.Type, - i: *usize, - header: *const t.QueryHeader, - typeEntry: Node.Type, -) !Sort.SortIterator(desc, edge) { - const sortHeader = utils.readNext(t.SortHeader, q, i); - var refs = try References.iterator(desc, edge, ctx.db, from, header.prop, fromType); - return try Sort.fromIterator(desc, edge, ctx.db, ctx.thread, typeEntry, &sortHeader, &refs); -} - -pub fn references( - ctx: *Query.QueryCtx, - q: []u8, - from: Node.Node, - fromType: Selva.Type, - i: *usize, -) !void { - const header = utils.readNext(t.QueryHeader, q, i); - try ctx.thread.query.append(t.ReadOp.references); - try ctx.thread.query.append(header.prop); - const resultByteSizeIndex = try ctx.thread.query.reserve(4); - const startIndex = ctx.thread.query.index; - const sizeIndex = try ctx.thread.query.reserve(4); - const typeEntry = try Node.getType(ctx.db, header.typeId); - var nodeCnt: u32 = 0; - - switch (header.iteratorType) { - .edgeInclude => { - var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); - nodeCnt = try iteratorEdge(.edgeInclude, ctx, q, &it, &header, typeEntry, i); - }, - .edgeIncludeDesc => { - var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); - nodeCnt = try iteratorEdge(.edgeInclude, ctx, q, &it, &header, typeEntry, i); - }, - .edge => { - var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - }, - .default => { - var it = try References.iterator(false, false, ctx.db, from, header.prop, fromType); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - }, - .desc => { - var it = try References.iterator(true, false, ctx.db, from, header.prop, fromType); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - }, - .sort => { - var it = try referencesSort(false, false, ctx, q, from, fromType, i, &header, typeEntry); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - it.deinit(); - }, - .edgeDesc => { - var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - }, - .descSort => { - var it = try referencesSort(true, false, ctx, q, from, fromType, i, &header, typeEntry); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - it.deinit(); - }, - .edgeSort => { - var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - it.deinit(); - }, - .edgeDescSort => { - var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); - nodeCnt = try iterator(.default, ctx, q, &it, &header, typeEntry, i); - it.deinit(); - }, - .edgeIncludeSort => { - var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); - nodeCnt = try iteratorEdge(.edgeInclude, ctx, q, &it, &header, typeEntry, i); - it.deinit(); - }, - .edgeIncludeDescSort => { - var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); - nodeCnt = try iteratorEdge(.edgeInclude, ctx, q, &it, &header, typeEntry, i); - it.deinit(); - }, - else => { - // not handled - }, - } - - i.* += header.includeSize; - ctx.thread.query.write(nodeCnt, sizeIndex); - - ctx.thread.query.writeAs( - u32, - @truncate(ctx.thread.query.index - startIndex), - resultByteSizeIndex, - ); -} - -pub fn aggregates( - ctx: *Query.QueryCtx, - q: []u8, -) !void { - var i: usize = 0; - var nodeCnt: u32 = 0; - - const header = utils.read(t.AggHeader, q, i); - - i += utils.sizeOf(t.AggHeader); - std.debug.print("header: {any}\n", .{header}); - const typeId = header.typeId; - const typeEntry = try Node.getType(ctx.db, typeId); - const isSamplingSet = header.isSamplingSet; - - const accumulatorProp = try ctx.db.allocator.alloc(u8, header.accumulatorSize); - @memset(accumulatorProp, 0); - defer ctx.db.allocator.free(accumulatorProp); - const hllAccumulator = Selva.c.selva_string_create(null, Selva.c.HLL_INIT_SIZE, Selva.c.SELVA_STRING_MUTABLE); - defer Selva.c.selva_string_free(hllAccumulator); - - var it = Node.iterator(false, typeEntry); - switch (header.iteratorType) { - .aggregate => { - nodeCnt = try Aggregates.iterator(ctx, &it, header.limit, false, undefined, q[i..], accumulatorProp, typeEntry, hllAccumulator); - try Aggregates.finalizeResults(ctx, q[i..], accumulatorProp, isSamplingSet, 0); - }, - .aggregateFilter => { - const filter = utils.sliceNext(header.filterSize, q, &i); - try Filter.prepare(filter, ctx, typeEntry); - nodeCnt = try Aggregates.iterator(ctx, &it, header.limit, true, filter, q[i..], accumulatorProp, typeEntry, hllAccumulator); - try Aggregates.finalizeResults(ctx, q[i..], accumulatorProp, isSamplingSet, 0); - }, - .groupBy => { - var groupByHashMap = GroupByHashMap.init(ctx.db.allocator); - defer groupByHashMap.deinit(); - nodeCnt = try GroupBy.iterator(ctx, &groupByHashMap, &it, header.limit, false, undefined, q[i..], header.accumulatorSize, typeEntry, hllAccumulator); - try GroupBy.finalizeGroupResults(ctx, &groupByHashMap, header, q[i..]); - }, - .groupByFilter => { - const filter = utils.sliceNext(header.filterSize, q, &i); - try Filter.prepare(filter, ctx, typeEntry); - var groupByHashMap = GroupByHashMap.init(ctx.db.allocator); - defer groupByHashMap.deinit(); - nodeCnt = try GroupBy.iterator(ctx, &groupByHashMap, &it, header.limit, true, filter, q[i..], header.accumulatorSize, typeEntry, hllAccumulator); - try GroupBy.finalizeGroupResults(ctx, &groupByHashMap, header, q[i..]); - }, - else => {}, - } -} - -pub fn aggregatesCount( - ctx: *Query.QueryCtx, - q: []u8, -) !void { - var i: usize = 0; - const header = utils.read(t.AggHeader, q, i); - i += utils.sizeOf(t.AggHeader); - const typeId = header.typeId; - const typeEntry = try Node.getType(ctx.db, typeId); - const count: u32 = @truncate(Node.getNodeCount(typeEntry)); - try ctx.thread.query.append(count); -} diff --git a/native/query/multiple/aggregates.zig b/native/query/multiple/aggregates.zig new file mode 100644 index 0000000000..4f7caf30aa --- /dev/null +++ b/native/query/multiple/aggregates.zig @@ -0,0 +1,84 @@ +const utils = @import("../../utils.zig"); +const Query = @import("../common.zig"); +const t = @import("../../types.zig"); +const Node = @import("../../selva/node.zig"); +const Iterate = @import("./iterate.zig"); +const Sort = @import("../../sort/sort.zig"); +const Selva = @import("../../selva/selva.zig"); +const Filter = @import("../filter/filter.zig"); +const GroupByHashMap = @import("../aggregates/hashMap.zig").GroupByHashMap; +const GroupBy = @import("../aggregates/group.zig"); +const Aggregates = @import("../aggregates/aggregates.zig"); + +pub fn aggregates( + ctx: *Query.QueryCtx, + q: []u8, +) !void { + var i: usize = 0; + var nodeCnt: u32 = 0; + + const header = utils.read(t.AggHeader, q, i); + + i += utils.sizeOf(t.AggHeader); + const typeId = header.typeId; + const typeEntry = try Node.getType(ctx.db, typeId); + + const accumulatorProp = try ctx.db.allocator.alloc(u8, header.accumulatorSize); + @memset(accumulatorProp, 0); + defer ctx.db.allocator.free(accumulatorProp); + const hllAccumulator = Selva.c.selva_string_create(null, Selva.c.HLL_INIT_SIZE, Selva.c.SELVA_STRING_MUTABLE); + defer Selva.c.selva_string_free(hllAccumulator); + + var aggCtx = Aggregates.AggCtx{ + .queryCtx = ctx, + .typeEntry = typeEntry, + .limit = header.limit, + .isSamplingSet = header.isSamplingSet, + .hllAccumulator = hllAccumulator, + .accumulatorSize = header.accumulatorSize, + .resultsSize = header.resultsSize, + .totalResultsSize = 0, + }; + + var it = Node.iterator(false, typeEntry); + switch (header.iteratorType) { + .aggregate => { + nodeCnt = try Aggregates.iterator(&aggCtx, &it, false, undefined, q[i..], accumulatorProp); + try Aggregates.finalizeResults(&aggCtx, q[i..], accumulatorProp, 0); + }, + .aggregateFilter => { + const filter = utils.sliceNext(header.filterSize, q, &i); + try Filter.prepare(filter, ctx, typeEntry); + nodeCnt = try Aggregates.iterator(&aggCtx, &it, true, filter, q[i..], accumulatorProp); + try Aggregates.finalizeResults(&aggCtx, q[i..], accumulatorProp, 0); + }, + .groupBy => { + var groupByHashMap = GroupByHashMap.init(ctx.db.allocator); + defer groupByHashMap.deinit(); + nodeCnt = @intCast(GroupBy.iterator(&aggCtx, &groupByHashMap, &it, false, undefined, q[i..])); + try GroupBy.finalizeGroupResults(&aggCtx, &groupByHashMap, q[i..]); + }, + .groupByFilter => { + const filter = utils.sliceNext(header.filterSize, q, &i); + try Filter.prepare(filter, ctx, typeEntry); + var groupByHashMap = GroupByHashMap.init(ctx.db.allocator); + defer groupByHashMap.deinit(); + nodeCnt = @intCast(GroupBy.iterator(&aggCtx, &groupByHashMap, &it, true, filter, q[i..])); + try GroupBy.finalizeGroupResults(&aggCtx, &groupByHashMap, q[i..]); + }, + else => {}, + } +} + +pub fn aggregatesCount( + ctx: *Query.QueryCtx, + q: []u8, +) !void { + var i: usize = 0; + const header = utils.read(t.AggHeader, q, i); + i += utils.sizeOf(t.AggHeader); + const typeId = header.typeId; + const typeEntry = try Node.getType(ctx.db, typeId); + const count: u32 = @truncate(Node.getNodeCount(typeEntry)); + try ctx.thread.query.append(count); +} diff --git a/native/query/multiple/default.zig b/native/query/multiple/default.zig new file mode 100644 index 0000000000..a99d040945 --- /dev/null +++ b/native/query/multiple/default.zig @@ -0,0 +1,65 @@ +const utils = @import("../../utils.zig"); +const Query = @import("../common.zig"); +const t = @import("../../types.zig"); +const Node = @import("../../selva/node.zig"); +const Iterate = @import("./iterate.zig"); +const Sort = @import("../../sort/sort.zig"); + +pub fn default( + ctx: *Query.QueryCtx, + q: []u8, +) !void { + var i: usize = 0; + // make default header! use :type in iterator + const header = utils.readNext(t.QueryHeader, q, &i); + const sizeIndex = try ctx.thread.query.reserve(4); + const typeEntry = try Node.getType(ctx.db, header.typeId); + var nodeCnt: u32 = 0; + + switch (header.iteratorType) { + .default => { + var it = Node.iterator(false, typeEntry); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, &i); + }, + .desc => { + var it = Node.iterator(true, typeEntry); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, &i); + }, + + .sort => { + const sortHeader = utils.readNext(t.SortHeader, q, &i); + var it = try Sort.iterator(false, ctx.db, ctx.thread, header.typeId, &sortHeader); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, &i); + }, + .descSort => { + const sortHeader = utils.readNext(t.SortHeader, q, &i); + var it = try Sort.iterator(true, ctx.db, ctx.thread, header.typeId, &sortHeader); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, &i); + }, + + .filter => { + var it = Node.iterator(false, typeEntry); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, &i); + }, + .descFilter => { + var it = Node.iterator(true, typeEntry); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, &i); + }, + + .filterSort => { + const sortHeader = utils.readNext(t.SortHeader, q, &i); + var it = try Sort.iterator(false, ctx.db, ctx.thread, header.typeId, &sortHeader); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, &i); + }, + .descFilterSort => { + const sortHeader = utils.readNext(t.SortHeader, q, &i); + var it = try Sort.iterator(true, ctx.db, ctx.thread, header.typeId, &sortHeader); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, &i); + }, + + else => { + // not handled + }, + } + ctx.thread.query.write(nodeCnt, sizeIndex); +} diff --git a/native/query/multiple/ids.zig b/native/query/multiple/ids.zig new file mode 100644 index 0000000000..f536cc8aad --- /dev/null +++ b/native/query/multiple/ids.zig @@ -0,0 +1,55 @@ +const utils = @import("../../utils.zig"); +const Query = @import("../common.zig"); +const t = @import("../../types.zig"); +const Node = @import("../../selva/node.zig"); +const Iterate = @import("./iterate.zig"); +const Sort = @import("../../sort/sort.zig"); + +pub const IdsIterator = struct { + ids: []u32, + i: u32, + typeEntry: Node.Type, + pub fn next(self: *IdsIterator) ?Node.Node { + if (self.i == self.ids.len) { + return null; + } + const node = Node.getNode(self.typeEntry, self.ids[self.i]); + self.i += 1; + return node; + } +}; + +pub fn ids( + ctx: *Query.QueryCtx, + q: []u8, +) !void { + var i: usize = 0; + const header = utils.readNext(t.QueryHeader, q, &i); + const sizeIndex = try ctx.thread.query.reserve(4); + const size = header.size; + const typeEntry = try Node.getType(ctx.db, header.typeId); + var it = IdsIterator{ .i = 0, .ids = utils.read([]u32, q, size + 4), .typeEntry = typeEntry }; + var nodeCnt: u32 = 0; + switch (header.iteratorType) { + .default => { + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, &i); + }, + .desc => { + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, &i); + }, + .sort => { + const sortHeader = utils.readNext(t.SortHeader, q, &i); + var itSort = try Sort.fromIterator(false, false, ctx.db, ctx.thread, typeEntry, &sortHeader, &it); + nodeCnt = try Iterate.node(.default, ctx, q, &itSort, &header, typeEntry, &i); + itSort.deinit(); + }, + .descSort => { + const sortHeader = utils.readNext(t.SortHeader, q, &i); + var itSort = try Sort.fromIterator(true, false, ctx.db, ctx.thread, typeEntry, &sortHeader, &it); + nodeCnt = try Iterate.node(.default, ctx, q, &itSort, &header, typeEntry, &i); + itSort.deinit(); + }, + else => {}, + } + ctx.thread.query.write(nodeCnt, sizeIndex); +} diff --git a/native/query/multiple/iterate.zig b/native/query/multiple/iterate.zig new file mode 100644 index 0000000000..638d7e009e --- /dev/null +++ b/native/query/multiple/iterate.zig @@ -0,0 +1,149 @@ +const std = @import("std"); +const utils = @import("../../utils.zig"); +const Query = @import("../common.zig"); +const Include = @import("../include/include.zig"); +const Filter = @import("../filter/filter.zig"); +const Node = @import("../../selva/node.zig"); +const References = @import("../../selva/references.zig"); +const Selva = @import("../../selva/selva.zig"); +const Thread = @import("../../thread/thread.zig"); +const Schema = @import("../../selva/schema.zig"); +const t = @import("../../types.zig"); +const Sort = @import("../../sort/sort.zig"); +const Aggregates = @import("../aggregates/aggregates.zig"); +const GroupBy = @import("../aggregates/group.zig"); +const GroupByHashMap = @import("../aggregates/hashMap.zig").GroupByHashMap; +const String = @import("../../string.zig"); +const writeAs = utils.writeAs; +const read = utils.read; + +pub fn node( + comptime It: t.QueryIteratorType, + ctx: *Query.QueryCtx, + q: []u8, + it: anytype, + header: *const t.QueryHeader, + typeEntry: Node.Type, + i: *usize, +) !u32 { + var offset: u32 = header.offset; + var nodeCnt: u32 = 0; + var filter: []u8 = undefined; + if (It == t.QueryIteratorType.filter) { + filter = utils.sliceNext(header.filterSize, q, i); + try Filter.prepare(filter, ctx, typeEntry); + } + const nestedQuery = q[i.* .. i.* + header.includeSize]; + while (offset > 0) { + const n = it.next() orelse return 0; + if (It == t.QueryIteratorType.filter) { + if (try Filter.filter(n, ctx, filter)) { + offset -= 1; + } + } else { + offset -= 1; + } + } + while (it.next()) |n| { + if (It == t.QueryIteratorType.filter) { + if (!try Filter.filter(n, ctx, filter)) { + continue; + } + } + try ctx.thread.query.append(t.ReadOp.id); + try ctx.thread.query.append(Node.getNodeId(n)); + try Include.include(n, ctx, nestedQuery, typeEntry); + nodeCnt += 1; + if (nodeCnt >= header.limit) { + break; + } + } + return nodeCnt; +} + +pub fn edge( + comptime It: t.QueryIteratorType, + ctx: *Query.QueryCtx, + q: []u8, + it: anytype, + header: *const t.QueryHeader, + typeEntry: Node.Type, + i: *usize, +) !u32 { + var offset: u32 = header.offset; + var nodeCnt: u32 = 0; + + var filter: []u8 = undefined; + var edgeFilter: []u8 = undefined; + + if (It == t.QueryIteratorType.filter or + It == t.QueryIteratorType.edgeFilterAndFilterOnEdge or + It == t.QueryIteratorType.edgeIncludeFilterAndFilterOnEdge) + { + filter = utils.sliceNext(header.filterSize, q, i); + try Filter.prepare(filter, ctx, typeEntry); + } + + if (It == t.QueryIteratorType.edgeIncludeFilterOnEdge or + It == t.QueryIteratorType.edgeFilterOnEdge or + It == t.QueryIteratorType.edgeFilterAndFilterOnEdge or + It == t.QueryIteratorType.edgeIncludeFilterAndFilterOnEdge) + { + edgeFilter = utils.sliceNext(header.edgeFilterSize, q, i); + try Filter.prepare(edgeFilter, ctx, typeEntry); + } + + const nestedQuery = q[i.* .. i.* + header.includeSize]; + const edgeTypeEntry = try Node.getType(ctx.db, header.edgeTypeId); + const edgeQuery = q[i.* + header.includeSize .. i.* + header.includeSize + header.edgeSize]; + + while (offset > 0) { + _ = it.next() orelse return 0; + offset -= 1; + } + + while (it.nextRef()) |ref| { + if (It == t.QueryIteratorType.filter or + It == t.QueryIteratorType.edgeFilterAndFilterOnEdge or + It == t.QueryIteratorType.edgeIncludeFilterAndFilterOnEdge) + { + if (!try Filter.filter(ref.node, ctx, filter)) { + continue; + } + } + + if (It == t.QueryIteratorType.edgeIncludeFilterOnEdge or + It == t.QueryIteratorType.edgeFilterOnEdge or + It == t.QueryIteratorType.edgeFilterAndFilterOnEdge) + { + if (!try Filter.filter(ref.edge, ctx, edgeFilter)) { + continue; + } + } + + try ctx.thread.query.append(t.ReadOp.id); + try ctx.thread.query.append(Node.getNodeId(ref.node)); + try Include.include(ref.node, ctx, nestedQuery, typeEntry); + + if (It != t.QueryIteratorType.edgeFilterOnEdge and + It != t.QueryIteratorType.edgeFilterAndFilterOnEdge) + { + try ctx.thread.query.append(t.ReadOp.edge); + const edgesByteSizeIndex = try ctx.thread.query.reserve(4); + const edgeStartIndex = ctx.thread.query.index; + try Include.include(ref.edge, ctx, edgeQuery, edgeTypeEntry); + ctx.thread.query.writeAs( + u32, + @truncate(ctx.thread.query.index - edgeStartIndex), + edgesByteSizeIndex, + ); + } + + nodeCnt += 1; + if (nodeCnt >= header.limit) { + break; + } + } + i.* += header.edgeSize; + return nodeCnt; +} diff --git a/native/query/multiple/references.zig b/native/query/multiple/references.zig new file mode 100644 index 0000000000..87a824e24a --- /dev/null +++ b/native/query/multiple/references.zig @@ -0,0 +1,254 @@ +const utils = @import("../../utils.zig"); +const t = @import("../../types.zig"); +const Node = @import("../../selva/node.zig"); +const Selva = @import("../../selva/selva.zig"); +const Sort = @import("../../sort/sort.zig"); +const References = @import("../../selva/references.zig"); +const Query = @import("../common.zig"); +const Iterate = @import("./iterate.zig"); +const std = @import("std"); + +// Has to be inlined to force stack allocation +inline fn referencesSort( + comptime desc: bool, + comptime edge: bool, + ctx: *Query.QueryCtx, + q: []u8, + from: Node.Node, + fromType: Selva.Type, + i: *usize, + header: *const t.QueryHeader, + typeEntry: Node.Type, +) !Sort.SortIterator(desc, edge) { + const sortHeader = utils.readNext(t.SortHeader, q, i); + var refs = try References.iterator(desc, edge, ctx.db, from, header.prop, fromType); + return try Sort.fromIterator(desc, edge, ctx.db, ctx.thread, typeEntry, &sortHeader, &refs); +} + +pub fn references( + ctx: *Query.QueryCtx, + q: []u8, + from: Node.Node, + fromType: Selva.Type, + i: *usize, +) !void { + const header = utils.readNext(t.QueryHeader, q, i); + try ctx.thread.query.append(t.ReadOp.references); + try ctx.thread.query.append(header.prop); + const resultByteSizeIndex = try ctx.thread.query.reserve(4); + const startIndex = ctx.thread.query.index; + const sizeIndex = try ctx.thread.query.reserve(4); + const typeEntry = try Node.getType(ctx.db, header.typeId); + var nodeCnt: u32 = 0; + + switch (header.iteratorType) { + .default => { + var it = try References.iterator(false, false, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + }, + .desc => { + var it = try References.iterator(true, false, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + }, + .sort => { + var it = try referencesSort(false, false, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .descSort => { + var it = try referencesSort(true, false, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .filter => { + var it = try References.iterator(false, false, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + }, + .descFilter => { + var it = try References.iterator(true, false, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + }, + .filterSort => { + var it = try referencesSort(false, false, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .descFilterSort => { + var it = try referencesSort(true, false, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + + // name this large / hasEdge + .edge => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + }, + .edgeDesc => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + }, + .edgeSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeDescSort => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.default, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + + .edgeFilter => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + }, + .edgeDescFilter => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + }, + .edgeFilterSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeDescFilterSort => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.node(.filter, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + + // -------------------- + // NAME THESE BETTER + .edgeFilterOnEdge => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeFilterOnEdgeDesc => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeFilterOnEdgeSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeFilterOnEdgeSortDesc => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + // -------------------- + + // split up this file + // then we can name this edgeInclude + .edgeInclude => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.default, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeDesc => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.default, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.default, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeIncludeDescSort => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.default, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + + .edgeIncludeFilter => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.filter, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeDescFilter => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.filter, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeFilterSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.filter, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeIncludeDescFilterSort => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.filter, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + + .edgeFilterAndFilterOnEdge => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeFilterAndFilterOnEdgeDesc => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeFilterAndFilterOnEdgeSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeFilterAndFilterOnEdgeSortDesc => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + // -------------------- + + // -------------------- + .edgeIncludeFilterOnEdge => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeIncludeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeFilterOnEdgeDesc => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeIncludeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeFilterOnEdgeSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeIncludeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeIncludeFilterOnEdgeSortDesc => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeIncludeFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + // -------------------- + .edgeIncludeFilterAndFilterOnEdge => { + var it = try References.iterator(false, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeIncludeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeFilterAndFilterOnEdgeDesc => { + var it = try References.iterator(true, true, ctx.db, from, header.prop, fromType); + nodeCnt = try Iterate.edge(.edgeIncludeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + }, + .edgeIncludeFilterAndFilterOnEdgeSort => { + var it = try referencesSort(false, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeIncludeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + .edgeIncludeFilterAndFilterOnEdgeSortDesc => { + var it = try referencesSort(true, true, ctx, q, from, fromType, i, &header, typeEntry); + nodeCnt = try Iterate.edge(.edgeIncludeFilterAndFilterOnEdge, ctx, q, &it, &header, typeEntry, i); + it.deinit(); + }, + // -------------------- + + else => {}, + } + + i.* += header.includeSize; //+ header.edgeSize; + ctx.thread.query.write(nodeCnt, sizeIndex); + + ctx.thread.query.writeAs( + u32, + @truncate(ctx.thread.query.index - startIndex), + resultByteSizeIndex, + ); +} diff --git a/native/query/query.zig b/native/query/query.zig index 2e38c1f381..3426fc9bf9 100644 --- a/native/query/query.zig +++ b/native/query/query.zig @@ -3,13 +3,16 @@ const errors = @import("../errors.zig"); const napi = @import("../napi.zig"); const Query = @import("common.zig"); const utils = @import("../utils.zig"); -const multiple = @import("multiple.zig"); -const single = @import("single.zig"); const Thread = @import("../thread/thread.zig"); const t = @import("../types.zig"); const DbCtx = @import("../db/ctx.zig").DbCtx; const Selva = @import("../selva"); +const ids = @import("multiple/ids.zig").ids; +const default = @import("multiple/default.zig").default; +const Single = @import("single.zig"); +const Aggregates = @import("multiple/aggregates.zig"); + // -------- NAPI ---------- (put in js bridge maybe?) pub fn getQueryBufThread(env: napi.Env, info: napi.Info) callconv(.c) napi.Value { return getQueryBufInternalThread(env, info) catch |err| { @@ -41,20 +44,20 @@ pub fn getQueryThreaded( const queryId = utils.readNext(u32, buffer, &index); const q = buffer[index .. buffer.len - 8]; // - checksum len - utils.debugPrint("q: {any}\n", .{q}); + // utils.debugPrint("q: {any}\n", .{q}); const op = utils.read(t.OpType, q, 0); _ = try thread.query.result(0, queryId, op); switch (op) { - .default => try multiple.default(&ctx, q), - .id => try single.default(false, &ctx, q), - .idFilter => try single.default(true, &ctx, q), - .alias => try single.alias(false, &ctx, q), - .aliasFilter => try single.alias(true, &ctx, q), - .ids => try multiple.ids(&ctx, q), - .aggregates => try multiple.aggregates(&ctx, q), - .aggregatesCount => try multiple.aggregatesCount(&ctx, q), + .default => try default(&ctx, q), + .id => try Single.default(false, &ctx, q), + .idFilter => try Single.default(true, &ctx, q), + .alias => try Single.alias(false, &ctx, q), + .aliasFilter => try Single.alias(true, &ctx, q), + .ids => try ids(&ctx, q), + .aggregates => try Aggregates.aggregates(&ctx, q), + .aggregatesCount => try Aggregates.aggregatesCount(&ctx, q), else => { return errors.DbError.INCORRECT_QUERY_TYPE; }, diff --git a/native/query/single.zig b/native/query/single.zig index 011da8575d..e7a304f2e4 100644 --- a/native/query/single.zig +++ b/native/query/single.zig @@ -60,6 +60,7 @@ pub fn default( try ctx.thread.query.append(t.ReadOp.id); try ctx.thread.query.append(header.id); const nestedQuery = q[i .. i + header.includeSize]; + try Include.include(node, ctx, nestedQuery, typeEntry); } else { try ctx.thread.query.append(@as(u32, 0)); @@ -79,20 +80,20 @@ pub fn reference( if (References.getReference(from, fs)) |ref| { const typeEntry = try Node.getType(ctx.db, header.typeId); const n = Node.getNode(typeEntry, ref.dst); + try ctx.thread.query.append(t.ReadOp.reference); + try ctx.thread.query.append(header.prop); + const resultByteSizeIndex = try ctx.thread.query.reserve(4); + const startIndex = ctx.thread.query.index; if (n) |node| { - try ctx.thread.query.append(t.ReadOp.reference); - try ctx.thread.query.append(header.prop); - const resultByteSizeIndex = try ctx.thread.query.reserve(4); - const startIndex = ctx.thread.query.index; try ctx.thread.query.append(ref.dst); const nestedQuery = q[i.* .. i.* + header.includeSize]; try Include.include(node, ctx, nestedQuery, typeEntry); - ctx.thread.query.writeAs( - u32, - @truncate(ctx.thread.query.index - startIndex), - resultByteSizeIndex, - ); } + ctx.thread.query.writeAs( + u32, + @truncate(ctx.thread.query.index - startIndex), + resultByteSizeIndex, + ); } i.* += header.includeSize; } @@ -105,6 +106,7 @@ pub fn referenceEdge( i: *usize, ) !void { const header = utils.readNext(t.QueryHeaderSingleReference, q, i); + const fs = try Schema.getFieldSchema(fromType, header.prop); if (References.getReference(from, fs)) |ref| { const typeEntry = try Node.getType(ctx.db, header.typeId); @@ -121,14 +123,25 @@ pub fn referenceEdge( const edgeTypeEntry = try Node.getType(ctx.db, header.edgeTypeId); const e = Node.getNode(edgeTypeEntry, ref.edge); + if (e) |edge| { const edgeQuery = q[i.* + header.includeSize .. i.* + header.includeSize + header.edgeSize]; try ctx.thread.query.append(t.ReadOp.edge); + const edgesByteSizeIndex = try ctx.thread.query.reserve(4); + const edgeStartIndex = ctx.thread.query.index; try Include.include(edge, ctx, edgeQuery, edgeTypeEntry); + ctx.thread.query.writeAs( + u32, + @truncate(ctx.thread.query.index - edgeStartIndex), + edgesByteSizeIndex, + ); + } else { + std.log.err( + "singe ref edge -> WRONG EDGE NODE HAS TO BE THERE! (even if it does not hold values) \n", + .{}, + ); } - i.* += header.edgeSize; - ctx.thread.query.writeAs( u32, @truncate(ctx.thread.query.index - startIndex), @@ -137,5 +150,5 @@ pub fn referenceEdge( } } - i.* += header.includeSize; + i.* += header.includeSize + header.edgeSize; } diff --git a/native/selva/fields.zig b/native/selva/fields.zig index 4fc9f3b82b..3a8d4c4031 100644 --- a/native/selva/fields.zig +++ b/native/selva/fields.zig @@ -19,26 +19,33 @@ const emptyArray: []const [16]u8 = emptySlice; extern "c" const selva_string: opaque {}; -pub fn getCardinality(node: Node.Node, fieldSchema: Schema.FieldSchema) ?[]u8 { - if (selva.c.selva_fields_get_selva_string(node, fieldSchema)) |stored| { - const countDistinct = selva.c.hll_count(@ptrCast(stored)); - return countDistinct[0..4]; +inline fn toNodeId(node: anytype) selva.c.node_id_t { + if (comptime @TypeOf(node) == selva.c.node_id_t) { + return node; + } else if (comptime @TypeOf(node) == Node.Node) { + return Node.getNodeId(node); } else { - return null; + @compileLog("Invalid type: ", @TypeOf(node)); + @compileError("Invalid type"); } } -pub fn getCardinalityReference(ctx: *DbCtx, efc: Schema.EdgeFieldConstraint, ref: References.ReferenceLarge, fieldSchema: Schema.FieldSchema) []u8 { - const edge_node = Node.getEdgeNode(ctx, efc, ref); - if (edge_node == null) { - return emptySlice; +pub fn ensureCardinality(node: Node.Node, fieldSchema: Schema.FieldSchema, hllPrecision: u8, hllMode: bool) *selva.c.struct_selva_string { + var data = selva.c.selva_fields_get_selva_string(node, fieldSchema); + if (data == null) { + data = selva.c.selva_fields_ensure_string(node, fieldSchema, selva.c.HLL_INIT_SIZE) orelse errors.SelvaError.SELVA_EINTYPE; + selva.c.hll_init(data, hllPrecision, hllMode); } - if (selva.c.selva_fields_get_selva_string(edge_node, fieldSchema) orelse null) |stored| { - const countDistinct = selva.c.hll_count(@ptrCast(stored)); + return data; +} + +pub fn getCardinality(node: Node.Node, fieldSchema: Schema.FieldSchema) ?[]u8 { + if (selva.c.selva_fields_get_selva_string(node, fieldSchema)) |stored| { + const countDistinct = selva.c.hll_count(stored); return countDistinct[0..4]; } else { - return emptySlice; + return null; } } @@ -51,6 +58,10 @@ pub fn get( if (propType == t.PropType.alias) { const target = Node.getNodeId(node); const typeAliases = selva.c.selva_get_aliases(typeEntry, fieldSchema.field); + if (typeAliases == null) { + std.log.err("not an alias prop {any}", .{ fieldSchema }); + return @as([*]u8, undefined)[0..0]; + } const alias = selva.c.selva_get_alias_by_dest(typeAliases, target); if (alias == null) { return @as([*]u8, undefined)[0..0]; @@ -106,7 +117,9 @@ pub inline fn setMicroBuffer(node: Node.Node, fieldSchema: Schema.FieldSchema, v )); } -pub inline fn setColvec(te: Node.Type, nodeId: selva.c.node_id_t, fieldSchema: Schema.FieldSchema, vec: []u8) void { +pub inline fn setColvec(te: Node.Type, node: anytype, fieldSchema: Schema.FieldSchema, vec: []u8) void { + const nodeId = toNodeId(node); + selva.c.colvec_set_vec( te, nodeId, @@ -115,27 +128,39 @@ pub inline fn setColvec(te: Node.Type, nodeId: selva.c.node_id_t, fieldSchema: S ); } +pub inline fn clearColvec(te: Node.Type, node: anytype, fieldSchema: Schema.FieldSchema) void { + const nodeId = toNodeId(node); + + selva.c.colvec_clear_vec( + te, + nodeId, + fieldSchema, + ); +} + // TODO This is now hll specific but we might want to change it. -pub inline fn ensurePropTypeString( - ctx: *Modify.ModifyCtx, +pub fn ensurePropTypeString( + node: Node.Node, fieldSchema: Schema.FieldSchema, ) !*selva.c.selva_string { - return selva.c.selva_fields_ensure_string(ctx.node.?, fieldSchema, selva.c.HLL_INIT_SIZE) orelse errors.SelvaError.SELVA_EINTYPE; + return selva.c.selva_fields_ensure_string(node, fieldSchema, selva.c.HLL_INIT_SIZE) orelse errors.SelvaError.SELVA_EINTYPE; } pub fn ensureEdgePropTypeString( ctx: *Modify.ModifyCtx, - node: Node.Node, efc: Schema.EdgeFieldConstraint, ref: References.ReferenceLarge, fieldSchema: Schema.FieldSchema, ) !*selva.c.selva_string { - const edge_node = selva.c.selva_fields_ensure_ref_edge(ctx.db.selva, node, efc, ref, 0) orelse return errors.SelvaError.SELVA_ENOTSUP; - return selva.c.selva_fields_ensure_string(edge_node, fieldSchema, selva.c.HLL_INIT_SIZE) orelse return errors.SelvaError.SELVA_EINTYPE; + if (Node.getEdgeNode(ctx.db, efc, ref)) |edgeNode| { + return selva.c.selva_fields_ensure_string(edgeNode, fieldSchema, selva.c.HLL_INIT_SIZE) orelse return errors.SelvaError.SELVA_EINTYPE; + } else { + return errors.SelvaError.SELVA_ENOENT; + } } -pub inline fn deleteField(ctx: *Modify.ModifyCtx, node: Node.Node, fieldSchema: Schema.FieldSchema) !void { - try errors.selva(selva.c.selva_fields_del(ctx.db.selva, node, fieldSchema)); +pub inline fn deleteField(db: *DbCtx, node: Node.Node, fieldSchema: Schema.FieldSchema) !void { + try errors.selva(selva.c.selva_fields_del(db.selva, node, fieldSchema)); } pub inline fn deleteTextFieldTranslation(ctx: *Modify.ModifyCtx, fieldSchema: Schema.FieldSchema, lang: t.LangCode) !void { @@ -306,3 +331,15 @@ pub fn getAliasByName(typeEntry: Node.Type, field: u8, aliasName: []u8) ?Node.No // TODO Partials return res.node; } + +pub fn getAliasByNode(typeEntry: Node.Type, node: anytype, field: u8) ![]const u8 { + if (selva.c.selva_get_aliases(typeEntry, field)) |aliases| { + const nodeId = toNodeId(node); + if (selva.c.selva_get_alias_by_dest(aliases, nodeId)) |alias| { + var len: usize = undefined; + const name = selva.c.selva_get_alias_name(alias, &len); + return name[0..len]; + } + } + return errors.SelvaError.SELVA_ENOENT; +} diff --git a/native/selva/node.zig b/native/selva/node.zig index 9cfdf0e0b2..39c0771935 100644 --- a/native/selva/node.zig +++ b/native/selva/node.zig @@ -11,15 +11,29 @@ const DbCtx = @import("../db/ctx.zig").DbCtx; pub const Type = selva.Type; pub const Node = selva.Node; -pub inline fn getType(ctx: *DbCtx, typeId: t.TypeId) !Type { - const selvaTypeEntry: ?Type = selva.c.selva_get_type_by_index( - ctx.selva.?, - typeId, - ); - if (selvaTypeEntry == null) { - return errors.SelvaError.SELVA_EINTYPE; +pub inline fn getType(ctx: *DbCtx, v: anytype) !Type { + var selvaTypeEntry: ?Type = undefined; + + if (comptime @TypeOf(v) == t.TypeId) { + selvaTypeEntry = selva.c.selva_get_type_by_index( + ctx.selva.?, + v, + ); + } else if (comptime @TypeOf(v) == selva.Node or + @TypeOf(v) == ?selva.Node) + { + if (comptime @TypeOf(v) == ?selva.Node) { + if (v == null) { + return errors.SelvaError.SELVA_ENOENT; + } + } + selvaTypeEntry = selva.c.selva_get_type_by_node(ctx.selva.?, v); + } else { + @compileLog("Invalid type: ", @TypeOf(v)); + @compileError("Invalid type"); } - return selvaTypeEntry.?; + + return if (selvaTypeEntry == null) errors.SelvaError.SELVA_EINTYPE else selvaTypeEntry.?; } pub inline fn getRefDstType(ctx: *DbCtx, sch: anytype) !Type { @@ -64,7 +78,7 @@ pub inline fn getNodeTypeId(node: Node) t.TypeId { return selva.c.selva_get_node_type(node); } -pub inline fn upsertNode(_: *Modify.ModifyCtx, typeEntry: selva.Type, id: u32) !Node { +pub inline fn upsertNode(typeEntry: selva.Type, id: u32) !Node { const res = selva.c.selva_upsert_node(typeEntry, id); // TODO Partials if (res.node == null) { @@ -155,41 +169,32 @@ pub inline fn getNodeFromReference(dstType: selva.Type, ref: anytype) ?Node { return null; } -pub inline fn ensureRefEdgeNode(ctx: *Modify.ModifyCtx, node: Node, efc: selva.EdgeFieldConstraint, ref: selva.ReferenceLarge) !Node { - const edgeNode = selva.c.selva_fields_ensure_ref_edge(ctx.db.selva, node, efc, ref, 0); - if (edgeNode) |n| { - selva.markDirty(ctx, efc.edge_node_type, getNodeId(n)); - return n; - } else { - return errors.SelvaError.SELVA_ENOTSUP; - } -} - pub inline fn getEdgeNode(db: *DbCtx, efc: selva.EdgeFieldConstraint, ref: selva.ReferenceLarge) ?Node { if (ref.*.edge == 0) { return null; } const edge_type = selva.c.selva_get_type_by_index(db.selva, efc.*.edge_node_type); - return selva.c.selva_find_node(edge_type, ref.*.edge); + // TODO Partials + return selva.c.selva_find_node(edge_type, ref.*.edge).node; } -pub inline fn deleteNode(ctx: *Modify.ModifyCtx, typeEntry: Type, node: Node) !void { - selva.c.selva_del_node(ctx.db.selva, typeEntry, node); +pub inline fn deleteNode(db: *DbCtx, typeEntry: Type, node: Node) !void { + selva.c.selva_del_node(db.selva, typeEntry, node); } -pub inline fn flushNode(ctx: *Modify.ModifyCtx, typeEntry: Type, node: Node) void { - selva.c.selva_flush_node(ctx.db.selva, typeEntry, node); +pub inline fn flushNode(db: *DbCtx, typeEntry: Type, node: Node) void { + selva.c.selva_flush_node(db.selva, typeEntry, node); } -pub inline fn expireNode(ctx: *Modify.ModifyCtx, typeId: t.TypeId, nodeId: u32, ts: i64) void { - selva.c.selva_expire_node(ctx.db.selva, typeId, nodeId, ts, selva.c.SELVA_EXPIRE_NODE_STRATEGY_CANCEL_OLD); - selva.markDirty(ctx, typeId, nodeId); +pub inline fn expireNode(db: *DbCtx, typeId: t.TypeId, nodeId: u32, ts: i64) void { + selva.c.selva_expire_node(db.selva, typeId, nodeId, ts, selva.c.SELVA_EXPIRE_NODE_STRATEGY_CANCEL_OLD); + selva.markDirty(db, typeId, nodeId); } -pub inline fn expire(ctx: *Modify.ModifyCtx) void { +pub inline fn expire(db: *DbCtx) void { // Expire things before query - selva.c.selva_db_expire_tick(ctx.db.selva, std.time.timestamp()); + selva.c.selva_db_expire_tick(db.selva, std.time.timestamp()); } pub inline fn getNodeBlockHash(db: *DbCtx, typeEntry: Type, start: u32, hashOut: *SelvaHash128) c_int { diff --git a/native/selva/references.zig b/native/selva/references.zig index d105dec2e2..36b4a9495e 100644 --- a/native/selva/references.zig +++ b/native/selva/references.zig @@ -16,13 +16,17 @@ pub inline fn preallocReferences(ctx: *Modify.ModifyCtx, len: u64) void { _ = selva.c.selva_fields_prealloc_refs(ctx.db.selva.?, ctx.node.?, ctx.fieldSchema.?, len); } +pub inline fn preallocReferences2(db: *DbCtx, node: Node.Node, fieldSchema: Schema.FieldSchema, len: u64) void { + _ = selva.c.selva_fields_prealloc_refs(db.selva.?, node, fieldSchema, len); +} + pub inline fn getReference(node: Node.Node, fieldSchema: Schema.FieldSchema) ?ReferenceLarge { return selva.c.selva_fields_get_reference(node, fieldSchema); } -pub fn deleteReference(ctx: *Modify.ModifyCtx, node: Node.Node, fieldSchema: Schema.FieldSchema, id: u32) !void { +pub fn deleteReference(db: *DbCtx, node: Node.Node, fieldSchema: Schema.FieldSchema, id: u32) !void { try errors.selva(selva.c.selva_fields_del_ref( - ctx.db.selva, + db.selva, node, fieldSchema, id, @@ -30,7 +34,7 @@ pub fn deleteReference(ctx: *Modify.ModifyCtx, node: Node.Node, fieldSchema: Sch const efc = selva.c.selva_get_edge_field_constraint(fieldSchema); const dstType = efc.*.dst_node_type; - selva.markDirty(ctx, dstType, id); + selva.markDirty(db, dstType, id); } pub fn referencesHas(refs: References, dstNodeId: u32) bool { @@ -81,7 +85,7 @@ pub fn ReferencesIteratorEdges(comptime desc: bool) type { i: u32 = 0, pub fn nextRef(self: *ReferencesIteratorEdges(desc)) ?ReferencesIteratorEdgesResult { if (self.i < self.refs.nr_refs) { - const index = if (desc) self.refs.nr_refs - self.i else self.i; + const index = if (desc) self.refs.nr_refs - self.i - 1 else self.i; const ref = self.refs.unnamed_0.large[index]; const node = Node.getNode(self.dstType, ref.dst); const edgeNode = Node.getNode(self.edgeType, ref.edge); @@ -90,13 +94,14 @@ pub fn ReferencesIteratorEdges(comptime desc: bool) type { if (edgeNode) |n2| { return ReferencesIteratorEdgesResult{ .node = n1, .edge = n2 }; } + // no edge but node exsits } } return null; } pub fn next(self: *ReferencesIteratorEdges(desc)) ?Node.Node { if (self.i < self.refs.nr_refs) { - const index = if (desc) self.refs.nr_refs - self.i else self.i; + const index = if (desc) self.refs.nr_refs - self.i - 1 else self.i; const ref = self.refs.unnamed_0.large[index]; const node = Node.getNode(self.dstType, ref.dst); self.i = self.i + 1; @@ -146,15 +151,15 @@ pub fn iterator( } } -pub inline fn clearReferences(ctx: *Modify.ModifyCtx, node: Node.Node, fieldSchema: Schema.FieldSchema) void { - selva.c.selva_fields_clear_references(ctx.db.selva, node, fieldSchema); +pub fn clearReferences(db: *DbCtx, node: Node.Node, fieldSchema: Schema.FieldSchema) void { + selva.c.selva_fields_clear_references(db.selva, node, fieldSchema); } -pub fn writeReference(ctx: *Modify.ModifyCtx, src: Node.Node, fieldSchema: Schema.FieldSchema, dst: Node.Node) !?ReferenceLarge { +pub fn writeReference(db: *DbCtx, src: Node.Node, fieldSchema: Schema.FieldSchema, dst: Node.Node) !?ReferenceLarge { var refAny: selva.c.SelvaNodeReferenceAny = undefined; errors.selva(selva.c.selva_fields_reference_set( - ctx.db.selva, + db.selva, src, fieldSchema, dst, @@ -178,22 +183,21 @@ pub fn writeReference(ctx: *Modify.ModifyCtx, src: Node.Node, fieldSchema: Schem return refAny.p.large; } -pub fn putReferences(ctx: *Modify.ModifyCtx, node: Node.Node, fieldSchema: Schema.FieldSchema, ids: []u32) !void { - try errors.selva(selva.c.selva_fields_references_insert_tail(ctx.db.selva, node, fieldSchema, try Node.getRefDstType(ctx.db, fieldSchema), ids.ptr, ids.len)); - +pub fn putReferences(db: *DbCtx, node: Node.Node, fieldSchema: Schema.FieldSchema, ids: []u32) !void { + try errors.selva(selva.c.selva_fields_references_insert_tail(db.selva, node, fieldSchema, try Node.getRefDstType(db, fieldSchema), ids.ptr, ids.len)); const efc = selva.c.selva_get_edge_field_constraint(fieldSchema); const dstType = efc.*.dst_node_type; for (ids) |id| { - selva.markDirty(ctx, dstType, id); + selva.markDirty(db, dstType, id); } } // @param index 0 = first; -1 = last. -pub fn insertReference(ctx: *Modify.ModifyCtx, node: Node.Node, fieldSchema: Schema.FieldSchema, dstNode: Node.Node, index: isize, reorder: bool) !selva.c.SelvaNodeReferenceAny { - const te_dst = selva.c.selva_get_type_by_node(ctx.db.selva, dstNode); +pub fn insertReference(db: *DbCtx, node: Node.Node, fieldSchema: Schema.FieldSchema, dstNode: Node.Node, index: isize, reorder: bool) !selva.c.SelvaNodeReferenceAny { + const te_dst = selva.c.selva_get_type_by_node(db.selva, dstNode); var ref: selva.c.SelvaNodeReferenceAny = undefined; const insertFlags: selva.c.selva_fields_references_insert_flags = if (reorder) selva.c.SELVA_FIELDS_REFERENCES_INSERT_FLAGS_REORDER else 0; - const code = selva.c.selva_fields_references_insert(ctx.db.selva, node, fieldSchema, index, insertFlags, te_dst, dstNode, &ref); + const code = selva.c.selva_fields_references_insert(db.selva, node, fieldSchema, index, insertFlags, te_dst, dstNode, &ref); if (code != selva.c.SELVA_EEXIST) { try errors.selva(code); @@ -202,7 +206,7 @@ pub fn insertReference(ctx: *Modify.ModifyCtx, node: Node.Node, fieldSchema: Sch // relevant when updating const efc = selva.c.selva_get_edge_field_constraint(fieldSchema); const dstType = efc.*.dst_node_type; - selva.markDirty(ctx, dstType, Node.getNodeId(dstNode)); + selva.markDirty(db, dstType, Node.getNodeId(dstNode)); } return ref; diff --git a/native/selva/selva.zig b/native/selva/selva.zig index b305415a27..fa011e26eb 100644 --- a/native/selva/selva.zig +++ b/native/selva/selva.zig @@ -30,9 +30,11 @@ pub const c = @cImport({ @cInclude("selva/membar.h"); @cInclude("selva/mblen.h"); }); +const t = @import("../types.zig"); const std = @import("std"); const Modify = @import("../modify/common.zig"); +const DbCtx = @import("../db/ctx.zig").DbCtx; pub const Node = *c.SelvaNode; pub const Aliases = *c.SelvaAliases; @@ -54,8 +56,15 @@ pub fn selvaStringDestroy(str: ?c.selva_string) void { } // TODO Accept also Type as an arg -pub inline fn markDirty(ctx: *Modify.ModifyCtx, typeId: u16, nodeId: u32) void { - c.selva_mark_dirty(c.selva_get_type_by_index(ctx.db.selva, typeId), nodeId); +pub inline fn markDirty(db: *DbCtx, nodeType: anytype, nodeId: u32) void { + if (comptime @TypeOf(nodeType) == t.TypeId) { + c.selva_mark_dirty(c.selva_get_type_by_index(db.selva, nodeType), nodeId); + } else if (comptime @TypeOf(nodeType) == Type) { + c.selva_mark_dirty(nodeType, nodeId); + } else { + @compileLog("Invalid type: ", @TypeOf(nodeType)); + @compileError("Invalid type"); + } } pub fn markReferencesDirty(ctx: *Modify.ModifyCtx, dstTypeId: u16, refs: []u32) void { diff --git a/native/sort/iterator.zig b/native/sort/iterator.zig index dfbd58bf58..0cfedb67b8 100644 --- a/native/sort/iterator.zig +++ b/native/sort/iterator.zig @@ -208,7 +208,7 @@ fn getSortIndex( } } -pub fn fromIterator( +pub inline fn fromIterator( comptime desc: bool, comptime isEdge: bool, dbCtx: *DbCtx, diff --git a/native/thread/worker/worker.zig b/native/thread/worker/worker.zig index 2f336c78c9..a97dbf82b1 100644 --- a/native/thread/worker/worker.zig +++ b/native/thread/worker/worker.zig @@ -10,6 +10,7 @@ const utils = @import("../../utils.zig"); const selva = @import("../../selva/selva.zig").c; const jemalloc = @import("../../jemalloc.zig"); const common = @import("../common.zig"); +const Node = @import("../../selva/node.zig"); const modifyNotPending = @import("modifyNotPending.zig").modifyNotPending; pub fn worker(threads: *Thread.Threads, thread: *common.Thread) !void { @@ -108,37 +109,15 @@ pub fn worker(threads: *Thread.Threads, thread: *common.Thread) !void { .emptyMod => { // does nothing but does trigger flush marked subs and maybe more in the future }, - .modify => try Modify.modify(thread, m, threads.ctx, op), + .modify => try Modify.modify(thread, m, threads.ctx), + .expire => Node.expire(threads.ctx), .loadBlock => try dump.loadBlock(thread, threads.ctx, m, op), .unloadBlock => try dump.unloadBlock(thread, threads.ctx, m, op), .loadCommon => try dump.loadCommon(thread, threads.ctx, m, op), - - .createType => { - const typeCode = utils.read(u32, m, 0); - const resp = try thread.modify.result(4, typeCode, op); - const schema = m[5..m.len]; - const err = selva.selva_db_create_type( - threads.ctx.selva, - @truncate(typeCode), - schema.ptr, - schema.len, - ); - utils.write(resp, err, 0); - }, // .subscribe => { // _ = try thread.modify.result(0, utils.read(u32, m, 0), op); // }, // .unsubscribe => try Subscription.unsubscribe(threads.ctx, m, thread), - .setSchemaIds => { - _ = try thread.modify.result(0, utils.read(u32, m, 0), op); - if (threads.ctx.ids.len > 0) { - jemalloc.free(threads.ctx.ids); - threads.ctx.ids = &[_]u32{}; - } - threads.ctx.ids = jemalloc.alloc(u32, (m.len - 5) / @sizeOf(u32)); - const ids = m[5..m.len]; - utils.byteCopy(threads.ctx.ids, ids, 0); - }, else => {}, } // this is not always nessecary e.g. subscribe does not need this diff --git a/native/types.zig b/native/types.zig index ddd2bf5881..f5e5cc4615 100644 --- a/native/types.zig +++ b/native/types.zig @@ -2,6 +2,8 @@ const Schema = @import("selva/schema.zig"); const Node = @import("selva/node.zig"); pub const TypeId = u16; +pub const SelvaFieldType = u8; +pub const SelvaField = u8; pub const BridgeResponse = enum(u32) { query = 1, @@ -36,9 +38,8 @@ pub const OpType = enum(u8) { loadBlock = 128, unloadBlock = 129, loadCommon = 130, - createType = 131, - setSchemaIds = 132, emptyMod = 133, + expire = 134, // -------------------- noOp = 255, @@ -70,10 +71,128 @@ pub const ModOp = enum(u8) { deleteTextField = 16, upsert = 17, insert = 18, + end = 254, // TODO remove when modify is not used for response padding = 255, }; +pub const Modify = enum(u8) { + create = 0, + createRing = 1, + update = 2, + delete = 3, + upsert = 4, + insert = 5, +}; + +pub const ModifyHeader = packed struct { + opId: u32, + opType: OpType, + schema: u64, + count: u32, +}; + +pub const ModifyUpdateHeader = packed struct { + op: Modify, + type: TypeId, + isTmp: bool, + _padding: u7, + id: u32, + size: u32, +}; + +pub const ModifyDeleteHeader = packed struct { + op: Modify, + type: TypeId, + isTmp: bool, + _padding: u7, + id: u32, +}; + +pub const ModifyCreateHeader = packed struct { + op: Modify, + type: TypeId, + size: u32, +}; + +pub const ModifyCreateRingHeader = packed struct { + op: Modify, + type: TypeId, + maxNodeId: u32, + size: u32, +}; + +pub const ModifyMainHeader = packed struct { + id: u8, + type: PropType, + increment: bool, + incrementPositive: bool, + expire: bool, + _padding: u5, + size: u8, + start: u16, +}; + +pub const ModifyPropHeader = packed struct { + id: u8, + type: PropType, + size: u32, +}; + +pub const ModifyReferences = enum(u8) { + clear = 0, + ids = 1, + idsWithMeta = 2, + tmpIds = 3, + delIds = 4, + delTmpIds = 5, +}; + +pub const ModifyReferencesHeader = packed struct { + op: ModifyReferences, + size: u32, +}; + +// pub const ModifyReferencesMeta = enum(u8) { +// noTmpNoIndex = 0, +// tmpNoIndex = 1, +// tmpIndex = 2, +// noTmpIndex = 4, +// }; + +pub const ModifyReferencesMetaHeader = packed struct { + id: u32, + isTmp: bool, + withIndex: bool, + _padding: u6, + index: i32, + size: u32, +}; + +pub const ModifyReferenceMetaHeader = packed struct { + id: u32, + isTmp: bool, + _padding: u7, + size: u32, +}; + +pub const ModifyCardinalityHeader = packed struct { + sparse: bool, + _padding: u7, + precision: u8, +}; + +pub const ModifyResultItem = packed struct { + id: u32, + err: ModifyError, +}; + +pub const ModifyError = enum(u8) { + null = 0, + nx = 1, + unknown = 2, +}; + pub const PropType = enum(u8) { null = 0, timestamp = 1, @@ -157,8 +276,6 @@ pub const PropType = enum(u8) { pub fn size(self: PropType) u8 { switch (self) { .timestamp, - // .created, - // .updated, .number, => return 8, .int8, @@ -178,20 +295,33 @@ pub const PropType = enum(u8) { } }; +pub const PropTypeSelva = enum(u8) { + null = 0, + microBuffer = 1, + string = 2, + text = 3, + reference = 4, + references = 5, + alias = 8, + aliases = 9, + colVec = 10, +}; + pub const RefOp = enum(u8) { clear = 0, del = 1, - end = 2, - + end = @intFromEnum(ModOp.end), set = 3, - setIndex = 4, - setTmp = 5, - setEdge = 6, + setEdge = 4, - setIndexTmp = 7, - setEdgeIndex = 8, - setEdgeIndexTmp = 9, - setEdgeTmp = 10, + // setIndex = 4, + // setTmp = 5, + // setEdge = 6, + + // setIndexTmp = 7, + // setEdgeIndex = 8, + // setEdgeIndexTmp = 9, + // setEdgeTmp = 10, // overwrite = 0, // add = 1, @@ -519,6 +649,27 @@ pub const QueryIteratorType = enum(u8) { edgeIncludeDescSort = 35, edgeIncludeDescFilter = 36, edgeIncludeDescFilterSort = 37, + + edgeIncludeFilterOnEdge = 40, + edgeIncludeFilterOnEdgeDesc = 41, + edgeIncludeFilterOnEdgeSort = 42, + edgeIncludeFilterOnEdgeSortDesc = 43, + + edgeFilterOnEdge = 60, + edgeFilterOnEdgeDesc = 61, + edgeFilterOnEdgeSort = 62, + edgeFilterOnEdgeSortDesc = 63, + + edgeIncludeFilterAndFilterOnEdge = 70, + edgeIncludeFilterAndFilterOnEdgeDesc = 71, + edgeIncludeFilterAndFilterOnEdgeSort = 72, + edgeIncludeFilterAndFilterOnEdgeSortDesc = 73, + + edgeFilterAndFilterOnEdge = 80, + edgeFilterAndFilterOnEdgeDesc = 81, + edgeFilterAndFilterOnEdgeSort = 82, + edgeFilterAndFilterOnEdgeSortDesc = 83, + // default search search = 120, searchFilter = 121, @@ -744,25 +895,15 @@ pub const FilterOpCompare = enum(u8) { // ----------- gt = 14, lt = 15, - gtBatch = 16, - ltBatch = 17, - gtBatchSmall = 18, - ltBatchSmall = 19, - // ----------- ge = 20, le = 21, - geBatch = 22, - leBatch = 23, - geBatchSmall = 24, - leBatchSmall = 25, // ----------- - // eq = 12, - // will become quite a lot :L > , < <=, >= - // maybe format a bit easier + inc = 22, + ninc = 23, + incBatch = 24, + nincBatch = 25, + // ---------- - // var is a lot less - - // selectLargeRef = 202, selectLargeRefs = 203, selectRef = 204, selectSmallRefs = 205, @@ -798,3 +939,44 @@ pub const FilterSelect = packed struct { // edgeTypeId: TypeId, // you want EDGE INDEX as well }; + +pub const SelvaSchemaHeader = packed struct { + blockCapacity: u32, + nrFields: u8, + nrFixedFields: u8, + nrVirtualFields: u8, + sdbVersion: u8, +}; + +pub const SelvaSchemaMicroBuffer = packed struct { + type: SelvaFieldType, + len: u16, + hasDefault: u8, +}; + +pub const SelvaSchemaString = packed struct { + type: SelvaFieldType, + fixedLenHint: u8, + defaultLen: u32, +}; + +pub const SelvaSchemaText = packed struct { + type: SelvaFieldType, + nrDefaults: u8, +}; + +pub const SelvaSchemaRef = packed struct { + type: SelvaFieldType, + flags: u8, + dstNodeType: TypeId, + inverseField: SelvaField, + edgeNodeType: TypeId, + capped: u32, +}; + +pub const SelvaSchemaColvec = packed struct { + type: SelvaFieldType, + vecLen: u16, + compSize: u16, + hasDefault: u8, +}; diff --git a/native/utils.zig b/native/utils.zig index b8ac28a424..1b9f535538 100644 --- a/native/utils.zig +++ b/native/utils.zig @@ -83,16 +83,16 @@ pub inline fn writeNext(comptime T: type, buffer: []u8, value: T, offset: *usize } pub inline fn toSlice(comptime T: type, value: []u8) []T { - const x: []T = @as([*]T, @ptrCast(@alignCast(value.ptr)))[0..@divFloor(value.len, @sizeOf(T))]; + const x: []T = @as([*]T, @ptrCast(@alignCast(value.ptr)))[0..@divFloor(value.len, sizeOf(T))]; return x; } pub inline fn readPtr( comptime T: type, - buffer: []u8, + buffer: []const u8, offset: usize, ) *T { - return @as(*T, @ptrCast(@alignCast(buffer.ptr + offset))); + return @as(*T, @constCast(@ptrCast(@alignCast(buffer.ptr + offset)))); } pub inline fn read( @@ -127,7 +127,7 @@ pub inline fn read( pub fn ReadIterator(comptime T: type) type { return struct { offset: *usize, - buffer: []u8, + buffer: []const u8, len: usize, pub fn next(self: *ReadIterator(T)) ?T { if (self.offset.* < self.len) { @@ -140,7 +140,7 @@ pub fn ReadIterator(comptime T: type) type { }; } -pub inline fn readIterator(T: type, buffer: []u8, amount: usize, offset: *usize) ReadIterator(T) { +pub inline fn readIterator(T: type, buffer: []const u8, amount: usize, offset: *usize) ReadIterator(T) { return ReadIterator(T){ .buffer = buffer, .len = amount * sizeOf(T) + offset.*, @@ -148,7 +148,7 @@ pub inline fn readIterator(T: type, buffer: []u8, amount: usize, offset: *usize) }; } -pub inline fn readNext(T: type, buffer: []u8, offset: *usize) T { +pub inline fn readNext(T: type, buffer: []const u8, offset: *usize) T { const val = read(T, buffer, offset.*); offset.* = offset.* + @bitSizeOf(T) / 8; return val; @@ -257,14 +257,15 @@ pub inline fn alignLeft(comptime T: type, data: []u8) u8 { if (offset != 0) move(aligned, unAligned); return offset; } - -pub inline fn alignLeftLen(alignment: u8, data: []u8) u8 { - // (i + 7) & ~@as(usize, 7); more efficient - const unAligned = data[alignment..data.len]; - const address = @intFromPtr(unAligned.ptr); - const offset: u8 = @truncate(address % alignment); - const aligned = data[alignment - offset .. data.len - offset]; - if (offset != 0) move(aligned, unAligned); +pub fn alignLeftLen(alignment: u8, data: []u8) u8 { + const addr = @intFromPtr(data.ptr); + const offset = @as(u8, @truncate(addr & (alignment - 1))); + if (offset == 0) return 0; + const start = alignment - offset; + const len = data.len - alignment; + const src = data[alignment .. alignment + len]; + const dst = data[start .. start + len]; + move(dst, src); return offset; } diff --git a/package-lock.json b/package-lock.json index ad0b1ce41d..71baa502af 100644 --- a/package-lock.json +++ b/package-lock.json @@ -33,6 +33,7 @@ "@swc-node/register": "^1.11.1", "@types/node": "^22.5.3", "@types/react": "^18.3.1", + "arktype": "2.1.29", "async-sema": "3.1.1", "concurrently": "^9.2.1", "fs-extra": "^11.1.1", @@ -44,7 +45,8 @@ "ts-node": "^10.9.2", "tsdown": "^0.16.7", "tsx": "^4.20.6", - "typescript": "^5.6.3" + "typescript": "^5.6.3", + "valibot": "1.2.0" }, "engines": { "node": "24", @@ -56,6 +58,23 @@ "@esbuild/linux-x64": "^0.27.0" } }, + "node_modules/@ark/schema": { + "version": "0.56.0", + "resolved": "https://registry.npmjs.org/@ark/schema/-/schema-0.56.0.tgz", + "integrity": "sha512-ECg3hox/6Z/nLajxXqNhgPtNdHWC9zNsDyskwO28WinoFEnWow4IsERNz9AnXRhTZJnYIlAJ4uGn3nlLk65vZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ark/util": "0.56.0" + } + }, + "node_modules/@ark/util": { + "version": "0.56.0", + "resolved": "https://registry.npmjs.org/@ark/util/-/util-0.56.0.tgz", + "integrity": "sha512-BghfRC8b9pNs3vBoDJhcta0/c1J1rsoS1+HgVUreMFPdhz/CRAKReAu57YEllNaSy98rWAdY1gE+gFup7OXpgA==", + "dev": true, + "license": "MIT" + }, "node_modules/@babel/generator": { "version": "7.28.5", "dev": true, @@ -155,10 +174,6 @@ "@based/locale-x86-64-gnu": "*" } }, - "node_modules/@based/db/node_modules/@based/locale-x86-64-gnu": { - "dev": true, - "optional": true - }, "node_modules/@based/errors": { "version": "1.6.7", "resolved": "https://registry.npmjs.org/@based/errors/-/errors-1.6.7.tgz", @@ -1780,6 +1795,28 @@ "dev": true, "license": "MIT" }, + "node_modules/arkregex": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/arkregex/-/arkregex-0.0.5.tgz", + "integrity": "sha512-ncYjBdLlh5/QnVsAA8De16Tc9EqmYM7y/WU9j+236KcyYNUXogpz3sC4ATIZYzzLxwI+0sEOaQLEmLmRleaEXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ark/util": "0.56.0" + } + }, + "node_modules/arktype": { + "version": "2.1.29", + "resolved": "https://registry.npmjs.org/arktype/-/arktype-2.1.29.tgz", + "integrity": "sha512-jyfKk4xIOzvYNayqnD8ZJQqOwcrTOUbIU4293yrzAjA3O1dWh61j71ArMQ6tS/u4pD7vabSPe7nG3RCyoXW6RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ark/schema": "0.56.0", + "@ark/util": "0.56.0", + "arkregex": "0.0.5" + } + }, "node_modules/ast-kit": { "version": "2.2.0", "dev": true, @@ -3135,6 +3172,21 @@ "dev": true, "license": "MIT" }, + "node_modules/valibot": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/valibot/-/valibot-1.2.0.tgz", + "integrity": "sha512-mm1rxUsmOxzrwnX5arGS+U4T25RdvpPjPN4yR0u9pUBov9+zGVtO84tif1eY4r6zWxVxu3KzIyknJy3rxfRZZg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": ">=5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, "node_modules/validator": { "version": "13.15.23", "license": "MIT", diff --git a/package.json b/package.json index 3c05e6c19a..9da371fb22 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ "watch-zig:linux-arm64": "zig build -Dtarget=aarch64-linux-gnu --watch", "watch-zig:linux-x64": "zig build -Dtarget=x86_64-linux-gnu --watch", "watch-ts": "tsc -p tsconfig.build.json --watch", + "watch-ts-test": "tsc -p tsconfig.test.json --watch", "watch-native": "npm run build-c && npm run watch-zig", "watch-exports": "tsx --watch-path=./native/types.zig scripts/zigTsExports.ts ", "watch": "npm run concurrently npm:watch-native npm:watch-ts npm:watch-exports", @@ -68,6 +69,7 @@ "@swc-node/register": "^1.11.1", "@types/node": "^22.5.3", "@types/react": "^18.3.1", + "arktype": "2.1.29", "async-sema": "3.1.1", "concurrently": "^9.2.1", "fs-extra": "^11.1.1", @@ -79,7 +81,8 @@ "ts-node": "^10.9.2", "tsdown": "^0.16.7", "tsx": "^4.20.6", - "typescript": "^5.6.3" + "typescript": "^5.6.3", + "valibot": "1.2.0" }, "exports": { ".": "./dist/sdk.js", diff --git a/podman/Containerfile b/podman/Containerfile index 64ad3b143c..fdd0523a1b 100644 --- a/podman/Containerfile +++ b/podman/Containerfile @@ -1,4 +1,4 @@ -FROM gcc:14.2 +FROM gcc:15.2 ENV NODE_VERSION 24.11.1 ENV NVM_DIR /usr/local/nvm RUN apt-get update && \ diff --git a/scripts/test_push_exports.ts b/scripts/test_push_exports.ts new file mode 100644 index 0000000000..0db36c5c1e --- /dev/null +++ b/scripts/test_push_exports.ts @@ -0,0 +1,98 @@ +import { + ModifyMainHeader, + pushModifyMainHeader, + createModifyMainHeader, + readModifyMainHeader, + QueryHeader, + pushQueryHeader, + createQueryHeader, + OpType, + QueryType, + QueryIteratorType, +} from '../src/zigTsExports.js' +import { AutoSizedUint8Array } from '../src/modify/AutoSizedUint8Array.js' +import { deepEqual } from '../src/utils/index.js' + +const test = () => { + const header: ModifyMainHeader = { + id: 1, + start: 123, + size: 456, + } + + // Use createModifyMainHeader (existing logic) + const expectedBuf = createModifyMainHeader(header) + + // Use new pushModifyMainHeader + const autoBuf = new AutoSizedUint8Array() + const idx = pushModifyMainHeader(autoBuf, header) + + if (idx !== 0) { + console.error(`Expected index 0, got ${idx}`) + process.exit(1) + } + + const actualBuf = autoBuf.view + + console.log('Expected:', expectedBuf) + console.log('Actual: ', actualBuf) + + if (expectedBuf.length !== actualBuf.length) { + console.error('Length mismatch') + process.exit(1) + } + + for (let i = 0; i < expectedBuf.length; i++) { + if (expectedBuf[i] !== actualBuf[i]) { + console.error( + `Mismatch at index ${i}: expected ${expectedBuf[i]}, got ${actualBuf[i]}`, + ) + process.exit(1) + } + } + + console.log('ModifyMainHeader match!') + + // Test packed struct: QueryHeader + const queryHeader: QueryHeader = { + op: QueryType.ids, + prop: 10, + typeId: 99, + edgeTypeId: 88, + offset: 100, + limit: 50, + filterSize: 10, + searchSize: 20, + edgeSize: 30, + edgeFilterSize: 40, + includeSize: 5, + iteratorType: QueryIteratorType.desc, + size: 200, + sort: true, + } + + const expectedQueryBuf = createQueryHeader(queryHeader) + const autoQueryBuf = new AutoSizedUint8Array() + pushQueryHeader(autoQueryBuf, queryHeader) + const actualQueryBuf = autoQueryBuf.view + + console.log('QueryHeader Expected:', expectedQueryBuf) + console.log('QueryHeader Actual: ', actualQueryBuf) + + if (expectedQueryBuf.length !== actualQueryBuf.length) { + console.error('QueryHeader Length mismatch') + process.exit(1) + } + + for (let i = 0; i < expectedQueryBuf.length; i++) { + if (expectedQueryBuf[i] !== actualQueryBuf[i]) { + console.error( + `QueryHeader Mismatch at index ${i}: expected ${expectedQueryBuf[i]}, got ${actualQueryBuf[i]}`, + ) + process.exit(1) + } + } + console.log('QueryHeader match!') +} + +test() diff --git a/scripts/zigTsExports.ts b/scripts/zigTsExports.ts index c072a48e75..52b7e5ab5b 100644 --- a/scripts/zigTsExports.ts +++ b/scripts/zigTsExports.ts @@ -16,7 +16,8 @@ const parseZig = (input: string): string => { readUint32, readInt32, readUint64, readInt64, readFloatLE, readDoubleLE -} from './utils/index.js'\n\n` +} from './utils/index.js' +import { AutoSizedUint8Array } from './utils/AutoSizedUint8Array.js'\n\n` // Symbol tables const typeSizes: Record = { @@ -385,14 +386,10 @@ const parseZig = (input: string): string => { structs[name] = { isPacked, bitSize: totalBits } - if (isPacked) { - // Generate pack/unpack helpers for this packed struct (BigInt based for >32 bit support) - // Pack: takes Object -> returns bigint - // Unpack: takes bigint -> returns Object - + if (isPacked && totalBits <= 32) { // Pack - output += `export const pack${name} = (obj: ${name}): bigint => {\n` - output += ` let val = 0n\n` + output += `export const pack${name} = (obj: ${name}): number => {\n` + output += ` let val = 0\n` let currentBit = 0 fields.forEach((f) => { if (f.isPadding) { @@ -401,22 +398,22 @@ const parseZig = (input: string): string => { } let valExpr = `obj.${f.name}` if (f.isBoolean) { - valExpr = `(${valExpr} ? 1n : 0n)` + valExpr = `(${valExpr} ? 1 : 0)` } else if (f.isStruct) { valExpr = `pack${f.type}(${valExpr})` } else { - // Cast to BigInt - valExpr = `BigInt(${valExpr})` + valExpr = `Number(${valExpr})` } - output += ` val |= (${valExpr} & ${(1n << BigInt(f.bitSize)) - 1n}n) << ${currentBit}n\n` + const mask = f.bitSize === 32 ? -1 : (1 << f.bitSize) - 1 + output += ` val |= (${valExpr} & ${mask}) << ${currentBit}\n` currentBit += f.bitSize }) output += ` return val\n` output += `}\n\n` // Unpack - output += `export const unpack${name} = (val: bigint): ${name} => {\n` + output += `export const unpack${name} = (val: number): ${name} => {\n` output += ` return {\n` currentBit = 0 fields.forEach((f) => { @@ -425,18 +422,20 @@ const parseZig = (input: string): string => { return } - let readExpr = `(val >> ${currentBit}n) & ${(1n << BigInt(f.bitSize)) - 1n}n` + const mask = f.bitSize === 32 ? -1 : (1 << f.bitSize) - 1 + let readExpr = `(val >>> ${currentBit}) & ${mask}` if (f.isBoolean) { - readExpr = `(${readExpr}) === 1n` + readExpr = `(${readExpr}) === 1` } else if (f.isStruct) { readExpr = `unpack${f.type}(${readExpr})` } else { - readExpr = `Number(${readExpr})` if (f.type.endsWith('Enum')) { readExpr = `(${readExpr}) as ${f.type}` } else if (f.type === 'TypeId') { readExpr = `(${readExpr}) as TypeId` + } else { + readExpr = `Number(${readExpr})` } } @@ -749,7 +748,7 @@ const parseZig = (input: string): string => { else if (sBits <= 32) readExpr = `readUint32(buf, ${offStr})` else readExpr = `readUint64(buf, ${offStr})` - readExpr = `unpack${prim}(BigInt(${readExpr}))` + readExpr = `unpack${prim}(${readExpr})` } else { switch (prim) { case 'u8': @@ -859,7 +858,7 @@ const parseZig = (input: string): string => { } if (f.isStruct) { - readExpr = `unpack${f.type}(BigInt(${readExpr}))` + readExpr = `unpack${f.type}(${readExpr})` } else if (f.type.endsWith('Enum')) { readExpr = `(${readExpr}) as ${f.type}` } else if (f.type === 'TypeId') { @@ -903,7 +902,7 @@ const parseZig = (input: string): string => { else if (sBits <= 32) readExpr = `readUint32(buf, ${offStr})` else readExpr = `readUint64(buf, ${offStr})` - readExpr = `unpack${prim}(BigInt(${readExpr}))` + readExpr = `unpack${prim}(${readExpr})` } else { switch (prim) { case 'u8': @@ -1011,7 +1010,7 @@ const parseZig = (input: string): string => { } if (f.isStruct) { - readExpr = `unpack${f.type}(BigInt(${readExpr}))` + readExpr = `unpack${f.type}(${readExpr})` } else if (f.type.endsWith('Enum')) { readExpr = `(${readExpr}) as ${f.type}` } else if (f.type === 'TypeId') { @@ -1029,6 +1028,136 @@ const parseZig = (input: string): string => { output += ` write${name}(buffer, header, 0)\n` output += ` return buffer\n` output += `}\n\n` + + // 7. Export Push Function + output += `export const push${name} = (\n` + output += ` buf: AutoSizedUint8Array,\n` + output += ` header: ${name},\n` + output += `): number => {\n` + output += ` const index = buf.length\n` + + if (!isPacked) { + fields.forEach((f) => { + const fName = f.name + const prim = getPrimitive( + body + .find((l) => l.includes(`${fName}:`)) + ?.match(regexStructField)?.[2] || 'u8', + ) + const valRef = f.isPadding ? '0' : `header.${fName}` + + switch (prim) { + case 'u8': + case 'LangCode': + output += ` buf.pushUint8(${valRef})\n` + break + case 'bool': + output += ` buf.pushUint8(${valRef} ? 1 : 0)\n` + break + case 'i8': + output += ` buf.pushUint8(${valRef})\n` + break + case 'u16': + output += ` buf.pushUint16(${valRef})\n` + break + case 'i16': + output += ` buf.pushUint16(${valRef})\n` + break + case 'u32': + output += ` buf.pushUint32(${valRef})\n` + break + case 'i32': + output += ` buf.pushUint32(${valRef})\n` + break + case 'f32': + output += ` buf.pushFloatLE(${valRef})\n` + break + case 'u64': + case 'usize': + output += ` buf.pushUint64(${valRef})\n` + break + case 'i64': + output += ` buf.pushInt64(${valRef})\n` + break + case 'f64': + output += ` buf.pushDoubleLE(${valRef})\n` + break + default: + // Fallback for unknown types or padding greater than handled above + const byteCount = Math.ceil(f.bitSize / 8) + for (let k = 0; k < byteCount; k++) { + output += ` buf.pushUint8(0)\n` + } + } + }) + } else { + let currentBitGlobal = 0 + + for (let i = 0; i < fields.length; i++) { + const f = fields[i] + + if (currentBitGlobal % 8 === 0 && [8, 16, 32, 64].includes(f.bitSize)) { + const fName = f.name + const valRef = f.isPadding ? '0' : `header.${fName}` + let valWithTernary = + f.isBoolean && !f.isPadding ? `(${valRef} ? 1 : 0)` : valRef + + if (f.isStruct && !f.isPadding) { + valWithTernary = `pack${f.type}(${valRef})` + } + + if (f.bitSize === 8) { + output += ` buf.pushUint8(Number(${valWithTernary}))\n` + } else if (f.bitSize === 16) { + output += ` buf.pushUint16(Number(${valWithTernary}))\n` + } else if (f.bitSize === 32) { + output += ` buf.pushUint32(Number(${valWithTernary}))\n` + } else if (f.bitSize === 64) { + output += ` buf.pushUint64(${valWithTernary})\n` + } + currentBitGlobal += f.bitSize + } else { + let remainingBits = f.bitSize + let valExpression = f.isPadding ? '0' : `header.${f.name}` + + if (f.isBoolean && !f.isPadding) { + valExpression = `(${valExpression} ? 1 : 0)` + } else if (f.isStruct && !f.isPadding) { + valExpression = `Number(pack${f.type}(${valExpression}))` + } + + let bitsProcessed = 0 + while (remainingBits > 0) { + const bitInByte = currentBitGlobal % 8 + const bitsCanFitInByte = 8 - bitInByte + const bitsToWrite = Math.min(remainingBits, bitsCanFitInByte) + + const mask = (1 << bitsToWrite) - 1 + + if (bitInByte === 0) { + // New byte started + output += ` buf.pushUint8(0)\n` + } + + // Access the last byte using view directly OR ensure pushUint8(0) initialized it + // We know we just pushed a byte if bitInByte == 0. + // If bitInByte > 0, the byte exists at buf.length - 1 + // But we need to be careful about not relying on `buf.view` if possible? + // Actually `buf.view` property exists on AutoSizedUint8Array. + // Let's use `buf.view[buf.length - 1] |= ...` + + output += ` buf.view[buf.length - 1] |= ((${valExpression} >>> ${bitsProcessed}) & ${mask}) << ${bitInByte}\n` + + currentBitGlobal += bitsToWrite + bitsProcessed += bitsToWrite + remainingBits -= bitsToWrite + } + } + } + } + + output += ` return index\n` + output += `}\n\n` } return output diff --git a/src/client/websocket/FakeWebsocket.ts b/src/client/websocket/FakeWebsocket.ts index ec32a3b544..71f10fc21a 100644 --- a/src/client/websocket/FakeWebsocket.ts +++ b/src/client/websocket/FakeWebsocket.ts @@ -20,7 +20,8 @@ const syncSubs = (ws: FakeWebsocket) => { export class FakeWebsocket { url: string - authState: string[] + // TODO remove these ts guards + authState!: string[] constructor(url: string, restPrefix: string, client: BasedClient) { // wss://1x7j3eroh-5mzale2mp.based.dev/ze1xch7kjGQtwg const segments = url.split('/') @@ -30,13 +31,13 @@ export class FakeWebsocket { this._r = restPrefix syncSubs(this) } - timer: ReturnType + timer!: ReturnType client: BasedClient - _c: boolean + _c!: boolean _r: string - _om: (x?: any) => void - _oe: (x?: any) => void - _oc: (x?: any) => void + _om!: (x?: any) => void + _oe!: (x?: any) => void + _oc!: (x?: any) => void close() { this._c = true if (this._oc) { diff --git a/src/client/websocket/types.ts b/src/client/websocket/types.ts index 6ef861381a..bab7ab62ef 100644 --- a/src/client/websocket/types.ts +++ b/src/client/websocket/types.ts @@ -3,7 +3,7 @@ import WebSocket from 'isomorphic-ws' export class Connection { public ws?: WebSocket public disconnected?: boolean - destroy: () => void + destroy!: () => void public fallBackTimer?: ReturnType public fallBackInProgress?: boolean public useFallback?: string diff --git a/src/db-client/hooks.ts b/src/db-client/hooks.ts index a41d619eeb..5b628d2a4f 100644 --- a/src/db-client/hooks.ts +++ b/src/db-client/hooks.ts @@ -8,7 +8,7 @@ export type DbClientHooks = { schema: SchemaOut, transformFns?: SchemaMigrateFns, ): Promise - flushModify(buf: Uint8Array): Promise + flushModify(buf: Uint8Array): Promise getQueryBuf(buf: Uint8Array): ReturnType subscribe( q: BasedDbQuery, @@ -26,7 +26,7 @@ export const getDefaultHooks = (server: DbServer): DbClientHooks => { onError: OnError, ) { server.subscribe(q.subscriptionBuffer!, onData) - return () => { } + return () => {} }, setSchema(schema: SchemaOut, transformFns) { return server.setSchema(schema, transformFns) @@ -41,17 +41,15 @@ export const getDefaultHooks = (server: DbServer): DbClientHooks => { }, flushModify(buf: Uint8Array) { const x = buf.slice(0) - const res = server.modify(x) if (res instanceof Promise) { return res.then((res) => { server.keepRefAliveTillThisPoint(x) - return res && new Uint8Array(res) + return new Uint8Array(res) }) } - - return Promise.resolve(res && new Uint8Array(res)) + return Promise.resolve(new Uint8Array(res)) }, getQueryBuf(buf: Uint8Array) { return server.getQueryBuf(buf) diff --git a/src/db-client/index.ts b/src/db-client/index.ts index f8ebe125dd..fc08225888 100644 --- a/src/db-client/index.ts +++ b/src/db-client/index.ts @@ -4,20 +4,24 @@ import { SubStore } from './query/subscription/index.js' import { DbShared } from '../shared/DbBase.js' import { DbClientHooks } from './hooks.js' import { setLocalClientSchema } from './setLocalClientSchema.js' -import { ModifyOpts } from './modify/types.js' -import { create } from './modify/create/index.js' -import { Ctx } from './modify/Ctx.js' -import { update } from './modify/update/index.js' -import { del } from './modify/delete/index.js' -import { expire } from './modify/expire/index.js' -import { cancel, drain, schedule } from './modify/drain.js' -import { insert, upsert } from './modify/upsert/index.js' import { parse, type SchemaIn, type SchemaMigrateFns, type SchemaOut, + type ResolveSchema, + type StrictSchema, } from '../schema/index.js' +import { AutoSizedUint8Array } from '../utils/AutoSizedUint8Array.js' +import { LangCode, Modify } from '../zigTsExports.js' +import { ModifyCtx, flush, BasedModify } from './modify/index.js' +import type { InferPayload, InferTarget } from './modify/types.js' +import { serializeCreate } from './modify/create.js' +import { serializeUpdate } from './modify/update.js' +import { serializeDelete } from './modify/delete.js' +import { serializeUpsert } from './modify/upsert.js' +import { BasedQuery2 } from './query2/index.js' +import type { InferSchemaOutput } from './query2/types.js' type DbClientOpts = { hooks: DbClientHooks @@ -26,7 +30,20 @@ type DbClientOpts = { debug?: boolean } -export class DbClient extends DbShared { +export type BasedCreatePromise = BasedModify +export type BasedUpdatePromise = BasedModify +export type BasedDeletePromise = BasedModify +export type BasedUpsertPromise = BasedModify +export type BasedInsertPromise = BasedUpsertPromise + +export type ModifyOpts = { + unsafe?: boolean + locale?: keyof typeof LangCode +} + +export class DbClientClass< + S extends { types: any } = SchemaOut, +> extends DbShared { constructor({ hooks, maxModifySize = 100 * 1e3 * 1e3, @@ -35,37 +52,25 @@ export class DbClient extends DbShared { }: DbClientOpts) { super() this.hooks = hooks - this.maxModifySize = maxModifySize - this.modifyCtx = new Ctx( - 0, - new Uint8Array( - new ArrayBuffer(Math.min(1e3, maxModifySize), { - maxByteLength: maxModifySize, - }), - ), - ) - this.flushTime = flushTime - + this.modifyCtx = { + buf: new AutoSizedUint8Array(256, maxModifySize), + flushTime, + batch: { count: 0 }, + hooks, + } if (debug) { debugMode(this) } + this.hooks.subscribeSchema((schema) => { setLocalClientSchema(this, schema) }) } subs = new Map() - stopped: boolean + stopped!: boolean hooks: DbClientHooks - - // modify - flushTime: number - writeTime: number = 0 - isDraining = false - modifyCtx: Ctx - maxModifySize: number - upserting: Map; p: Promise }> = - new Map() + modifyCtx: ModifyCtx async schemaIsSet() { if (!this.schema) { @@ -73,71 +78,131 @@ export class DbClient extends DbShared { } } - async setSchema( - schema: SchemaIn, + async setSchema( + schema: StrictSchema, transformFns?: SchemaMigrateFns, - ): Promise { - const strictSchema = parse(schema).schema + ): Promise>> { + const strictSchema = parse(schema as any).schema await this.drain() const schemaChecksum = await this.hooks.setSchema( strictSchema as SchemaOut, transformFns, ) if (this.stopped) { - return this.schema?.hash ?? 0 + return this as unknown as DbClientClass> } if (schemaChecksum !== this.schema?.hash) { await this.once('schema') - return this.schema?.hash ?? 0 + return this as unknown as DbClientClass> } - return schemaChecksum + return this as unknown as DbClientClass> } - create(type: string, obj = {}, opts?: ModifyOpts): Promise { - return create(this, type, obj, opts) + query2( + type: T, + id?: number[], + ): BasedQuery2 + + query2( + type: T, + id: + | number + | (Partial> & { [Symbol.toStringTag]?: never }), + ): BasedQuery2 + query2( + type: T, + id?: + | number + | number[] + | (Partial> & { [Symbol.toStringTag]?: never }), + ): BasedQuery2 { + return new BasedQuery2(this, type, id) } - async copy( - type: string, - target: number, - objOrTransformFn?: - | Record - | ((item: Record) => Promise), - ): Promise { - const item = await this.query(type, target) - .include('*', '**.id') - .get() - .toObject() + create( + type: T, + obj?: InferPayload, + opts?: ModifyOpts, + ): BasedCreatePromise { + return new BasedModify( + this.modifyCtx, + serializeCreate, + this.schema!, + type, + obj ?? {}, + this.modifyCtx.buf, + opts?.locale ? LangCode[opts.locale] : LangCode.none, + ) + } - if (typeof objOrTransformFn === 'function') { - const { id: _, ...props } = await objOrTransformFn(item) - return this.create(type, props) - } + update( + type: T, + target: number | BasedModify, + obj: InferPayload, + opts?: ModifyOpts, + ): BasedUpdatePromise { + return new BasedModify( + this.modifyCtx, + serializeUpdate, + this.schema!, + type, + target, + obj, + this.modifyCtx.buf, + opts?.locale ? LangCode[opts.locale] : LangCode.none, + ) + } - if (typeof objOrTransformFn === 'object' && objOrTransformFn !== null) { - const { id: _, ...props } = item - await Promise.all( - Object.keys(objOrTransformFn).map(async (key) => { - const val = objOrTransformFn[key] - if (val === null) { - delete props[key] - } else if (typeof val === 'function') { - const res = await val(item) - if (Array.isArray(res)) { - props[key] = await Promise.all(res) - } else { - props[key] = res - } - } else { - props[key] = val - } - }), - ) - return this.create(type, props) - } + upsert( + type: T, + target: InferTarget, + obj: InferPayload, + opts?: ModifyOpts, + ): BasedUpsertPromise { + return new BasedModify( + this.modifyCtx, + serializeUpsert, + this.schema!, + type, + target, + obj, + this.modifyCtx.buf, + opts?.locale ? LangCode[opts.locale] : LangCode.none, + Modify.upsert, + ) + } + + insert( + type: T, + target: InferTarget, + obj: InferPayload, + opts?: ModifyOpts, + ): BasedInsertPromise { + return new BasedModify( + this.modifyCtx, + serializeUpsert, + this.schema!, + type, + target, + obj, + this.modifyCtx.buf, + opts?.locale ? LangCode[opts.locale] : LangCode.none, + Modify.insert, + ) + } - const { id: _, ...props } = item - return this.create(type, props) + delete( + type: keyof S['types'] & string, + target: number | BasedModify, + ): BasedDeletePromise { + return new BasedModify( + this.modifyCtx, + serializeDelete, + this.schema!, + type, + target, + this.modifyCtx.buf, + ) } query( @@ -164,92 +229,6 @@ export class DbClient extends DbShared { return new BasedDbQuery(this, type, id as number | number[] | Uint32Array) } - update( - type: string, - id: number | Promise, - value: any, - opts?: ModifyOpts, - ): Promise - - update( - type: string, - value: Record & { id: number }, - opts?: ModifyOpts, - ): Promise - - update( - typeOrValue: string | any, - idOverwriteOrValue: - | number - | Promise - | boolean - | ModifyOpts - | (Record & { id: number }), - value?: any, - opts?: ModifyOpts, - ): Promise { - if (typeof typeOrValue !== 'string') { - return this.update( - '_root', - 1, - typeOrValue, - idOverwriteOrValue as ModifyOpts, - ) - } - if (typeof idOverwriteOrValue === 'object') { - if ( - 'then' in idOverwriteOrValue && - typeof idOverwriteOrValue.then === 'function' - ) { - // @ts-ignore - if (idOverwriteOrValue.id) { - // @ts-ignore - return this.update(typeOrValue, idOverwriteOrValue.id, value, opts) - } - return idOverwriteOrValue.then((id: number) => { - return this.update(typeOrValue, id, value, opts) - }) - } - if ('id' in idOverwriteOrValue) { - const { id, ...props } = idOverwriteOrValue - return this.update(typeOrValue, id, props, opts) - } - } - return update(this, typeOrValue, idOverwriteOrValue as number, value, opts) - } - - upsert(type: string, obj: Record, opts?: ModifyOpts) { - return upsert(this, type, obj, opts) - } - - insert(type: string, obj: Record, opts?: ModifyOpts) { - return insert(this, type, obj, opts) - } - - delete(type: string, id: number | Promise) { - if ( - typeof id === 'object' && - id !== null && - 'then' in id && - typeof id.then === 'function' - ) { - // @ts-ignore - if (id.id) { - // @ts-ignore - id = id.id - } else { - // @ts-ignore - return id.then((id) => this.delete(type, id)) - } - } - // @ts-ignore - return del(this, type, id) - } - - expire(type: string, id: number, seconds: number) { - return expire(this, type, id, seconds) - } - destroy() { this.stop() this.listeners = {} @@ -261,21 +240,30 @@ export class DbClient extends DbShared { onClose() } this.subs.clear() - cancel(this.modifyCtx, Error('Db stopped - in-flight modify cancelled')) + // cancel(this.modifyCtx, Error('Db stopped - in-flight modify cancelled')) } // For more advanced / internal usage - use isModified instead for most cases - async drain() { - if (this.upserting.size) { - await Promise.all(Array.from(this.upserting).map(([, { p }]) => p)) - } - await drain(this, this.modifyCtx) - const t = this.writeTime - this.writeTime = 0 - return t + drain() { + flush(this.modifyCtx) + return this.isModified() } - isModified() { - return schedule(this, this.modifyCtx) + async isModified() { + let lastModify + while (lastModify !== this.modifyCtx.lastModify) { + lastModify = this.modifyCtx.lastModify + await lastModify.catch(noop) + } } } + +export type DbClient = DbClientClass + +export const DbClient = DbClientClass as { + new ( + opts: DbClientOpts, + ): DbClient> +} + +function noop() {} diff --git a/src/db-client/modify/create.ts b/src/db-client/modify/create.ts new file mode 100644 index 0000000000..3c21aabef8 --- /dev/null +++ b/src/db-client/modify/create.ts @@ -0,0 +1,54 @@ +import type { SchemaOut } from '../../schema.js' +import type { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import { + Modify, + pushModifyCreateHeader, + writeModifyCreateHeaderProps, + pushModifyCreateRingHeader, + writeModifyCreateRingHeaderProps, + type LangCodeEnum, +} from '../../zigTsExports.js' +import { execHooks, getTypeDef } from './index.js' +import { serializeProps } from './props.js' + +export const serializeCreate = ( + schema: SchemaOut, + type: string, + payload: Record, + buf: AutoSizedUint8Array, + lang: LangCodeEnum, +) => { + const typeDef = getTypeDef(schema, type) + if (typeDef.schema.capped) { + const index = pushModifyCreateRingHeader(buf, { + op: Modify.createRing, + type: typeDef.id, + maxNodeId: typeDef.schema.capped, + size: 0, + }) + const start = buf.length + serializeProps( + typeDef.tree, + execHooks(typeDef, payload, 'create'), + buf, + Modify.create, + lang, + ) + writeModifyCreateRingHeaderProps.size(buf.data, buf.length - start, index) + } else { + const index = pushModifyCreateHeader(buf, { + op: Modify.create, + type: typeDef.id, + size: 0, + }) + const start = buf.length + serializeProps( + typeDef.tree, + execHooks(typeDef, payload, 'create'), + buf, + Modify.create, + lang, + ) + writeModifyCreateHeaderProps.size(buf.data, buf.length - start, index) + } +} diff --git a/src/db-client/modify/delete.ts b/src/db-client/modify/delete.ts new file mode 100644 index 0000000000..2bb235dc26 --- /dev/null +++ b/src/db-client/modify/delete.ts @@ -0,0 +1,29 @@ +import type { SchemaOut } from '../../schema.js' +import type { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import { Modify, pushModifyDeleteHeader } from '../../zigTsExports.js' +import { assignTarget, BasedModify, getTypeDef } from './index.js' + +export const serializeDelete = < + S extends SchemaOut = SchemaOut, + T extends keyof S['types'] & string = keyof S['types'] & string, +>( + schema: S, + type: T, + item: number | BasedModify, + buf: AutoSizedUint8Array, +) => { + const typeDef = getTypeDef(schema, type) + if (typeDef.schema.insertOnly) { + throw new Error('This type is insertOnly') + } + const header = assignTarget(item, { + op: Modify.delete, + type: typeDef.id, + }) + pushModifyDeleteHeader(buf, { + op: Modify.delete, + isTmp: header.isTmp, + id: header.id, + type: typeDef.id, + }) +} diff --git a/src/db-client/modify/index.ts b/src/db-client/modify/index.ts new file mode 100644 index 0000000000..a01b2bf3b1 --- /dev/null +++ b/src/db-client/modify/index.ts @@ -0,0 +1,259 @@ +import { SchemaOut } from '../../schema.js' +import { pushModifyHeader, writeModifyHeaderProps } from '../../zigTsExports.js' +import { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import { getTypeDefs } from '../../schema/defs/getTypeDefs.js' +import { readUint32 } from '../../utils/uint8.js' +import type { serializeCreate } from './create.js' +import type { serializeUpdate } from './update.js' +import type { serializeDelete } from './delete.js' +import type { serializeUpsert } from './upsert.js' +import type { TypeDef } from '../../schema/defs/index.js' +import { getByPath, setByPath } from '../../utils/path.js' +export { getTypeDefs } + +export const execHooks = ( + typeDef: TypeDef, + payload: Record, + key: 'create' | 'update', +) => { + const typeHook = typeDef.schema.hooks?.[key] + if (typeHook) payload = typeHook(payload) ?? payload + for (const def of typeDef.propHooks[key]) { + const propHook = def.schema.hooks![key]! + const res = propHook(getByPath(payload, def.path), payload) + if (res !== undefined) setByPath(payload, def.path, res) + } + return payload +} + +export const getTypeDef = (schema: SchemaOut, type: string): TypeDef => { + const typeDef = getTypeDefs(schema).get(type) + if (!typeDef) { + throw new Error(`Type ${type} not found`) + } + return typeDef +} + +export const getRealId = (item: unknown) => { + if (typeof item === 'number') return item + if (item instanceof BasedModify) return item.id +} + +export const getTmpId = (item: unknown) => { + if (item instanceof BasedModify) return item.tmpId +} + +export const assignTarget = < + H extends Record & { id?: number; isTmp?: boolean }, +>( + item: unknown, + header: H, +): H & { id: number; isTmp: boolean } => { + const realId = getRealId(item) + const id = realId || getTmpId(item) + if (id === undefined) { + if (item instanceof BasedModify) { + throw item + } + throw new Error('Invalid id') + } + header.id = id + header.isTmp = !realId + return header as H & { id: number; isTmp: boolean } +} + +type ModifySerializer = + | typeof serializeCreate + | typeof serializeUpdate + | typeof serializeDelete + | typeof serializeUpsert + +type ModifyBatch = { + count: number + promises?: BasedModify[] + dependents?: BasedModify[] + result?: Uint8Array + flushed?: true +} + +export type ModifyCtx = { + buf: AutoSizedUint8Array + batch: ModifyBatch + flushTime: number + lastModify?: BasedModify + flushTimer?: NodeJS.Timeout | true | undefined + hooks: { + flushModify: (buf: Uint8Array) => Promise + } +} + +export const flush = (ctx: ModifyCtx) => { + if (ctx.buf.length === 0) return + const batch = ctx.batch + writeModifyHeaderProps.count(ctx.buf.data, batch.count, 0) + batch.flushed = true + ctx.hooks.flushModify(ctx.buf.view).then((result) => { + batch.result = result + const promises = batch.promises + const dependents = batch.dependents + if (dependents) { + batch.dependents = undefined + for (const item of dependents) { + item._exec.apply(item, item._arguments) + if (item._resolve) { + item._await() + } + } + } + if (promises) { + batch.promises = undefined + for (const item of promises) { + const id = item.id + const err = item.error + if (err) { + item._reject!(err) + } else { + item._resolve!(id!) + } + } + } + }) + + ctx.buf.length = 0 + ctx.batch = { count: 0 } +} + +const schedule = (ctx: ModifyCtx) => { + if (ctx.flushTimer) return + if (ctx.flushTime === 0) { + ctx.flushTimer = true + process.nextTick(() => { + ctx.flushTimer = undefined + flush(ctx) + }) + } else { + ctx.flushTimer = setTimeout(() => { + ctx.flushTimer = undefined + flush(ctx) + }, ctx.flushTime) + } +} + +export class BasedModify any = ModifySerializer> + implements Promise +{ + [Symbol.toStringTag]!: 'BasedModify' + constructor(ctx: ModifyCtx, serialize: S, ...args: Parameters) { + this._exec(ctx, serialize, ...args) + } + private _result() { + if (this._batch?.result) { + this._id = readUint32(this._batch.result, this._index! * 5) + const errCode = this._batch.result[this._index! * 5 + 4] + if (errCode) this._error = new Error('ModifyError: ' + errCode) + } + } + get id(): number | undefined { + this._result() + return this._id + } + get error(): Error | undefined { + this._result() + return this._error + } + get tmpId(): number | undefined { + if (this._batch && !this._batch.flushed) { + return this._index + } + } + get promise(): Promise { + this._promise ??= new Promise((resolve, reject) => { + if (this.id) { + resolve(this.id) + } else if (this.error) { + reject(this.error) + } else { + this._resolve = resolve + this._reject = reject + this._await() + } + }) + return this._promise + } + + private _id?: number + private _error?: Error + private _blocker?: BasedModify + private _index?: number + private _batch?: ModifyBatch + private _promise?: Promise + + _arguments?: IArguments + _resolve?: (value: number | PromiseLike) => void + _reject?: (reason?: any) => void + _await() { + if (this._batch) { + this._batch.promises ??= [] + this._batch.promises.push(this) + } + } + _exec(ctx: ModifyCtx, serialize: S, ...args: Parameters) { + const isEmpty = ctx.buf.length === 0 + if (isEmpty) { + pushModifyHeader(ctx.buf, { + opId: 0, // is filled on server + opType: 0, // is filled on server + schema: args[0].hash, + count: 0, + }) + } + const initialLength = ctx.buf.length + try { + ;(serialize as any)(...args) + } catch (e) { + ctx.buf.length = initialLength + if (e === AutoSizedUint8Array.ERR_OVERFLOW) { + if (isEmpty) throw new Error('Range error') + flush(ctx) + this._exec.apply(this, arguments) + return + } else if (e instanceof BasedModify) { + let blocker: BasedModify = e + while (blocker._blocker) blocker = blocker._blocker + blocker._batch!.dependents ??= [] + blocker._batch!.dependents.push(this) + this._blocker = blocker + this._arguments = arguments + return + } else if (this._arguments) { + // its in async mode + this._error = e + this._reject?.(e) + return + } else { + this._error = e + throw e + } + } + + schedule(ctx) + this._batch = ctx.batch + this._index = ctx.batch.count++ + ctx.lastModify = this + } + + then( + onfulfilled?: ((value: number) => Res1 | PromiseLike) | null, + onrejected?: ((reason: any) => Res2 | PromiseLike) | null, + ): Promise { + return this.promise.then(onfulfilled, onrejected) + } + catch( + onrejected?: ((reason: any) => Res | PromiseLike) | null, + ): Promise { + return this.promise.catch(onrejected) + } + finally(onfinally?: (() => void) | null): Promise { + return this.promise.finally(onfinally) + } +} diff --git a/src/db-client/modify/props.ts b/src/db-client/modify/props.ts new file mode 100644 index 0000000000..674d897681 --- /dev/null +++ b/src/db-client/modify/props.ts @@ -0,0 +1,77 @@ +import { + isPropDef, + type PropDef, + type PropTree, +} from '../../schema/defs/index.js' +import type { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import { + Modify, + pushModifyMainHeader, + pushModifyPropHeader, + writeModifyPropHeaderProps, + type LangCodeEnum, + type ModifyEnum, +} from '../../zigTsExports.js' + +export const serializeProps = ( + tree: PropTree, + data: any, + buf: AutoSizedUint8Array, + op: ModifyEnum, + lang: LangCodeEnum, +) => { + if (op !== Modify.update) { + for (const key of tree.required) { + if (!(key in data)) { + const def = tree.props.get(key)! + throw new Error( + `Field ${'path' in def ? def.path.join('.') : key} is required`, + ) + } + } + } + for (const key in data) { + const def = tree.props.get(key) + if (def === undefined) { + continue + } + const val = data[key] + if (isPropDef(def)) { + const prop = def as PropDef + if (prop.id === 0) { + // main + const increment = typeof val === 'object' && val?.increment + pushModifyMainHeader(buf, { + id: 0, + start: prop.start, + type: prop.type, + size: prop.size, + increment: !!increment, + incrementPositive: increment > 0, + expire: ('expire' in prop.schema && prop.schema.expire) || false, + }) + if (val === null) { + buf.fill(0, buf.length, buf.length + prop.size) + } else { + prop.pushValue(buf, increment ? Math.abs(increment) : val, op, lang) + } + } else { + // separate + const index = pushModifyPropHeader(buf, prop) + if (val !== null) { + const start = buf.length + prop.pushValue(buf, val, op, lang) + writeModifyPropHeaderProps.size(buf.data, buf.length - start, index) + } + } + } else if (typeof val === 'object') { + if (val === null) { + const empty = {} + for (const [key] of def.props) empty[key] = null + serializeProps(def, empty, buf, op, lang) + } else { + serializeProps(def, val, buf, op, lang) + } + } + } +} diff --git a/src/db-client/modify/types.ts b/src/db-client/modify/types.ts index 43426e1df9..38ee81b4a9 100644 --- a/src/db-client/modify/types.ts +++ b/src/db-client/modify/types.ts @@ -1,35 +1,135 @@ -import { LangCode, ModOp } from '../../zigTsExports.js' - -export const RANGE_ERR = 1 -export const MOD_OPS_TO_STRING = { - [ModOp.createProp]: 'create', - [ModOp.updateProp]: 'update', - [ModOp.increment]: 'update', - [ModOp.expire]: 'update', -} as const - -export const enum SIZE { - DEFAULT_CURSOR = 11, +import type { BasedModify } from './index.js' +import type { TypedArray } from '../../schema/index.js' + +type NumInc = number | { increment: number } + +type TypeMap = { + string: string + number: NumInc + int8: NumInc + uint8: NumInc + int16: NumInc + uint16: NumInc + int32: NumInc + uint32: NumInc + boolean: boolean + text: string | Record + json: any + timestamp: NumInc | string | Date + binary: Uint8Array + alias: string + vector: TypedArray + colvec: TypedArray + cardinality: string | string[] +} + +type EdgeKeys = keyof T extends infer K + ? K extends string + ? string extends K + ? never + : K extends `$${string}` + ? K + : never + : never + : never + +type InferEdgeProps< + Prop, + Types, + Locales extends Record = Record, +> = { + [K in EdgeKeys]?: Prop[K] extends keyof TypeMap + ? TypeMap[Prop[K]] + : InferProp } -export type ModifyOpts = { - unsafe?: boolean - locale?: keyof typeof LangCode +type InferRefValue< + Prop, + Types, + Locales extends Record = Record, +> = + | number + | BasedModify + | (EdgeKeys extends never + ? { id: number | BasedModify } + : { id: number | BasedModify } & InferEdgeProps< + Prop, + Types, + Locales + >) + +type InferReferences< + Prop, + Types, + Locales extends Record = Record, +> = + | InferRefValue[] + | { + add?: Prettify>[] + update?: Prettify>[] + delete?: (number | BasedModify)[] + } + +type InferProp< + Prop, + Types, + Locales extends Record = Record, +> = Prop extends { type: 'text' } + ? string | Partial> + : Prop extends { type: 'object'; props: infer P } + ? InferType + : Prop extends { type: infer T extends keyof TypeMap } + ? TypeMap[T] + : Prop extends { enum: infer E extends readonly any[] } + ? E[number] + : Prop extends { ref: string } + ? Prettify> + : Prop extends { items: { ref: string } } + ? Prettify> + : never + +type Prettify = Target extends any + ? Target extends (infer U)[] + ? Prettify[] + : Target extends BasedModify + ? Target + : Target extends object + ? { + -readonly [K in keyof Target]: Target[K] + } + : Target + : never + +type InferType< + Props, + Types, + Locales extends Record = Record, +> = Prettify< + { + [K in keyof Props as Props[K] extends { required: true } + ? K + : never]: InferProp + } & { + [K in keyof Props as Props[K] extends { required: true } + ? never + : K]?: InferProp | null + } +> + +export type InferPayload< + S extends { types: any; locales?: any }, + T extends keyof S['types'], +> = InferType< + S['types'][T]['props'], + S['types'], + S['locales'] extends Record ? S['locales'] : {} +> + +type InferAliasProps = { + [K in keyof Props as Props[K] extends { type: 'alias' } ? K : never]?: string } -export const NOEDGE_NOINDEX_REALID = 0 -export const EDGE_NOINDEX_REALID = 1 -export const EDGE_INDEX_REALID = 2 -export const NOEDGE_INDEX_REALID = 3 -export const NOEDGE_NOINDEX_TMPID = 4 -export const EDGE_NOINDEX_TMPID = 5 -export const EDGE_INDEX_TMPID = 6 -export const NOEDGE_INDEX_TMPID = 7 - -// export const REF_OP_OVERWRITE = 0 -// export const REF_OP_UPDATE = 1 -// export const REF_OP_DELETE = 2 -// export const REF_OP_PUT_OVERWRITE = 3 -// export const REF_OP_PUT_ADD = 4 - -// export type RefOp = typeof REF_OP_OVERWRITE | typeof REF_OP_UPDATE +export type InferTarget< + S extends { types: any }, + T extends keyof S['types'], +> = InferAliasProps diff --git a/src/db-client/modify/update.ts b/src/db-client/modify/update.ts new file mode 100644 index 0000000000..47627aa6f3 --- /dev/null +++ b/src/db-client/modify/update.ts @@ -0,0 +1,36 @@ +import type { SchemaOut } from '../../schema.js' +import type { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import { + Modify, + pushModifyUpdateHeader, + writeModifyUpdateHeaderProps, + type LangCodeEnum, +} from '../../zigTsExports.js' +import { assignTarget, BasedModify, execHooks, getTypeDef } from './index.js' +import { serializeProps } from './props.js' + +export const serializeUpdate = ( + schema: SchemaOut, + type: string, + item: number | BasedModify, + payload: Record, + buf: AutoSizedUint8Array, + lang: LangCodeEnum, +) => { + const typeDef = getTypeDef(schema, type) + const header = assignTarget(item, { + op: Modify.update, + type: typeDef.id, + size: 0, + }) + const index = pushModifyUpdateHeader(buf, header) + const start = buf.length + serializeProps( + typeDef.tree, + execHooks(typeDef, payload, 'update'), + buf, + Modify.update, + lang, + ) + writeModifyUpdateHeaderProps.size(buf.data, buf.length - start, index) +} diff --git a/src/db-client/modify/upsert.ts b/src/db-client/modify/upsert.ts new file mode 100644 index 0000000000..d6dfbea8b4 --- /dev/null +++ b/src/db-client/modify/upsert.ts @@ -0,0 +1,53 @@ +import { type SchemaOut } from '../../schema.js' +import type { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import { + Modify, + pushModifyCreateHeader, + type LangCodeEnum, + writeModifyCreateHeaderProps, +} from '../../zigTsExports.js' +import { getTypeDef, execHooks } from './index.js' +import { serializeProps } from './props.js' +import type { InferPayload, InferTarget } from './types.js' + +export const serializeUpsert = < + S extends SchemaOut = SchemaOut, + T extends keyof S['types'] & string = keyof S['types'] & string, +>( + schema: S, + type: T, + target: InferTarget, + payload: InferPayload, + buf: AutoSizedUint8Array, + lang: LangCodeEnum, + op: typeof Modify.insert | typeof Modify.upsert, +) => { + const typeDef = getTypeDef(schema, type) + const index = pushModifyCreateHeader(buf, { + op, + type: typeDef.id, + size: 0, + }) + // serialize target + const startTarget = buf.length + // TODO validate that its only aliases + serializeProps(typeDef.tree, target, buf, Modify.create, lang) + writeModifyCreateHeaderProps.size(buf.data, buf.length - startTarget, index) + // serialize payload + const sizePos = buf.reserveUint32() + const startPayload = buf.length + serializeProps( + typeDef.tree, + serializeProps( + typeDef.tree, + execHooks(typeDef, payload, 'create'), + buf, + Modify.create, + lang, + ), + buf, + Modify.update, + lang, + ) + buf.writeUint32(buf.length - startPayload, sizePos) +} diff --git a/src/db-client/query/BasedDbQuery.ts b/src/db-client/query/BasedDbQuery.ts index 313cf169f0..53193d5e83 100644 --- a/src/db-client/query/BasedDbQuery.ts +++ b/src/db-client/query/BasedDbQuery.ts @@ -264,6 +264,7 @@ export class QueryBranch { groupBy(field: string, step?: StepInput): T { if (this.queryCommands) { + // query def this.queryCommands.push({ method: 'groupBy', args: [field, step], @@ -398,14 +399,14 @@ export class QueryBranch { return this } - harmonicMean(...fields: string[]): T { + hmean(...fields: string[]): T { if (fields.length === 0) { throw new Error('Empty harmonic mean function called') } if (this.queryCommands) { this.queryCommands.push({ - method: 'harmonicMean', + method: 'hmean', args: fields, }) } else { @@ -554,7 +555,7 @@ class GetPromise extends Promise { export class BasedDbQuery extends QueryBranch { skipValidation?: boolean = false target: QueryTarget - readSchema: ReaderSchema + readSchema!: ReaderSchema constructor( db: DbClient, type: string, diff --git a/src/db-client/query/aggregates/aggregates.ts b/src/db-client/query/aggregates/aggregates.ts index a63925eea0..cada904c02 100644 --- a/src/db-client/query/aggregates/aggregates.ts +++ b/src/db-client/query/aggregates/aggregates.ts @@ -30,3 +30,38 @@ export const isRootCountOnly = (def: QueryDef, filterSize: number) => { } return true } + +export const getTimeZoneOffsetInMinutes = ( + timeZone: string, + date: Date = new Date(), +): number => { + const formatter = new Intl.DateTimeFormat('en-US', { + timeZone, + year: 'numeric', + month: 'numeric', + day: 'numeric', + hour: 'numeric', + minute: 'numeric', + second: 'numeric', + hour12: false, + }) + + const parts = formatter.formatToParts(date) + const getPart = (partName: string) => + parseInt(parts.find((p) => p.type === partName)?.value || '0', 10) + + const targetTimeAsUTC = Date.UTC( + getPart('year'), + getPart('month') - 1, + getPart('day'), + getPart('hour'), + getPart('minute'), + getPart('second'), + ) + + const originalUTCTime = date.getTime() + const offsetInMilliseconds = targetTimeAsUTC - originalUTCTime + const offsetInMinutes = offsetInMilliseconds / (1000 * 60) + + return Math.round(offsetInMinutes) +} diff --git a/src/db-client/query/aggregates/aggregation_old.ts b/src/db-client/query/aggregates/aggregation_old.ts index 616c544a9d..ed83686489 100644 --- a/src/db-client/query/aggregates/aggregation_old.ts +++ b/src/db-client/query/aggregates/aggregation_old.ts @@ -19,65 +19,7 @@ import { AggFunction, type AggFunctionEnum, } from '../../../zigTsExports.js' - -// export const aggregateToBuffer = ( -// aggregates: QueryDefAggregation, -// ): Uint8Array => { -// const aggBuffer = new Uint8Array(aggregates.size) -// let i = 0 -// if (aggregates.groupBy) { -// aggBuffer[i] = GroupBy.HAS_GROUP -// i += 1 -// aggBuffer[i] = aggregates.groupBy.prop -// i += 1 -// aggBuffer[i] = aggregates.groupBy.typeIndex -// i += 1 -// writeUint16(aggBuffer, aggregates.groupBy.start, i) -// i += 2 -// writeUint16(aggBuffer, aggregates.groupBy.len, i) -// i += 2 -// aggBuffer[i] = aggregates.groupBy.stepType || 0 -// i += 1 -// writeUint32(aggBuffer, aggregates.groupBy.stepRange || 0, i) -// i += 4 -// writeInt16(aggBuffer, aggregates.groupBy.tz || 0, i) -// i += 2 -// } else { -// aggBuffer[i] = GroupBy.NONE -// i += 1 -// } -// writeUint16(aggBuffer, aggregates.totalResultsSize, i) -// i += 2 -// writeUint16(aggBuffer, aggregates.totalAccumulatorSize, i) -// i += 2 -// aggBuffer[i] = setMode[aggregates?.option?.mode!] || 0 -// i += 1 -// for (const [prop, aggregatesArray] of aggregates.aggregates.entries()) { -// aggBuffer[i] = prop -// i += 1 -// let sizeIndex = i -// let size = 0 -// i += 2 -// for (const agg of aggregatesArray) { -// let startI = i -// aggBuffer[i] = agg.type -// i += 1 -// aggBuffer[i] = agg.propDef.typeIndex -// i += 1 -// writeUint16(aggBuffer, agg.propDef.start!, i) -// i += 2 -// writeUint16(aggBuffer, agg.resultPos, i) -// i += 2 -// writeUint16(aggBuffer, agg.accumulatorPos, i) -// i += 2 -// aggBuffer[i] = agg.propDef.__isEdge ? 1 : 0 -// i += 1 -// size += i - startI -// } -// writeUint16(aggBuffer, size, sizeIndex) -// } -// return aggBuffer -// } +import { getTimeZoneOffsetInMinutes } from './aggregates.js' const ensureAggregate = (def: QueryDef) => { if (!def.aggregate) { @@ -315,70 +257,3 @@ export const addAggregate = ( } } } - -// export const isRootCountOnly = (def: QueryDef, filterSize: number) => { -// if (filterSize != 0) { -// return false -// } -// if (def.type !== QueryDefType.Root) { -// return false -// } -// const aggregate = def.aggregate! -// if (aggregate.groupBy) { -// return false -// } -// if (aggregate.aggregates.size !== 1) { -// return false -// } -// if (!aggregate.aggregates.has(255)) { -// return false -// } -// const aggs = aggregate.aggregates.get(255)! -// if (aggs.length !== 1) { -// return false -// } -// if (aggs[0].type !== AggFunction.count) { -// return false -// } - -// // later -// // if (def.filter && def.filter.size > 0) { -// return false -// // } -// // return true -// } - -function getTimeZoneOffsetInMinutes( - timeZone: string, - date: Date = new Date(), -): number { - const formatter = new Intl.DateTimeFormat('en-US', { - timeZone, - year: 'numeric', - month: 'numeric', - day: 'numeric', - hour: 'numeric', - minute: 'numeric', - second: 'numeric', - hour12: false, - }) - - const parts = formatter.formatToParts(date) - const getPart = (partName: string) => - parseInt(parts.find((p) => p.type === partName)?.value || '0', 10) - - const targetTimeAsUTC = Date.UTC( - getPart('year'), - getPart('month') - 1, - getPart('day'), - getPart('hour'), - getPart('minute'), - getPart('second'), - ) - - const originalUTCTime = date.getTime() - const offsetInMilliseconds = targetTimeAsUTC - originalUTCTime - const offsetInMinutes = offsetInMilliseconds / (1000 * 60) - - return Math.round(offsetInMinutes) -} diff --git a/src/db-client/query/aggregates/toByteCode.ts b/src/db-client/query/aggregates/toByteCode.ts index 50c8babbc8..0050d23802 100644 --- a/src/db-client/query/aggregates/toByteCode.ts +++ b/src/db-client/query/aggregates/toByteCode.ts @@ -46,7 +46,6 @@ export const aggregateToBuffer = (def: QueryDef): IntermediateByteCode => { accumulatorSize: def.aggregate.totalAccumulatorSize, isSamplingSet: (def.aggregate?.option?.mode || 'sample') === 'sample', } - let headerBuffer: Uint8Array if (def.type == QueryDefType.References) { diff --git a/src/db-client/query/display.ts b/src/db-client/query/display.ts index 547fb2281b..20582ecd42 100644 --- a/src/db-client/query/display.ts +++ b/src/db-client/query/display.ts @@ -125,7 +125,7 @@ export const prettyPrintVal = (v: any, type: PropTypeEnum): string => { } if (type === PropType.cardinality) { - return `${styleText('blue', v)} ${styleText('italic', styleText('dim', 'unique'))}` + return `${styleText('blue', `${v}`)} ${styleText('italic', styleText('dim', 'unique'))}` } if (type === PropType.timestamp) { diff --git a/src/db-client/query/filter/condition.ts b/src/db-client/query/filter/condition.ts index 13ac1cb831..02d75340a9 100644 --- a/src/db-client/query/filter/condition.ts +++ b/src/db-client/query/filter/condition.ts @@ -45,7 +45,8 @@ const getFilterOp = ( op: FilterOp write: ReturnType } => { - if ( + if (operator === 'includes' || operator === '!includes') { + } else if ( operator === '=' || operator === '<' || operator === '>' || diff --git a/src/db-client/query/filter/toByteCode.ts b/src/db-client/query/filter/toByteCode.ts index 9d0328e2fc..c6e6b088ac 100644 --- a/src/db-client/query/filter/toByteCode.ts +++ b/src/db-client/query/filter/toByteCode.ts @@ -1,4 +1,3 @@ -import { debugBuffer } from '../../../sdk.js' import { writeUint64 } from '../../../utils/uint8.js' import { FilterConditionByteSize, @@ -9,7 +8,6 @@ import { PropType, writeFilterSelect, } from '../../../zigTsExports.js' -import { combineIntermediateResults } from '../query.js' import { byteSize } from '../toByteCode/utils.js' import { IntermediateByteCode, QueryDefFilter } from '../types.js' import { conditionBuffer } from './condition.js' @@ -17,7 +15,7 @@ import { conditionBuffer } from './condition.js' const addConditions = ( result: IntermediateByteCode[], def: QueryDefFilter, - fromLastProp: number, // bit wrong for id... + fromLastProp: number, ) => { let lastProp = -1 const prevProp = def.conditions.get(fromLastProp) @@ -38,7 +36,6 @@ const getSelectOp = ( edgeTypeId: number, isMulti: boolean, ): FilterOpCompareEnum => { - // very different if (edgeTypeId != 0) { return isMulti ? FilterOpCompare.selectLargeRefs : FilterOpCompare.selectRef } @@ -134,21 +131,10 @@ export const filterToBuffer = ( ) const nextOrIndex = resultSize + condition.byteLength + fromIndex - // console.log('DERP', nextOrIndex) - writeUint64(condition, nextOrIndex, offset) result.unshift(condition) result.push(filterToBuffer(def.or, lastProp, nextOrIndex, false)) } - // if (top && result.length > 0) { - // console.dir(logger(def), { depth: 10 }) - // const totalByteLength = byteSize(result) - // const res = new Uint8Array(totalByteLength) - // const nResult = combineIntermediateResults(res, 0, result) - // console.log('FILTER!', totalByteLength) - // debugBuffer(res) - // } - return result } diff --git a/src/db-client/query/filter/types.ts b/src/db-client/query/filter/types.ts index e579f203f2..0d946669d4 100644 --- a/src/db-client/query/filter/types.ts +++ b/src/db-client/query/filter/types.ts @@ -17,9 +17,9 @@ export type Operator = | '<=' | '..' | '!..' - | 'exists' - | '!exists' - | 'like' - | '!like' + // | 'exists' + // | '!exists' + // | 'like' + // | '!like' | 'includes' | '!includes' diff --git a/src/db-client/query/include/props.ts b/src/db-client/query/include/props.ts index 8537f5dc7b..8579ccbf72 100644 --- a/src/db-client/query/include/props.ts +++ b/src/db-client/query/include/props.ts @@ -1,10 +1,5 @@ import type { PropDef, PropDefEdge } from '../../../schema/index.js' -import { - LangCode, - LangCodeEnum, - LangCodeInverse, - PropType, -} from '../../../zigTsExports.js' +import { PropType } from '../../../zigTsExports.js' import { IncludeField, IncludeOpts, QueryDef, QueryDefType } from '../types.js' export const getAll = ( diff --git a/src/db-client/query/include/toByteCode.ts b/src/db-client/query/include/toByteCode.ts index 28102c1a20..95490dc9d5 100644 --- a/src/db-client/query/include/toByteCode.ts +++ b/src/db-client/query/include/toByteCode.ts @@ -76,7 +76,7 @@ export const includeToBuffer = ( i, ) // This writes the actual address of the prop to be used on read - value[0] = m + value[0] = m // this is for the reader i += 4 m += propDef.len } @@ -92,6 +92,7 @@ export const includeToBuffer = ( } else { for (const [start, value] of def.include.main.include.entries()) { value[0] = start + // this is for reading } result.push( createIncludeHeader({ diff --git a/src/db-client/query/queryDefToReadSchema.ts b/src/db-client/query/queryDefToReadSchema.ts index 1bd47c8980..520f60d3b6 100644 --- a/src/db-client/query/queryDefToReadSchema.ts +++ b/src/db-client/query/queryDefToReadSchema.ts @@ -1,6 +1,15 @@ import { IncludeOpts, QueryDef, Target } from './types.js' -import { LangCode, LangCodeInverse, PropType } from '../../zigTsExports.js' -import { type PropDef, type PropDefEdge } from '../../schema/index.js' +import { + LangCode, + LangCodeInverse, + PropType, + VectorBaseType, +} from '../../zigTsExports.js' +import { + SchemaVector, + type PropDef, + type PropDefEdge, +} from '../../schema/index.js' import { ReaderMeta, ReaderSchemaEnum, @@ -34,7 +43,8 @@ const createReaderPropDef = ( readerPropDef.enum = p.enum } if (p.typeIndex === PropType.vector || p.typeIndex === PropType.colVec) { - readerPropDef.vectorBaseType = p.vectorBaseType + readerPropDef.vectorBaseType = + VectorBaseType[(p.schema as SchemaVector).baseType] readerPropDef.len = p.len } if (p.typeIndex === PropType.cardinality) { @@ -130,6 +140,7 @@ export const convertToReaderSchema = ( if (q.aggregate.groupBy.display) { a.groupBy.display = q.aggregate.groupBy.display } + // MV: Tto review if (q.aggregate.groupBy.enum) { a.groupBy.enum = q.aggregate.groupBy.enum } @@ -166,6 +177,7 @@ export const convertToReaderSchema = ( for (const [start, p, opts] of q.include.main.include.values()) { readerSchema.main.props[start] = createReaderPropDef(p, locales, opts) } + for (const [k, v] of q.references.entries()) { const target = v.target as Target const propDef = target.propDef! diff --git a/src/db-client/query/subscription/index.ts b/src/db-client/query/subscription/index.ts index 2161a77b72..ae7b31296e 100644 --- a/src/db-client/query/subscription/index.ts +++ b/src/db-client/query/subscription/index.ts @@ -5,8 +5,8 @@ import { registerSubscription } from './toByteCode.js' import { OnData, OnError, OnClose } from './types.js' export class SubStore { - listeners: Map - onClose: OnClose + listeners!: Map + onClose!: OnClose response?: BasedQueryResponse checksum?: number len?: number diff --git a/src/db-client/query/toByteCode/toByteCode.ts b/src/db-client/query/toByteCode/toByteCode.ts index 9b548e1c59..2248a6aa4a 100644 --- a/src/db-client/query/toByteCode/toByteCode.ts +++ b/src/db-client/query/toByteCode/toByteCode.ts @@ -181,7 +181,7 @@ export function defToBuffer( edgeTypeId, edgeSize, edgeFilterSize: 0, // this is nice - size: buffer.byteLength + includeSize, + size: buffer.byteLength + includeSize, // is this used? QueryHeaderByteSize + searchSize + sortSize + includeSize }, 0, ) @@ -232,12 +232,13 @@ export const queryToBuffer = (query: BasedDbQuery) => { const def = query.def! const bufs = defToBuffer(query.db, def) bufs.push(schemaChecksum(def)) - const queryIdSize = 4 + const queryIdSize = 4 // prob want 8 here... const totalByteLength = byteSize(bufs) + queryIdSize const res = new Uint8Array(totalByteLength) const queryIdTarget = new Uint8Array(4) bufs.unshift(queryIdTarget) combineIntermediateResults(res, 0, bufs) + // maybe make these ids 8 bytes seems to short... const queryId = crc32(res) writeUint32(res, queryId, 0) // debugBuffer(res) diff --git a/src/db-client/query2/index.ts b/src/db-client/query2/index.ts new file mode 100644 index 0000000000..5b18c97908 --- /dev/null +++ b/src/db-client/query2/index.ts @@ -0,0 +1,921 @@ +import type { FilterAst, QueryAst } from '../../db-query/ast/ast.js' +import type { + PickOutput, + ResolveInclude, + Path, + FilterOpts, + Operator, + ResolveDotPath, + InferPathType, + FilterEdges, + InferSchemaOutput, + NumberPaths, + ExpandDotPath, + UnionToIntersection, +} from './types.js' +import type { ResolvedProps } from '../../schema/index.js' +import { astToQueryCtx } from '../../db-query/ast/toCtx.js' +import { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import type { DbClient } from '../../sdk.js' +import { $buffer, proxyResult } from './result.js' +import type { StepInput, aggFnOptions } from '../query/aggregates/types.js' +import { readUint32 } from '../../utils/uint8.js' + +class Query< + S extends { types: any; locales?: any } = { types: any }, + T extends keyof S['types'] = any, + K extends + | keyof ResolvedProps + | '*' + | '**' + | { field: any; select: any } + | string = '*', // Allow string for potential dot paths + IsSingle extends boolean = false, + SourceField extends string | number | symbol | undefined = undefined, + IsRoot extends boolean = false, + EdgeProps extends Record = {}, + Aggregate = {}, + GroupedKey extends string | undefined = undefined, +> { + constructor(ast: QueryAst) { + this.ast = ast + } + ast: QueryAst + + locale< + L extends string & + (S['locales'] extends Record ? keyof S['locales'] : string), + >( + locale: L, + ): NextBranch< + { types: S['types']; locales: L }, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate, + GroupedKey + > { + this.ast.locale = locale + return this as any + } + include< + F extends [ + ( + | 'id' + | (keyof (ResolvedProps & EdgeProps) & string) + | Path + | '*' + | '**' + | ((q: SelectFn) => AnyQuery) + ), + ...( + | 'id' + | (keyof (ResolvedProps & EdgeProps) & string) + | Path + | '*' + | '**' + | ((q: SelectFn) => AnyQuery) + )[], + ], + >( + ...props: F + ): NextBranch< + S, + T, + (K extends '*' ? never : K) | ResolveIncludeArgs, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate, + GroupedKey + > { + if (props.length === 0) { + throw new Error('Query: include expects at least one argument') + } + for (const prop of props as (string | Function)[]) { + if (typeof prop === 'function') { + prop((prop: string) => new Query(traverse(this.ast, prop))) + } else { + traverse(this.ast, prop).include = {} + } + } + return this as any + } + + filter( + fn: ( + filter: FilterFn, + ) => FilterBranch>, + ): FilterBranch + filter< + P extends keyof (ResolvedProps & EdgeProps) | Path, + >( + prop: P, + op: Operator, + val: InferPathType, + opts?: FilterOpts, + ): FilterBranch + filter(prop: any, op?: any, val?: any, opts?: any): FilterBranch { + this.#filterGroup ??= this.ast.filter ??= {} + return this.#addFilter(prop, op, val, opts, false) + } + + and( + fn: ( + filter: FilterFn, + ) => FilterBranch>, + ): FilterBranch + and

& EdgeProps) | Path>( + prop: P, + op: Operator, + val: InferPathType, + opts?: FilterOpts, + ): FilterBranch + and(prop: any, op?: any, val?: any, opts?: any): FilterBranch { + return this.filter(prop, op, val, opts) + } + + or( + fn: ( + filter: FilterFn, + ) => FilterBranch>, + ): FilterBranch + or

& EdgeProps) | Path>( + prop: P, + op: Operator, + val: InferPathType, + opts?: FilterOpts, + ): FilterBranch + or(prop: any, op?: any, val?: any, opts?: any): FilterBranch { + this.#filterGroup ??= this.ast.filter ??= {} + this.#filterGroup = this.#filterGroup.or ??= {} + return this.#addFilter(prop, op, val, opts, true) + } + + sum) => AnyQuery>( + fn: F, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + sum

>( + ...props: [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & UnionToIntersection>, + GroupedKey + > + sum( + ...props: any[] + ): NextBranch { + if (typeof props[0] === 'function') { + const fn = props[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (props.length === 0) { + throw new Error('Query: sum expects at least one argument') + } + this.ast.sum ??= { props: [] } + this.ast.sum.props.push(...(props as string[])) // Safe cast as P is string-like key + return this as any + } + + count(): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & { count: number }, + GroupedKey + > { + this.ast.count = {} + return this as any + } + + cardinality) => AnyQuery>( + fn: F, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + cardinality

( + ...props: [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & UnionToIntersection>, + GroupedKey + > + cardinality( + ...props: any[] + ): NextBranch { + if (typeof props[0] === 'function') { + const fn = props[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (props.length === 0) { + throw new Error('Query: cardinality expects at least one argument') + } + this.ast.cardinality ??= { props: [] } + this.ast.cardinality.props.push(...props) + return this as any + } + + avg) => AnyQuery>( + fn: F, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + avg

>( + ...props: [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & UnionToIntersection>, + GroupedKey + > + avg( + ...props: any[] + ): NextBranch { + if (typeof props[0] === 'function') { + const fn = props[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (props.length === 0) { + throw new Error('Query: avg expects at least one argument') + } + this.ast.avg ??= { props: [] } + this.ast.avg.props.push(...(props as string[])) + return this as any + } + + hmean) => AnyQuery>( + fn: F, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + hmean

>( + ...props: [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & UnionToIntersection>, + GroupedKey + > + hmean( + ...props: any[] + ): NextBranch { + if (typeof props[0] === 'function') { + const fn = props[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (props.length === 0) { + throw new Error('Query: hmean expects at least one argument') + } + this.ast.hmean ??= { props: [] } + this.ast.hmean.props.push(...(props as string[])) + return this as any + } + + max) => AnyQuery>( + fn: F, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + max

>( + ...props: [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & + UnionToIntersection }>>, + GroupedKey + > + max( + ...props: any[] + ): NextBranch { + if (typeof props[0] === 'function') { + const fn = props[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (props.length === 0) { + throw new Error('Query: max expects at least one argument') + } + this.ast.max ??= { props: [] } + this.ast.max.props.push(...(props as string[])) + return this as any + } + + min) => AnyQuery>( + fn: F, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + min

>( + ...props: [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & + UnionToIntersection }>>, + GroupedKey + > + min( + ...props: any[] + ): NextBranch { + if (typeof props[0] === 'function') { + const fn = props[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (props.length === 0) { + throw new Error('Query: min expects at least one argument') + } + this.ast.min ??= { props: [] } + this.ast.min.props.push(...(props as string[])) + return this as any + } + + stddev) => AnyQuery>( + fn: F, + opts?: aggFnOptions, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + stddev

>( + ...args: [...P[], aggFnOptions] | [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & UnionToIntersection>, + GroupedKey + > + stddev( + ...args: any[] + ): NextBranch { + if (typeof args[0] === 'function') { + const fn = args[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (args.length === 0) { + throw new Error('Query: stddev expects at least one argument') + } + this.ast.stddev ??= { props: [] } + let opts: any + let props: string[] + if ( + typeof args[args.length - 1] === 'object' && + !Array.isArray(args[args.length - 1]) + ) { + opts = args[args.length - 1] + props = args.slice(0, -1) + } else if (Array.isArray(args[0])) { + props = args[0] + opts = args[1] + } else { + props = args + } + this.ast.stddev.props.push(...props) + if (opts?.mode) { + this.ast.stddev.samplingMode = opts.mode + } + return this as any + } + + var) => AnyQuery>( + fn: F, + opts?: aggFnOptions, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & ResolveAggregate, + GroupedKey + > + var

>( + ...args: [...P[], aggFnOptions] | [P, ...P[]] + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate & UnionToIntersection>, + GroupedKey + > + var(...args: any[]): NextBranch { + if (typeof args[0] === 'function') { + const fn = args[0] + fn((prop: string) => new Query(traverse(this.ast, prop))) + return this as any + } + if (args.length === 0) { + throw new Error('Query: var expects at least one argument') + } + this.ast.variance ??= { props: [] } + let opts: any + let props: string[] + if ( + typeof args[args.length - 1] === 'object' && + !Array.isArray(args[args.length - 1]) + ) { + opts = args[args.length - 1] + props = args.slice(0, -1) + } else if (Array.isArray(args[0])) { + props = args[0] + opts = args[1] + } else { + props = args + } + this.ast.variance.props.push(...props) + if (opts?.mode) { + this.ast.variance.samplingMode = opts.mode + } + return this as any + } + + sort

( + prop: P, + order?: 'asc' | 'desc', + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate, + GroupedKey + > { + this.ast.order = order || 'asc' + this.ast.sort = { prop } + return this as any + } + + order( + order: 'asc' | 'desc', + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate, + GroupedKey + > { + this.ast.order = order || 'asc' + return this as any + } + + range( + start: number, + end?: number, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate, + GroupedKey + > { + const limit = end ? end - start : 1000 + this.ast.range = { start, end: limit } + return this as any + } + + groupBy

( + prop: P, + step?: StepInput, + ): NextBranch< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate, + P + > { + this.ast.groupBy = { prop } + if (step) { + if (typeof step === 'object') { + const s = step as any + if (s.step) this.ast.groupBy.step = s.step + if (s.timeZone) this.ast.groupBy.timeZone = s.timeZone + if (s.display) this.ast.groupBy.display = s.display + } else { + this.ast.groupBy.step = step + } + } + return this as any + } + + #filterGroup?: FilterAst + #addFilter( + prop: any, + op: any, + val: any, + opts: any, + isOr: boolean, + ): FilterBranch { + if (typeof prop === 'function') { + prop((...args) => { + const target = isOr + ? this.#filterGroup! + : (this.#filterGroup!.and ??= {}) + const branch = new Query(target) + branch.#filterGroup = target + ;(branch.filter as any)(...args) + return branch + }) + return this as any + } + + const target = traverse(this.#filterGroup, prop as string) + target.ops ??= [] + target.ops.push({ op, val }) + return this as any + } +} + +type FilterBranch = T + +type FilterMethods = { + and: T['filter'] + or: T['filter'] +} + +// This overload is for when the user provides NO schema argument, rely on generic default or explicit generic +export function query< + S extends { types: any; locales?: any } = { types: any }, + T extends keyof S['types'] & string = keyof S['types'] & string, +>(type: T): Query + +// This overload is for when the user provides NO schema argument + ID, rely on generic default or explicit generic +export function query< + S extends { types: any; locales?: any } = { types: any }, + T extends keyof S['types'] & string = keyof S['types'] & string, +>( + type: T, + id: number | Partial>, +): Query + +export function query< + S extends { types: any; locales?: any }, + T extends keyof S['types'] & string = keyof S['types'] & string, +>( + type: T, + target?: number | number[] | Partial>, +): Query { + const ast: any = { type } + if (target) ast.target = target + return new Query(ast) +} + +export class BasedQuery2< + S extends { types: any; locales?: any } = { types: any }, + T extends keyof S['types'] = any, + K extends + | keyof ResolvedProps + | '*' + | '**' + | { field: any; select: any } + | string = '*', + IsSingle extends boolean = false, + Aggregate = {}, + GroupedKey extends string | undefined = undefined, +> extends Query { + constructor( + db: DbClient, + type: T, + target?: number | number[] | Partial>, + ) { + super({}) + this.ast.type = type as string + if (target) this.ast.target = target + this.db = db + } + + testGroupedKey(): GroupedKey { + return null as any + } + testAggregate(): Aggregate { + return null as any + } + testIsSingle(): IsSingle { + return null as any + } + testK(): K { + return null as any + } + + db: DbClient + async get(): Promise< + [GroupedKey] extends [string] + ? Record + : [keyof Aggregate] extends [never] + ? IsSingle extends true + ? PickOutput< + S, + T, + ResolveInclude, K> + > | null + : PickOutput, K>>[] + : Aggregate + > { + if ( + !this.ast.props && + !this.ast.sum && + !this.ast.count && + !this.ast.avg && + !this.ast.hmean && + !this.ast.max && + !this.ast.min && + !this.ast.stddev && + !this.ast.variance && + !this.ast.cardinality + ) { + this.include('*') + } + if (!this.db.schema) { + await this.db.once('schema') + } + await this.db.isModified() + const ctx = astToQueryCtx( + this.db.schema!, + this.ast, + new AutoSizedUint8Array(), + ) + const result = await this.db.hooks.getQueryBuf(ctx.query) + + return proxyResult(result, ctx.readSchema) as any + } +} + +type FilterFn< + S extends { types: any; locales?: any }, + T extends keyof S['types'], + EdgeProps extends Record, +> = FilterSignature< + S, + T, + EdgeProps, + FilterBranch> +> + +type FilterSignature< + S extends { types: any; locales?: any }, + T extends keyof S['types'], + EdgeProps extends Record, + Result, +> = { + ( + fn: ( + filter: FilterFn, + ) => FilterBranch>, + ): Result +

& EdgeProps) | Path>( + prop: P, + op: Operator, + val: InferPathType, + opts?: FilterOpts, + ): Result +} + +type SelectFn< + S extends { types: any; locales?: any }, + T extends keyof S['types'], +> =

>( + field: P, +) => Query< + S, + ResolvedProps[P] extends { ref: infer R extends string } + ? R + : ResolvedProps[P] extends { + items: { ref: infer R extends string } + } + ? R + : never, + '*', + false, + P, + false, + FilterEdges[P]> & + (ResolvedProps[P] extends { items: infer Items } + ? FilterEdges + : {}) +> + +// ResolveIncludeArgs needs to stay here because it refers to Query +export type ResolveIncludeArgs = T extends ( + q: any, +) => Query< + infer S, + infer T, + infer K, + infer Single, + infer SourceField, + any, + any, + infer Aggregate, + infer GroupedKey +> + ? [GroupedKey] extends [string] + ? { + field: SourceField + select: { _aggregate: Record } + } + : [keyof Aggregate] extends [never] + ? { field: SourceField; select: K } + : { field: SourceField; select: { _aggregate: Aggregate } } + : T extends string + ? ResolveDotPath + : T + +// ResolveAggregate extracts the aggregate structure from a callback function +type ResolveAggregate = + ResolveIncludeArgs extends { + field: infer F extends string | number | symbol + select: { _aggregate: infer A } + } + ? { [K in F]: A } + : never + +// Helper type to simplify include signature +type AnyQuery = Query< + S, + any, + any, + any, + any, + any, + any, + any, + any +> + +// Helper type to simplify method return types +type NextBranch< + S extends { types: any; locales?: any }, + T extends keyof S['types'], + K extends + | keyof ResolvedProps + | '*' + | '**' + | { field: any; select: any } + | string, + IsSingle extends boolean, + SourceField extends string | number | symbol | undefined, + IsRoot extends boolean, + EdgeProps extends Record, + Aggregate, + GroupedKey extends string | undefined, +> = IsRoot extends true + ? BasedQuery2 + : Query< + S, + T, + K, + IsSingle, + SourceField, + IsRoot, + EdgeProps, + Aggregate, + GroupedKey + > + +function traverse(target: any, prop: string) { + const path = prop.split('.') + for (const key of path) { + if (key[0] === '$') { + target.edges ??= {} + target.edges.props ??= {} + target = target.edges.props[key] ??= {} + } else { + target.props ??= {} + target = target.props[key] ??= {} + } + } + return target +} + +export const checksum = (res: any): number => { + const buf = res?.[$buffer] + return buf ? readUint32(buf, buf.byteLength - 4) : 0 +} diff --git a/src/db-client/query2/result.ts b/src/db-client/query2/result.ts new file mode 100644 index 0000000000..f820bab212 --- /dev/null +++ b/src/db-client/query2/result.ts @@ -0,0 +1,92 @@ +import { + resultToObject, + type ReaderSchema, + ReaderSchemaEnum, +} from '../../protocol/index.js' +import { readUint32 } from '../../utils/uint8.js' + +export const $buffer = Symbol() +export const $schema = Symbol() +export const $result = Symbol() + +const define = (result: any) => { + if ('length' in result) { + result.__proto__ = Array.prototype + result.length = 0 + resultToObject( + result[$schema], + result[$buffer], + result[$buffer].byteLength - 4, + 0, + result, + ) + } else { + result.__proto__ = Object.prototype + Object.assign( + result, + resultToObject( + result[$schema], + result[$buffer], + result[$buffer].byteLength - 4, + 0, + ), + ) + } + + Object.defineProperty(result, $buffer, { enumerable: false }) + Object.defineProperty(result, $schema, { enumerable: false }) +} + +const handler: ProxyHandler = { + get(stub, prop) { + const result = stub[$result] + if (prop === $buffer || prop === $schema) { + return result[prop] + } + if (prop === 'then') { + // this can be improved!! + const schema: ReaderSchema = result[$schema] + if (schema.type !== ReaderSchemaEnum.single || !schema.props?.then) { + return undefined + } + } + + define(result) + return result[prop] + }, + ownKeys(stub) { + const result = stub[$result] + define(result) + return Reflect.ownKeys(result) + }, + getOwnPropertyDescriptor(stub, prop) { + const result = stub[$result] + define(result) + return Reflect.getOwnPropertyDescriptor(result, prop) + }, +} + +export const proxyResult = (buffer: Uint8Array, schema: ReaderSchema) => { + if ('aggregate' in schema) { + return resultToObject(schema, buffer, buffer.byteLength - 4) + } + const single = schema.type === ReaderSchemaEnum.single + const length = readUint32(buffer, 0) + if (length === 0) return single ? null : [] + let stub, result + if (single) { + stub = {} + result = {} + } else { + stub = [] + result = [] + result.length = length + } + const proxy = new Proxy(stub, handler) + result[$buffer] = buffer + result[$schema] = schema + stub[$result] = result + // @ts-ignore + result.__proto__ = proxy + return result +} diff --git a/src/db-client/query2/types.ts b/src/db-client/query2/types.ts new file mode 100644 index 0000000000..71c7839ed0 --- /dev/null +++ b/src/db-client/query2/types.ts @@ -0,0 +1,382 @@ +import type { ResolvedProps } from '../../schema/index.js' +import type { TypedArray } from '../../schema/index.js' + +export type InferSchemaOutput< + S extends { types: any; locales?: any }, + T extends keyof S['types'], +> = InferType< + ResolvedProps, + S['types'], + S['locales'] extends string | Record ? S['locales'] : {} +> & { id: number } + +type TypeMap = { + string: string + number: number + int8: number + uint8: number + int16: number + uint16: number + int32: number + uint32: number + boolean: boolean + text: string + json: any + timestamp: number + binary: Uint8Array + alias: string + vector: TypedArray + colvec: TypedArray + cardinality: number +} + +// Helper to check if Selection is provided (not never/any/unknown default behavior) +type IsSelected = [T] extends [never] ? false : true + +export type FilterEdges = { + [K in keyof T as K extends `$${string}` ? K : never]: T[K] +} + +// Utility to clean up intersection types +type Prettify = { + [K in keyof T]: T[K] +} & {} + +export type PickOutputFromProps< + S extends { types: any; locales?: any }, + Props, + K, +> = Prettify< + { + [P in Extract | 'id']: P extends 'id' + ? number + : P extends keyof Props + ? IsRefProp extends true + ? InferProp< + Props[P], + S['types'], + S['locales'] extends string | Record + ? S['locales'] + : {}, + '-*' + > + : InferProp< + Props[P], + S['types'], + S['locales'] extends string | Record + ? S['locales'] + : {} + > + : never + } & { + [Field in Extract['field'] & + keyof Props]: InferProp< + Props[Field], + S['types'], + S['locales'] extends string | Record ? S['locales'] : {}, + Extract['select'] + > + } +> + +export type InferProp< + Prop, + Types, + Locales extends string | Record = Record, + Selection = never, +> = + IsSelected extends false + ? InferPropLogic + : [Selection] extends [{ _aggregate: infer Agg }] + ? Agg + : InferPropLogic + +type InferPropLogic< + Prop, + Types, + Locales extends string | Record = Record, + Selection = never, +> = Prop extends { type: 'text' } + ? Locales extends string + ? string + : { [K in Exclude]-?: string } + : Prop extends { type: 'object'; props: infer P } + ? InferType + : Prop extends { type: infer T extends keyof TypeMap } + ? TypeMap[T] + : Prop extends { enum: infer E extends readonly any[] } + ? E[number] | undefined + : Prop extends { ref: infer R extends string } + ? IsSelected extends true + ? R extends keyof Types + ? PickOutputFromProps< + { types: Types; locales: Locales }, + ResolvedProps & FilterEdges, + ResolveInclude< + ResolvedProps & FilterEdges, + Selection + > + > | null + : never + : number // ID + : Prop extends { + items: { ref: infer R extends string } & infer Items + } + ? IsSelected extends true + ? R extends keyof Types + ? PickOutputFromProps< + { types: Types; locales: Locales }, + ResolvedProps & FilterEdges, + ResolveInclude< + ResolvedProps & FilterEdges, + Selection + > + >[] + : never + : number[] // IDs + : unknown + +type InferType< + Props, + Types, + Locales extends string | Record = Record, +> = { + [K in keyof Props]: InferProp +} + +// Helpers for include +type IsRefProp

= [P] extends [{ type: 'reference' } | { type: 'references' }] + ? true + : [P] extends [{ ref: any }] + ? true + : [P] extends [{ items: { ref: any } }] + ? true + : false + +export type NonRefKeys = { + [K in keyof Props]: IsRefProp extends true ? never : K +}[keyof Props] + +export type RefKeys = { + [K in keyof Props]: IsRefProp extends true ? K : never +}[keyof Props] + +export type NonRefNonEdgeKeys = { + [K in keyof Props]: IsRefProp extends true + ? never + : K extends `$${string}` + ? never + : K +}[keyof Props] + +export type ResolveInclude = K extends any + ? K extends '*' + ? NonRefKeys + : K extends '-*' + ? NonRefNonEdgeKeys + : K extends '**' + ? RefKeys + : K + : never + +export type IncludeSelection< + S extends { types: any; locales?: any }, + T extends keyof S['types'], + K, +> = ResolveInclude, K> + +export type PickOutput< + S extends { types: any; locales?: any }, + T extends keyof S['types'], + K, +> = Prettify< + { + [P in + | Extract> + | 'id']: P extends keyof ResolvedProps + ? IsRefProp[P]> extends true + ? InferProp< + ResolvedProps[P], + S['types'], + S['locales'] extends string | Record + ? S['locales'] + : {}, + '-*' + > + : InferSchemaOutput[P] + : InferSchemaOutput[P] + } & { + [Field in Extract['field'] & + keyof ResolvedProps]: InferProp< + ResolvedProps[Field], + S['types'], + S['locales'] extends string | Record ? S['locales'] : {}, + Extract['select'] + > + } +> + +export type FilterOpts = { + lowerCase?: boolean + fn?: + | 'dotProduct' + | 'manhattanDistance' + | 'cosineSimilarity' + | 'euclideanDistance' + score?: number +} + +export type Operator = + | '=' + | '<' + | '>' + | '!=' + | '>=' + | '<=' + | '..' + | '!..' + | 'exists' + | '!exists' + | 'like' + | '!like' + | 'includes' + | '!includes' + +type Prev = [never, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + +// Helper to generate paths from edges +export type EdgePaths< + S extends { types: any; locales?: any }, + Prop, + Depth extends number, +> = { + [K in keyof FilterEdges & string]: + | K + | (FilterEdges[K] extends { ref: infer R extends string } + ? `${K}.${Path | 'id' | '*' | '**'}` + : FilterEdges[K] extends { + items: { ref: infer R extends string } + } + ? `${K}.${Path | 'id' | '*' | '**'}` + : never) +}[keyof FilterEdges & string] + +type PropsPath< + S extends { types: any; locales?: any }, + Props, + Depth extends number, +> = [Depth] extends [never] + ? never + : { + [K in keyof Props & string]: + | K + | (Props[K] extends { ref: infer R extends string } + ? `${K}.${ + | Path + | EdgePaths + | 'id' + | '*' + | '**'}` + : Props[K] extends { props: infer P } + ? `${K}.${PropsPath}` + : Props[K] extends { type: 'text' } + ? S['locales'] extends string + ? never + : `${K}.${keyof (S['locales'] extends Record ? S['locales'] : Record) & string}` + : Props[K] extends { + items: { ref: infer R extends string } & infer Items + } + ? `${K}.${ + | Path + | EdgePaths + | 'id' + | '*' + | '**'}` + : never) + }[keyof Props & string] + +export type Path< + S extends { types: any; locales?: any }, + T extends keyof S['types'], + Depth extends number = 5, +> = PropsPath, Depth> + +export type ResolveDotPath = + T extends `${infer Head}.${infer Tail}` + ? { field: Head; select: ResolveDotPath } + : T + +type InferPropsPathType< + S extends { types: any; locales?: any }, + Props, + P, +> = P extends 'id' + ? number + : P extends keyof Props + ? InferProp< + Props[P], + S['types'], + S['locales'] extends Record ? S['locales'] : {} + > + : P extends `${infer Head}.${infer Tail}` + ? Head extends keyof Props + ? Props[Head] extends { ref: infer R extends string } + ? Tail extends keyof FilterEdges + ? InferProp< + Props[Head][Tail & keyof Props[Head]], + S['types'], + S['locales'] extends Record ? S['locales'] : {} + > + : InferPathType + : Props[Head] extends { props: infer NestedProps } + ? InferPropsPathType + : Props[Head] extends { type: 'text' } + ? S['locales'] extends string + ? never + : Tail extends keyof (S['locales'] extends Record + ? S['locales'] + : Record) + ? string + : never + : Props[Head] extends { + items: { ref: infer R extends string } & infer Items + } + ? Tail extends keyof FilterEdges + ? InferProp< + Items[Tail & keyof Items], + S['types'], + S['locales'] extends string | Record + ? S['locales'] + : {} + > + : InferPathType + : never + : never + : never + +export type InferPathType< + S extends { types: any; locales?: any }, + T extends keyof S['types'], + P, + EdgeProps extends Record = {}, +> = InferPropsPathType & EdgeProps, P> + +export type NumberPaths< + S extends { types: any; locales?: any }, + T extends keyof S['types'], +> = { + [K in Path]: InferPathType extends number ? K : never +}[Path] + +export type ExpandDotPath< + T extends string, + V, +> = T extends `${infer Head}.${infer Tail}` + ? { [K in Head]: ExpandDotPath } + : { [K in T]: V } + +export type UnionToIntersection = ( + U extends any ? (k: U) => void : never +) extends (k: infer I) => void + ? I + : never diff --git a/src/db-client/setLocalClientSchema.ts b/src/db-client/setLocalClientSchema.ts index f401af3c26..086afec767 100644 --- a/src/db-client/setLocalClientSchema.ts +++ b/src/db-client/setLocalClientSchema.ts @@ -1,6 +1,6 @@ import { DbClient } from '../index.js' -import { cancel } from './modify/drain.js' -import { Ctx, MODIFY_HEADER_SIZE } from './modify/Ctx.js' +// import { cancel } from './_modify/drain.js' +// import { Ctx, MODIFY_HEADER_SIZE } from './_modify/Ctx.js' import { updateTypeDefs, type SchemaOut } from '../schema/index.js' export const setLocalClientSchema = (client: DbClient, schema: SchemaOut) => { @@ -12,12 +12,13 @@ export const setLocalClientSchema = (client: DbClient, schema: SchemaOut) => { client.schemaTypesParsed = schemaTypesParsed client.schemaTypesParsedById = schemaTypesParsedById - if (client.modifyCtx.index > MODIFY_HEADER_SIZE) { - console.info('Modify cancelled - schema updated') - } + console.warn('TODO schema CHANGE') + // if (client.modifyCtx.index > MODIFY_HEADER_SIZE) { + // console.info('Modify cancelled - schema updated') + // } - cancel(client.modifyCtx, Error('Schema changed - in-flight modify cancelled')) - client.modifyCtx = new Ctx(schema.hash, client.modifyCtx.buf) + // cancel(client.modifyCtx, Error('Schema changed - in-flight modify cancelled')) + // client.modifyCtx = new Ctx(schema.hash, client.modifyCtx.buf) // resubscribe for (const [q, store] of client.subs) { diff --git a/src/db-client/string.ts b/src/db-client/string.ts deleted file mode 100644 index 69c1a943da..0000000000 --- a/src/db-client/string.ts +++ /dev/null @@ -1,58 +0,0 @@ -import native from '../native.js' -import { Ctx } from './modify/Ctx.js' -import { resize } from './modify/resize.js' -import { ENCODER, makeTmpBuffer, writeUint32 } from '../utils/uint8.js' -import { COMPRESSED, NOT_COMPRESSED } from '../protocol/index.js' -import { LangCode, LangCodeEnum } from '../zigTsExports.js' - -const { getUint8Array: getTmpBuffer } = makeTmpBuffer(4096) // the usual page size? - -export const write = ( - ctx: Ctx, - value: string, - offset: number, - lang: LangCodeEnum, - noCompression: boolean, -): number => { - const buf = ctx.buf - value = value.normalize('NFKD') - buf[offset] = lang - const { written: l } = ENCODER.encodeInto(value, buf.subarray(offset + 2)) - let crc = native.crc32(buf.subarray(offset + 2, offset + 2 + l)) - if (value.length > 200 && !noCompression) { - const insertPos = offset + 6 + l - const startPos = offset + 2 - const endPos = offset + 2 + l - const willEnd = insertPos + l - resize(ctx, willEnd) - buf.copyWithin(insertPos, startPos, endPos) - const size = native.compress(buf, offset + 6, l) - if (size === 0) { - resize(ctx, l + 6) - buf[offset + 1] = NOT_COMPRESSED - ENCODER.encodeInto(value, buf.subarray(offset + 2)) - writeUint32(buf, crc, offset + l + 2) - return l + 6 - } else { - resize(ctx, size + 10) - let len = l - buf[offset + 1] = COMPRESSED - writeUint32(buf, len, offset + 2) - writeUint32(buf, crc, offset + size + 6) - return size + 10 - } - } else { - buf[offset + 1] = NOT_COMPRESSED - writeUint32(buf, crc, offset + 2 + l) - return l + 6 - } -} - -export const stringCompress = (str: string): Uint8Array => { - const s = str.normalize('NFKD') - const tmpCompressBlock = getTmpBuffer(2 * native.stringByteLength(s) + 10) - const l = write({ buf: tmpCompressBlock } as Ctx, str, 0, LangCode.none, false) - const nBuffer = new Uint8Array(l) - nBuffer.set(tmpCompressBlock.subarray(0, l)) - return nBuffer -} diff --git a/src/db-query/ast/aggregates.ts b/src/db-query/ast/aggregates.ts new file mode 100644 index 0000000000..411b50073b --- /dev/null +++ b/src/db-query/ast/aggregates.ts @@ -0,0 +1,297 @@ +import { TypeDef, PropDef } from '../../schema/defs/index.js' +import { + QueryType, + QueryIteratorType, + AggFunction, + AggHeaderByteSize, + createAggHeader, + createAggProp, + createGroupByKeyProp, + GroupByKeyPropByteSize, + AggPropByteSize, + type QueryIteratorTypeEnum, + IntervalInverse, + PropType, +} from '../../zigTsExports.js' +import { Ctx, QueryAst } from './ast.js' +import { filter } from './filter/filter.js' +import { + aggregateTypeMap, + IntervalString, + Interval, +} from '../../db-client/query/aggregates/types.js' +import { readPropDef } from './readSchema.js' +import { getTimeZoneOffsetInMinutes } from '../../db-client/query/aggregates/aggregates.js' + +type Sizes = { result: number; accumulator: number } + +export const pushAggregatesQuery = ( + ast: QueryAst, + ctx: Ctx, + typeDef: TypeDef, +) => { + const headerStartPos = ctx.query.length + ctx.query.length += AggHeaderByteSize + ctx.readSchema.aggregate = { + aggregates: [], + totalResultsSize: 0, + } + + let filterSize = 0 + if (ast.filter) { + filterSize = filter(ast.filter, ctx, typeDef) + } + + let sizes: Sizes = { + result: 0, + accumulator: 0, + } + + const hasGroupBy = pushGroupBy(ast, ctx, typeDef, sizes) + + pushAggregates(ast, ctx, typeDef, sizes) + + const headerBuffer = buildAggregateHeader( + ast, + typeDef, + filterSize, + hasGroupBy, + sizes, + ) + ctx.query.data.set(headerBuffer, headerStartPos) +} + +const isRootCountOnly = (ast: QueryAst) => { + return !!( + ast.count && + !ast.sum && + !ast.avg && + !ast.min && + !ast.max && + !ast.stddev && + !ast.variance && + !ast.hmean && + !ast.cardinality && + !ast.filter && + !ast.groupBy + ) +} + +const buildAggregateHeader = ( + ast: QueryAst, + typeDef: TypeDef, + filterSize: number, + hasGroupBy: boolean, + sizes: Sizes, +) => { + const rangeStart = ast.range?.start || 0 + + const commonHeader = { + offset: rangeStart, + filterSize, + hasGroupBy, + resultsSize: sizes.result, + accumulatorSize: sizes.accumulator, + isSamplingSet: checkSamplingMode(ast), + } + + const isCountOnly = isRootCountOnly(ast) + const op = isCountOnly ? QueryType.aggregatesCount : QueryType.aggregates + + let headerBuffer: Uint8Array + + // TODO: references + + let iteratorType = QueryIteratorType.aggregate + if (hasGroupBy) iteratorType += 2 + if (filterSize > 0) iteratorType += 1 + + headerBuffer = createAggHeader({ + ...commonHeader, + op, + typeId: typeDef.id, + limit: (ast.range?.end || 1000) + rangeStart, + iteratorType: iteratorType as QueryIteratorTypeEnum, + }) + return headerBuffer +} + +const pushAggregates = ( + ast: QueryAst, + ctx: Ctx, + typeDef: TypeDef, + sizes: { result: number; accumulator: number }, +) => { + ctx.readSchema.aggregate = ctx.readSchema.aggregate || { + aggregates: [], + totalResultsSize: 0, + groupBy: undefined, + } + + for (const key in AggFunction) { + if (!(key in ast)) continue + + const data = ast[key] + if (!data) continue + + const fn = AggFunction[key] + + let props = Array.isArray(data.props) + ? data.props + : data.props + ? [data.props] + : [] + + let i = 0 + if (key === 'count' && props.length === 0) { + ctx.readSchema.aggregate?.aggregates.push({ + path: ['count'], + type: fn, + resultPos: sizes.result, + }) + props.push('count') + } + + for (const propName of props) { + let propDef: PropDef | any = typeDef.props.get(propName) + if (propName === 'count' && fn === AggFunction.count) { + propDef = { + id: 255, + path: [propName], + start: 0, + type: 1, + } + } + if (!propDef) { + throw new Error(`Aggregate property '${propName}' not found`) + } + + let resSize = 0 + let accSize = 0 + const specificSizes = aggregateTypeMap.get(fn) + if (specificSizes) { + resSize += specificSizes.resultsSize + accSize += specificSizes.accumulatorSize + } else { + resSize += 8 + accSize += 8 + } + + const buffer = createAggProp({ + propId: propDef.id, + propType: propDef.type || 0, + propDefStart: propDef.start || 0, + aggFunction: fn, + resultPos: sizes.result, + accumulatorPos: sizes.accumulator, + }) + ctx.readSchema.aggregate?.aggregates.push({ + path: propDef.path!, + type: fn, + resultPos: sizes.result, + }) + ctx.readSchema.main.props[i] = readPropDef(propDef, ctx.locales) + ctx.readSchema.main.len += propDef.size + i += propDef.size + + ctx.query.data.set(buffer, ctx.query.length) + ctx.query.length += AggPropByteSize + + sizes.result += resSize + sizes.accumulator += accSize + + ctx.readSchema.aggregate.totalResultsSize += resSize + } + } +} + +export const isAggregateAst = (ast: QueryAst) => { + return !!( + ast.groupBy || + ast.count || + ast.sum || + ast.avg || + ast.min || + ast.max || + ast.stddev || + ast.variance || + ast.hmean || + ast.cardinality + ) +} + +const checkSamplingMode = (ast: QueryAst): boolean => { + if ( + ast['stddev']?.samplingMode === 'population' || + ast['variance']?.samplingMode === 'population' + ) + return false + else return true +} + +const pushGroupBy = ( + ast: QueryAst, + ctx: Ctx, + typeDef: TypeDef, + sizes: Sizes, +): boolean => { + if (!ast.groupBy) return false + + const { prop: propName, step, timeZone, display } = ast.groupBy + const propDef = typeDef.props.get(propName) + + if (!propDef) { + throw new Error(`Group By property '${propName}' not found in AST.`) + // to put the equivalent to aggregationFieldDoesNotExist to handle the error + } + + const { stepType, stepRange } = step + ? parseStep(step) + : { stepType: 0, stepRange: 0 } + + const timeZoneOffset = timeZone ? getTimeZoneOffsetInMinutes(timeZone) : 0 + + const buffer = createGroupByKeyProp({ + propId: propDef.id, + propType: propDef.type || 0, + propDefStart: propDef.start || 0, + stepType, + stepRange, + timezone: timeZoneOffset, + }) + + let enumProxy + if (propDef.type === PropType.enum) { + // @ts-ignore + enumProxy = Object.values(propDef.enum) + } + + ctx.query.data.set(buffer, ctx.query.length) + ctx.query.length += GroupByKeyPropByteSize + + if (ctx.readSchema.aggregate) { + ctx.readSchema.aggregate.groupBy = { + typeIndex: propDef.type, + stepRange, + ...(stepType !== 0 && { stepType: IntervalInverse[stepType] }), + ...(display !== undefined && { display }), + ...(enumProxy !== undefined && { enum: enumProxy }), + } + } + + return true +} + +type Step = { stepType: number; stepRange: number } +const parseStep = (step: number | IntervalString): Step => { + let stepRange = 0 + let stepType = 0 + if (typeof step === 'string') { + const intervalEnumKey = step as IntervalString + stepType = Interval[intervalEnumKey] + } else { + // validateStepRange(def, step) // TODO: see/make the equivalent for def.errors + stepRange = step + } + return { stepType, stepRange } as Step +} diff --git a/src/db-query/ast/ast.ts b/src/db-query/ast/ast.ts new file mode 100644 index 0000000000..3f96d85164 --- /dev/null +++ b/src/db-query/ast/ast.ts @@ -0,0 +1,107 @@ +import type { IntervalString } from '../../db-client/query/aggregates/types.js' +import { ReaderLocales, ReaderSchema } from '../../protocol/index.js' +import { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' + +export type FilterOpts = { + lowerCase?: boolean + fn?: + | 'dotProduct' + | 'manhattanDistance' + | 'cosineSimilarity' + | 'euclideanDistance' + score?: number +} + +export type Operator = + | '=' + | '<' + | '>' + | '!=' + | '>=' + | '<=' + | '..' + | '!..' + | 'exists' + | '!exists' + | 'like' + | '!like' + | 'includes' + | '!includes' + +export type FilterOp = { + op: Operator + val?: any + opts?: FilterOpts +} + +export type FilterLeaf = FilterAst & { + ops?: FilterOp[] + select?: { start: number; end: number } +} + +export const EdgeStrategy = { + noEdge: 0, + edgeOnly: 1, + mixed: 2, + edgeAndProps: 3, +} as const + +export type EdgeStrategyEnum = (typeof EdgeStrategy)[keyof typeof EdgeStrategy] + +export type FilterAst = { + props?: { + [key: string]: FilterLeaf + } + or?: FilterAst + and?: FilterAst + edges?: FilterAst + edgeStrategy?: EdgeStrategyEnum + + // this is a bit difficult combining OR filters with edges combined + // this would require and extra check for the type of node how to do? +} + +export type Include = { + // glob?: '*' | '**' // youri thinks we can just do these as props + meta?: true | 'only' | false + maxChars?: number + maxBytes?: number + raw?: boolean +} + +export type QueryAst = { + include?: Include + select?: { start: number; end: number } + locale?: string + range?: { start: number; end: number } + type?: string + target?: number | number[] | Record + filter?: FilterAst + order?: 'asc' | 'desc' + sort?: { prop: string } + props?: Record + edges?: QueryAst + // aggregate options + count?: { props?: string } + sum?: { props: string[] } + cardinality?: { props: string[] } + avg?: { props: string[] } + hmean?: { props: string[] } + max?: { props: string[] } + min?: { props: string[] } + stddev?: { props: string[]; samplingMode?: 'sample' | 'population' } + variance?: { props: string[]; samplingMode?: 'sample' | 'population' } + groupBy?: { + prop: string + step?: number | IntervalString + timeZone?: string + display?: Intl.DateTimeFormat + enum?: string[] + } +} + +export type Ctx = { + query: AutoSizedUint8Array + readSchema: ReaderSchema + locales: ReaderLocales +} diff --git a/src/db-query/ast/filter/comparison.ts b/src/db-query/ast/filter/comparison.ts new file mode 100644 index 0000000000..0d9e6fb5bd --- /dev/null +++ b/src/db-query/ast/filter/comparison.ts @@ -0,0 +1,136 @@ +import native from '../../../native.js' +import { PropDef } from '../../../schema/defs/index.js' +import { ENCODER } from '../../../utils/uint8.js' +import { + FilterConditionByteSize, + FilterConditionAlignOf, + writeFilterCondition, + PropTypeEnum, + FilterOpCompareEnum, + FilterOpCompare, +} from '../../../zigTsExports.js' +import { FilterOpts, Operator } from '../ast.js' +import { operatorToEnum } from './operatorToEnum.js' + +export const conditionByteSize = (propSize: number, size: number) => { + return size + FilterConditionByteSize + FilterConditionAlignOf + 1 + propSize +} + +// Make this configurable in the client +// has to be send from the server +const VECTOR_BYTES = 16 + +export const createCondition = ( + prop: { start: number; id: number; size: number; type: PropTypeEnum }, + op: FilterOpCompareEnum, + size: number = prop.size, +) => { + const conditionBuffer = new Uint8Array(conditionByteSize(prop.size, size)) + conditionBuffer[0] = 255 // Means condition header is not aligned + const offset = + writeFilterCondition( + conditionBuffer, + { + op: { + prop: prop.type, + compare: op, + }, + start: prop.start || 0, + prop: prop.id, + fieldSchema: 0, + len: prop.size, + offset: 255, // Means value is not aligned + size: size + prop.size, + }, + FilterConditionAlignOf + 1, + ) + prop.size + return { condition: conditionBuffer, offset } +} + +export const fixedComparison = ( + prop: PropDef, + operator: Operator, + val: any[], + opts?: FilterOpts, +) => { + const op = operatorToEnum(operator, val, prop) + + if (op === FilterOpCompare.eqBatch || op === FilterOpCompare.neqBatch) { + const size = val.length * prop.size + const empty = VECTOR_BYTES - (size % VECTOR_BYTES) + const rest = empty / prop.size + const { condition, offset } = createCondition(prop, op, size + empty) + let i = offset + for (const v of val) { + prop.write(condition, v, i) + i += prop.size + } + for (let j = 0; j < rest; j++) { + prop.write(condition, val[0], i) + i += prop.size + } + return condition + } + + if ( + op === FilterOpCompare.eqBatchSmall || + op === FilterOpCompare.neqBatchSmall + ) { + const vectorLen = VECTOR_BYTES / prop.size + const { condition, offset } = createCondition(prop, op, VECTOR_BYTES) + let i = offset + for (let j = 0; j < vectorLen; j++) { + prop.write(condition, j >= val.length ? val[0] : val[j], i) + i += prop.size + } + return condition + } + + if (op === FilterOpCompare.range || op === FilterOpCompare.nrange) { + const { condition, offset } = createCondition(prop, op, prop.size * 2) + prop.write(condition, val[0], offset) + prop.write(condition, val[1] - val[0], offset + prop.size) + return condition + } + + const { condition, offset } = createCondition(prop, op) + prop.write(condition, val[0], offset) + return condition +} + +export const variableComparison = ( + prop: PropDef, + operator: Operator, + val: any[], + opts?: FilterOpts, +) => { + const op = operatorToEnum(operator, val, prop) + + if (op === FilterOpCompare.inc || op === FilterOpCompare.ninc) { + if (val.length === 1) { + const size = native.stringByteLength(val[0]) + const { condition, offset } = createCondition(prop, op, size) + ENCODER.encodeInto(val[0], condition.subarray(offset)) + return condition + } + } + + throw new Error( + `Filter comparison not supported "${operator}" ${prop.path.join('.')}`, + ) +} + +export const comparison = ( + prop: PropDef, + op: Operator, + val: any, + opts?: FilterOpts, +) => { + if (!Array.isArray(val)) { + val = [val] + } + if (prop.size > 0) { + return fixedComparison(prop, op, val, opts) + } + return variableComparison(prop, op, val, opts) +} diff --git a/src/db-query/ast/filter/filter.ts b/src/db-query/ast/filter/filter.ts new file mode 100644 index 0000000000..468214ce66 --- /dev/null +++ b/src/db-query/ast/filter/filter.ts @@ -0,0 +1,210 @@ +import { + isPropDef, + PropDef, + PropTree, + TypeDef, +} from '../../../schema/defs/index.js' +import { writeUint64 } from '../../../utils/uint8.js' +import { + FilterConditionAlignOf, + FilterOpCompare, + PropType, + writeFilterConditionProps, +} from '../../../zigTsExports.js' +import { Ctx, FilterAst, FilterOp } from '../ast.js' +import { comparison, conditionByteSize, createCondition } from './comparison.js' + +type WalkCtx = { + tree: PropTree + prop: number + main: { prop: PropDef; ops: FilterOp[] }[] +} + +const walk = ( + ast: FilterAst, + ctx: Ctx, + typeDef: TypeDef, + walkCtx: WalkCtx, + edgeType?: TypeDef, +) => { + const { tree, main } = walkCtx + + for (const field in ast.props) { + const prop = tree.props.get(field) + const astProp = ast.props[field] + const ops = astProp.ops + if (isPropDef(prop)) { + if (prop.type === PropType.references) { + // references(astProp, ctx, prop) + } else if (prop.type === PropType.reference) { + // this can be added here + // need this again... + // reference(astProp, ctx, prop) + } else if (ops) { + if (prop.id === 0) { + main.push({ prop, ops }) + } else { + walkCtx.prop = prop.id + for (const op of ops) { + // can prob just push this directly + const condition = comparison(prop, op.op, op.val, op.opts) + ctx.query.set(condition, ctx.query.length) + } + } + } + } else { + if (prop) { + walk(astProp, ctx, typeDef, { + main, + tree: prop, + prop: walkCtx.prop, + }) + } else { + // if EN, if NL + throw new Error(`Prop does not exist ${field}`) + } + } + } + return walkCtx +} + +const MAX_INDEX = 11e9 - 1e9 + +const indexOf = ( + haystack: Uint8Array, + needle: Uint8Array, + offset: number, + end: number, +) => { + if (needle.length === 0) return 0 + for (let i = offset; i <= end - needle.length; i++) { + let found = true + for (let j = 0; j < needle.length; j++) { + if (haystack[i + j] !== needle[j]) { + found = false + break + } + } + if (found) return i + } + return -1 +} + +// filter + EDGE + +// EDGE ONLY? +export const filter = ( + ast: FilterAst, + ctx: Ctx, + typeDef: TypeDef, + filterIndex: number = 0, + lastProp: number = PropType.id, + edgeType?: TypeDef, + prevOr?: Uint8Array, +): number => { + const startIndex = ctx.query.length + + const walkCtx = { + main: [], + tree: typeDef.tree, + prop: lastProp, + } + + if (ast.or) { + ctx.query.reserve(conditionByteSize(8, 8)) + } + + let andOrReplace: Uint8Array | void = undefined + + const { main } = walk(ast, ctx, typeDef, walkCtx) + + for (const { prop, ops } of main) { + walkCtx.prop = prop.id + for (const op of ops) { + const condition = comparison(prop, op.op, op.val, op.opts) + ctx.query.set(condition, ctx.query.length) + } + } + + if (ast.and) { + if (ast.or) { + const { offset, condition } = createCondition( + { + id: PropType.id, + size: 8, + start: 0, + type: PropType.null, + }, + FilterOpCompare.nextOrIndex, + ) + writeUint64( + condition, + MAX_INDEX + Math.floor(Math.random() * 1e9), + offset, + ) + andOrReplace = condition + filter( + ast.and, + ctx, + typeDef, + ctx.query.length - startIndex, + walkCtx.prop, + edgeType, + andOrReplace, + ) + } else { + filter(ast.and, ctx, typeDef, ctx.query.length - startIndex, walkCtx.prop) + } + } + + if (ast.or) { + const resultSize = ctx.query.length - startIndex + const nextOrIndex = resultSize + filterIndex + + const { offset, condition } = createCondition( + { id: lastProp, size: 8, start: 0, type: PropType.null }, + FilterOpCompare.nextOrIndex, + ) + + writeUint64(condition, nextOrIndex, offset) + ctx.query.set(condition, startIndex) + + if (prevOr) { + if (ast.or.or) { + } else { + ctx.query.set(prevOr, ctx.query.length) + prevOr = undefined + } + } + + if (andOrReplace) { + // REMOVE THIS! FIX + let index = indexOf( + ctx.query.data, + andOrReplace, + startIndex, + ctx.query.length, + ) + if (index === -1) { + throw new Error('Cannot find AND OR REPLACE INDEX') + } + writeUint64(ctx.query.data, nextOrIndex, offset + index) + writeFilterConditionProps.prop( + ctx.query.data, + walkCtx.prop, + index + FilterConditionAlignOf + 1, + ) + } + filter( + ast.or, + ctx, + typeDef, + ctx.query.length - startIndex + filterIndex, + walkCtx.prop, + edgeType, + prevOr, + ) + } + + return ctx.query.length - startIndex +} diff --git a/src/db-query/ast/filter/operatorToEnum.ts b/src/db-query/ast/filter/operatorToEnum.ts new file mode 100644 index 0000000000..2dae54c5d3 --- /dev/null +++ b/src/db-query/ast/filter/operatorToEnum.ts @@ -0,0 +1,62 @@ +import { FilterOpCompareEnum, FilterOpCompare } from '../../../zigTsExports.js' +import { Operator } from '../ast.js' +import { PropDef } from '../../../schema/defs/index.js' + +export const operatorToEnum = ( + op: Operator, + val: any[], + prop: PropDef, +): FilterOpCompareEnum => { + const size = prop.size + const vectorLen = 16 / size + + if (op === '=' && val.length === 1) { + return FilterOpCompare.eq + } + + if (op === '=' && val.length > vectorLen) { + return FilterOpCompare.eqBatch + } + + if (op === '=' && val.length <= vectorLen) { + return FilterOpCompare.eqBatchSmall + } + + if (op === '!=' && val.length === 1) { + return FilterOpCompare.neq + } + + if (op === '!=' && val.length > vectorLen) { + return FilterOpCompare.neqBatch + } + + if (op === '!=' && val.length <= vectorLen) { + return FilterOpCompare.neqBatchSmall + } + + if (op === '>') { + return FilterOpCompare.gt + } + + if (op === '>=') { + return FilterOpCompare.ge + } + + if (op === '<') { + return FilterOpCompare.lt + } + + if (op === '<=') { + return FilterOpCompare.le + } + + if (op === 'includes') { + return FilterOpCompare.inc + } + + if (op === '!includes') { + return FilterOpCompare.ninc + } + + throw new Error(`Unsupported compare operator ${op}`) +} diff --git a/src/db-query/ast/include.ts b/src/db-query/ast/include.ts new file mode 100644 index 0000000000..fc76d55939 --- /dev/null +++ b/src/db-query/ast/include.ts @@ -0,0 +1,146 @@ +import { + PropDef, + PropTree, + TypeDef, + isPropDef, +} from '../../schema/defs/index.js' +import { + IncludeOp, + MAIN_PROP, + PropType, + pushIncludeHeader, + pushIncludePartialHeader, + pushIncludePartialProp, +} from '../../zigTsExports.js' +import { Ctx, Include, QueryAst } from './ast.js' +import { references } from './multiple.js' +import { readPropDef } from './readSchema.js' +import { reference } from './single.js' + +type WalkCtx = { + tree: PropTree + main: { prop: PropDef; include: Include }[] +} + +const includeProp = (ctx: Ctx, prop: PropDef, include: Include) => { + pushIncludeHeader(ctx.query, { + op: IncludeOp.default, + prop: prop.id, + propType: prop.type, + }) + ctx.readSchema.props[prop.id] = readPropDef(prop, ctx.locales, include) +} + +const includeMainProps = ( + ctx: Ctx, + props: { prop: PropDef; include: Include }[], + typeDef: TypeDef, +) => { + props.sort((a, b) => + a.prop.start < b.prop.start ? -1 : a.prop.start === b.prop.start ? 0 : 1, + ) + + let i = 0 + for (const { include, prop } of props) { + ctx.readSchema.main.props[i] = readPropDef(prop, ctx.locales, include) + ctx.readSchema.main.len += prop.size + i += prop.size + } + if (props.length === typeDef.main.length) { + pushIncludeHeader(ctx.query, { + op: IncludeOp.default, + prop: 0, + propType: PropType.microBuffer, + }) + } else { + pushIncludePartialHeader(ctx.query, { + op: IncludeOp.partial, + prop: MAIN_PROP, + propType: PropType.microBuffer, + amount: props.length, + }) + for (const { prop, include } of props) { + pushIncludePartialProp(ctx.query, { + start: prop.start, + size: prop.size, + }) + } + } +} + +const walkProp = ( + astProp: QueryAst, + ctx: Ctx, + typeDef: TypeDef, + walkCtx: WalkCtx, + field: string, +) => { + const { main, tree } = walkCtx + const prop = tree.props.get(field) + const include = astProp.include + + if (isPropDef(prop)) { + if (prop.type === PropType.references) { + references(astProp, ctx, prop) + } else if (prop.type === PropType.reference) { + reference(astProp, ctx, prop) + } else if (include) { + if (prop.id === 0) { + main.push({ prop, include }) + } else { + includeProp(ctx, prop, include) + } + } + } else if (prop) { + walk(astProp, ctx, typeDef, { + main, + tree: prop, + }) + } else { + // if EN, if NL + throw new Error(`Prop does not exist ${field}`) + } +} + +const walk = (ast: QueryAst, ctx: Ctx, typeDef: TypeDef, walkCtx: WalkCtx) => { + if (ast.include) { + ast.props ??= {} + ast.props['*'] ??= {} + ast.props['*'].include ??= ast.include + } + // if ast.include.glob === '*' include all from schema + // youri thinks we can just set this as a field, simpler (also for nested things like bla.**.id) + // same for ast.include.glob === '**' + for (const field in ast.props) { + const astProp = ast.props[field] + if (field === 'id') { + continue + } + if (field === '*') { + for (const [field, prop] of walkCtx.tree.props) { + if (!('ref' in prop)) { + walkProp(astProp, ctx, typeDef, walkCtx, field) + } + } + } else if (field === '**') { + for (const [field, prop] of typeDef.tree.props) { + if ('ref' in prop) { + walkProp(astProp, ctx, typeDef, walkCtx, field) + } + } + } else { + walkProp(astProp, ctx, typeDef, walkCtx, field) + } + } + return walkCtx +} + +export const include = (ast: QueryAst, ctx: Ctx, typeDef: TypeDef): number => { + const startIndex = ctx.query.length + const { main } = walk(ast, ctx, typeDef, { + main: [], + tree: typeDef.tree, + }) + if (main.length) includeMainProps(ctx, main, typeDef) + return ctx.query.length - startIndex +} diff --git a/src/db-query/ast/iteratorType.ts b/src/db-query/ast/iteratorType.ts new file mode 100644 index 0000000000..c083506aa1 --- /dev/null +++ b/src/db-query/ast/iteratorType.ts @@ -0,0 +1,135 @@ +import { PropDef } from '../../schema.js' +import { + QUERY_ITERATOR_DEFAULT, + QUERY_ITERATOR_EDGE, + QUERY_ITERATOR_EDGE_INCLUDE, + QUERY_ITERATOR_SEARCH, + QUERY_ITERATOR_SEARCH_VEC, + QueryIteratorTypeEnum, + QueryHeader, + QueryIteratorType, + QueryIteratorTypeInverse, +} from '../../zigTsExports.js' +import { EdgeStrategy, QueryAst } from './ast.js' + +export const getIteratorType = ( + header: QueryHeader, + ast: QueryAst, +): QueryIteratorTypeEnum => { + const hasFilter: boolean = header.filterSize != 0 + const edge: boolean = header.edgeTypeId != 0 + const edgeInclude: boolean = header.edgeSize != 0 + const hasSort = header.sort + const isDesc = ast.order === 'desc' + // const hasSearch = false + // const isVector = false + + let base = QUERY_ITERATOR_DEFAULT + + if (edge && !edgeInclude) { + base = QUERY_ITERATOR_EDGE + } + + if (edgeInclude) { + base = QUERY_ITERATOR_EDGE_INCLUDE + } + + // console.log('EDGE TIME', edgeInclude, edge) + + // if (hasSearch && !isVector) { + // base = QUERY_ITERATOR_SEARCH + // } + + // if (hasSearch && isVector) { + // base = QUERY_ITERATOR_SEARCH_VEC + // } + + // if (hasSearch) { + // if (hasFilter) { + // base += 1 + // } + // } + + if (hasSort) { + if (hasFilter) { + if (isDesc) { + base += 7 + } else { + base += 3 + } + } else if (isDesc) { + base += 5 + } else { + base += 1 + } + } else if (hasFilter) { + if (isDesc) { + base += 6 + } else { + base += 2 + } + } else if (isDesc) { + base += 4 + } else { + base += 0 + } + + console.log( + QueryIteratorTypeInverse[base], + base, + QueryIteratorType.edgeFilterOnEdge, + ) + + if ( + ast.filter?.edgeStrategy === EdgeStrategy.edgeOnly || + ast.filter?.edgeStrategy === EdgeStrategy.edgeAndProps + ) { + if (ast.filter?.edgeStrategy === EdgeStrategy.edgeAndProps) { + if (header.edgeSize === 0) { + if (base === QueryIteratorType.edgeFilter) { + base = QueryIteratorType.edgeFilterAndFilterOnEdge + } else if (base === QueryIteratorType.edgeDescFilter) { + base = QueryIteratorType.edgeFilterAndFilterOnEdgeDesc + } else if (base === QueryIteratorType.edgeFilterSort) { + base = QueryIteratorType.edgeFilterAndFilterOnEdgeSort + } else if (base === QueryIteratorType.edgeDescFilterSort) { + base = QueryIteratorType.edgeFilterAndFilterOnEdgeSortDesc + } + } else { + if (base === QueryIteratorType.edgeIncludeFilter) { + base = QueryIteratorType.edgeIncludeFilterAndFilterOnEdge + } else if (base === QueryIteratorType.edgeIncludeDescFilter) { + base = QueryIteratorType.edgeIncludeFilterAndFilterOnEdgeDesc + } else if (base === QueryIteratorType.edgeIncludeFilterSort) { + base = QueryIteratorType.edgeIncludeFilterAndFilterOnEdgeSort + } else if (base === QueryIteratorType.edgeIncludeDescFilterSort) { + base = QueryIteratorType.edgeIncludeFilterAndFilterOnEdgeSortDesc + } + } + } else { + if (header.edgeSize === 0) { + if (base === QueryIteratorType.edge) { + base = QueryIteratorType.edgeFilterOnEdge + } else if (base === QueryIteratorType.edgeDesc) { + base = QueryIteratorType.edgeFilterOnEdgeDesc + } else if (base === QueryIteratorType.edgeSort) { + base = QueryIteratorType.edgeFilterOnEdgeSort + } else if (base === QueryIteratorType.edgeDescSort) { + base = QueryIteratorType.edgeFilterOnEdgeSortDesc + } + } else { + if (base === QueryIteratorType.edgeInclude) { + base = QueryIteratorType.edgeIncludeFilterOnEdge + } else if (base === QueryIteratorType.edgeIncludeDesc) { + base = QueryIteratorType.edgeIncludeFilterOnEdgeDesc + } else if (base === QueryIteratorType.edgeIncludeSort) { + base = QueryIteratorType.edgeIncludeFilterOnEdgeSort + } else if (base === QueryIteratorType.edgeIncludeDescSort) { + base = QueryIteratorType.edgeIncludeFilterOnEdgeSortDesc + } + } + } + } + + return base as QueryIteratorTypeEnum +} diff --git a/src/db-query/ast/multiple.ts b/src/db-query/ast/multiple.ts new file mode 100644 index 0000000000..e5659544e0 --- /dev/null +++ b/src/db-query/ast/multiple.ts @@ -0,0 +1,153 @@ +import { ReaderSchemaEnum } from '../../protocol/index.js' +import { PropDef, TypeDef } from '../../schema/defs/index.js' +import { + pushQueryHeader, + QueryType, + ID_PROP, + writeQueryHeaderProps as props, + QueryIteratorType, + readQueryHeader, + pushSortHeader, +} from '../../zigTsExports.js' +import { Ctx, EdgeStrategy, QueryAst } from './ast.js' +import { filter } from './filter/filter.js' +import { include } from './include.js' +import { getIteratorType } from './iteratorType.js' +import { readPropDef, readSchema } from './readSchema.js' +import { isAggregateAst, pushAggregatesQuery } from './aggregates.js' +import { sort } from './sort.js' + +export const defaultMultiple = (ast: QueryAst, ctx: Ctx, typeDef: TypeDef) => { + const rangeStart = ast.range?.start || 0 + + if (isAggregateAst(ast)) { + pushAggregatesQuery(ast, ctx, typeDef) + return + } + + const headerIndex = pushQueryHeader(ctx.query, { + op: QueryType.default, + prop: ID_PROP, + includeSize: 0, + typeId: typeDef.id, + offset: rangeStart, + limit: (ast.range?.end || 1000) + rangeStart, + sort: !!ast.sort, + filterSize: 0, + // Lets remove all this from the header and make specific ones + searchSize: 0, + iteratorType: QueryIteratorType.default, + edgeTypeId: 0, + edgeSize: 0, + edgeFilterSize: 0, + size: 0, + }) + + if (ast.sort) { + pushSortHeader(ctx.query, sort(ast, ctx, typeDef)) + } + + if (ast.filter) { + const filterSize = filter(ast.filter, ctx, typeDef) + props.filterSize(ctx.query.data, filterSize, headerIndex) + } + + props.includeSize(ctx.query.data, include(ast, ctx, typeDef), headerIndex) + + props.iteratorType( + ctx.query.data, + getIteratorType(readQueryHeader(ctx.query.data, headerIndex), ast), + headerIndex, + ) +} + +// ADD IDS + +export const references = (ast: QueryAst, ctx: Ctx, prop: PropDef) => { + const rangeStart = ast.range?.start || 0 + const headerIndex = pushQueryHeader(ctx.query, { + op: QueryType.references, + prop: prop.id, + includeSize: 0, + typeId: prop.ref!.id, + offset: rangeStart, + limit: (ast.range?.end || 100) + rangeStart, + sort: !!ast.sort, + filterSize: 0, + searchSize: 0, + iteratorType: QueryIteratorType.default, + edgeTypeId: prop.edges?.id ?? 0, + edgeSize: 0, + edgeFilterSize: 0, + size: 0, // this is only used for [IDS] handle this differently + }) + + const schema = readSchema() + ctx.readSchema.refs[prop.id] = { + schema, + prop: readPropDef(prop, ctx.locales, ast.include), + } + + if (ast.sort) { + pushSortHeader(ctx.query, sort(ast, ctx, prop.ref!, prop)) + } + + if ( + ast.filter && + (ast.filter.edgeStrategy == EdgeStrategy.noEdge || + ast.filter.edgeStrategy == EdgeStrategy.edgeAndProps) + ) { + const filterSize = filter(ast.filter, ctx, prop.ref!) + props.filterSize(ctx.query.data, filterSize, headerIndex) + } + + if ( + ast.filter && + (ast.filter.edgeStrategy == EdgeStrategy.edgeOnly || + ast.filter.edgeStrategy == EdgeStrategy.edgeAndProps) && + ast.filter.edges + ) { + const edges = prop.edges + if (!edges) { + throw new Error('Ref does not have edges (for filter)') + } + props.edgeTypeId(ctx.query.data, edges.id, headerIndex) + const filterSize = filter(ast.filter.edges, ctx, prop.edges!) + props.edgeFilterSize(ctx.query.data, filterSize, headerIndex) + } + + const size = include( + ast, + { + ...ctx, + readSchema: schema, + }, + prop.ref!, + ) + + props.includeSize(ctx.query.data, size, headerIndex) + + if (ast.edges) { + const edges = prop.edges + if (!edges) { + throw new Error('Ref does not have edges') + } + schema.edges = readSchema(ReaderSchemaEnum.edge) + props.edgeTypeId(ctx.query.data, edges.id, headerIndex) + const size = include( + ast.edges, + { + ...ctx, + readSchema: schema.edges, + }, + edges, + ) + props.edgeSize(ctx.query.data, size, headerIndex) + } + + props.iteratorType( + ctx.query.data, + getIteratorType(readQueryHeader(ctx.query.data, headerIndex), ast), + headerIndex, + ) +} diff --git a/src/db-query/ast/readSchema.ts b/src/db-query/ast/readSchema.ts new file mode 100644 index 0000000000..83c620a9b2 --- /dev/null +++ b/src/db-query/ast/readSchema.ts @@ -0,0 +1,104 @@ +import { + ReaderLocales, + ReaderPropDef, + ReaderSchema, + ReaderSchemaEnum, +} from '../../protocol/index.js' +import { SchemaOut } from '../../schema.js' +import { PropDef } from '../../schema/defs/index.js' +import { LangCode, PropType, VectorBaseType } from '../../zigTsExports.js' +import { Include } from './ast.js' + +export const readSchema = (type?: ReaderSchemaEnum): ReaderSchema => { + return { + readId: 0, + props: {}, + search: false, + main: { len: 0, props: {} }, + refs: {}, + type: type ?? ReaderSchemaEnum.default, + } +} + +export const getReaderLocales = (schema: SchemaOut): ReaderLocales => { + const locales: ReaderLocales = {} + for (const lang in schema.locales) { + locales[LangCode[lang]] = lang + } + return locales +} + +export const readPropDef = ( + p: PropDef, + locales: ReaderLocales, //add in ctx + include?: Include, +): ReaderPropDef => { + const readerPropDef: ReaderPropDef = { + path: p.isEdge ? p.path.slice(1) : p.path, + typeIndex: include?.raw ? PropType.binary : p.type, + readBy: 0, + } + // if (opts?.meta) { + // if (opts?.codes?.size === 1 && opts.codes.has(opts.localeFromDef!)) { + // readerPropDef.meta = + // opts?.meta === 'only' + // ? ReaderMeta.onlyFallback + // : ReaderMeta.combinedFallback + // } else { + // readerPropDef.meta = + // opts?.meta === 'only' ? ReaderMeta.only : ReaderMeta.combined + // } + // } + if ('vals' in p) { + // @ts-ignore TODO make this nice + readerPropDef.enum = Array.from(p.vals.keys()) + } + + if (p.type === PropType.text) { + // @ts-ignore TODO make this nice + readerPropDef.locales = Object.keys(p.typeDef.schemaRoot.locales).reduce( + (map, lang: string) => { + map[LangCode[lang]] = lang + return map + }, + {}, + ) + } + if (p.type === PropType.vector || p.type === PropType.colVec) { + // TODO Do something so that this works without ignore + // @ts-ignore + readerPropDef.vectorBaseType = VectorBaseType[p.schema.baseType] + // @ts-ignore + readerPropDef.len = p.schema.size + } + // if (p.type === PropType.cardinality) { + // readerPropDef.cardinalityMode = p.cardinalityMode + // readerPropDef.cardinalityPrecision = p.cardinalityPrecision + // } + // if (p.type === PropType.text && opts?.codes) { + // if (opts.codes.has(0)) { + // readerPropDef.locales = locales + // } else { + // if (opts.codes.size === 1 && opts.codes.has(opts.localeFromDef!)) { + // if (readerPropDef.meta) { + // readerPropDef.locales = {} + // for (const code of opts.codes) { + // readerPropDef.locales[code] = LangCodeInverse[code] + // } + // if (opts.fallBacks) { + // for (const code of opts.fallBacks) { + // readerPropDef.locales[code] = LangCodeInverse[code] + // } + // } + // } + // // dont add locales - interpets it as a normal prop + // } else { + // readerPropDef.locales = {} + // for (const code of opts.codes) { + // readerPropDef.locales[code] = LangCodeInverse[code] + // } + // } + // } + // } + return readerPropDef +} diff --git a/src/db-query/ast/single.ts b/src/db-query/ast/single.ts new file mode 100644 index 0000000000..3523cc7a23 --- /dev/null +++ b/src/db-query/ast/single.ts @@ -0,0 +1,111 @@ +import { ReaderSchemaEnum } from '../../protocol/index.js' +import { PropDef, type TypeDef } from '../../schema/defs/index.js' +import { + pushQueryHeaderSingleReference, + QueryType, + writeQueryHeaderSingleReferenceProps as props, + pushQueryHeaderSingle, + writeQueryHeaderSingleProps, + PropType, + Modify, +} from '../../zigTsExports.js' +import { Ctx, QueryAst } from './ast.js' +import { include } from './include.js' +import { readPropDef, readSchema } from './readSchema.js' + +export const defaultSingle = (ast: QueryAst, ctx: Ctx, typeDef: TypeDef) => { + let id = 0 + let prop = 0 + let aliasProp: PropDef | undefined + let aliasValue + + // ADD FILTER AND ALIAS + + if (typeof ast.target === 'number') { + id = ast.target + } else if (typeof ast.target === 'object' && ast.target !== null) { + for (const key in ast.target) { + aliasProp = typeDef.props.get(key) + if (aliasProp?.type !== PropType.alias) { + throw new Error('invalid alias target') + } + prop = aliasProp.id + aliasValue = ast.target[key] + break + } + } else { + throw new Error('ast.target not supported (yet)') + } + + const headerIndex = pushQueryHeaderSingle(ctx.query, { + op: aliasProp ? QueryType.alias : QueryType.id, + includeSize: 0, + typeId: typeDef.id, + filterSize: 0, + aliasSize: 0, + id, + prop, + }) + + if (aliasProp) { + const start = ctx.query.length + aliasProp.pushValue(ctx.query, aliasValue, Modify.create) + writeQueryHeaderSingleProps.aliasSize( + ctx.query.data, + ctx.query.length - start, + headerIndex, + ) + } + + writeQueryHeaderSingleProps.includeSize( + ctx.query.data, + include(ast, ctx, typeDef), + headerIndex, + ) +} + +export const reference = (ast: QueryAst, ctx: Ctx, prop: PropDef) => { + const headerIndex = pushQueryHeaderSingleReference(ctx.query, { + op: QueryType.reference, + typeId: prop.ref!.id, + includeSize: 0, + edgeTypeId: 0, + edgeSize: 0, + prop: prop.id, + }) + + const schema = readSchema() + ctx.readSchema.refs[prop.id] = { + schema, + prop: readPropDef(prop, ctx.locales, ast.include), + } + const size = include( + ast, + { + ...ctx, + readSchema: schema, + }, + prop.ref!, + ) + props.includeSize(ctx.query.data, size, headerIndex) + + if (ast.edges) { + const edges = prop.edges + if (!edges) { + throw new Error('Ref does not have edges') + } + schema.edges = readSchema(ReaderSchemaEnum.edge) + props.op(ctx.query.data, QueryType.referenceEdge, headerIndex) + props.edgeTypeId(ctx.query.data, edges.id, headerIndex) + const size = include( + ast.edges, + { + ...ctx, + readSchema: schema.edges, + }, + edges, + ) + + props.edgeSize(ctx.query.data, size, headerIndex) + } +} diff --git a/src/db-query/ast/sort.ts b/src/db-query/ast/sort.ts new file mode 100644 index 0000000000..6133ef7a05 --- /dev/null +++ b/src/db-query/ast/sort.ts @@ -0,0 +1,42 @@ +import { PropDef, TypeDef } from '../../schema/defs/index.js' +import { LangCode, Order } from '../../zigTsExports.js' +import { Ctx, QueryAst } from './ast.js' + +export const sort = ( + ast: QueryAst, + ctx: Ctx, + typeDef: TypeDef, + fromProp?: PropDef, +) => { + const field = ast.sort!.prop + + if (field[0] === '$') { + const prop = fromProp?.edges?.props.get(field) + if (!prop) { + throw new Error(`Cannot find edge prop in sort ${ast.sort!.prop}`) + } + return { + prop: prop.id, + propType: prop?.type, + start: prop.start, + len: prop.size, + edgeType: fromProp?.edges?.id || 0, + order: ast.order === 'asc' ? Order.asc : Order.desc, + lang: LangCode.none, + } + } + + const prop = typeDef.props.get(field) + if (!prop) { + throw new Error(`Cannot find prop in sort ${ast.sort!.prop}`) + } + return { + prop: prop.id, + propType: prop?.type, + start: prop.start, + len: prop.size, + edgeType: 0, // do in a bit + order: ast.order === 'asc' ? Order.asc : Order.desc, + lang: LangCode.none, + } +} diff --git a/src/db-query/ast/toCtx.ts b/src/db-query/ast/toCtx.ts new file mode 100644 index 0000000000..35d7af7a2f --- /dev/null +++ b/src/db-query/ast/toCtx.ts @@ -0,0 +1,59 @@ +import { crc32 } from '../../db-client/crc32.js' +import { registerQuery } from '../../db-client/query/registerQuery.js' +import { ReaderSchema, ReaderSchemaEnum } from '../../protocol/index.js' +import { PropDef, SchemaOut } from '../../schema.js' +import { getTypeDefs } from '../../schema/defs/getTypeDefs.js' +import { TypeDef } from '../../schema/defs/index.js' +import { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import { Ctx, QueryAst } from './ast.js' +import { defaultMultiple } from './multiple.js' +import { getReaderLocales, readSchema } from './readSchema.js' +import { defaultSingle } from './single.js' + +export const astToQueryCtx = ( + schema: SchemaOut, + ast: QueryAst, + query: AutoSizedUint8Array, + // sub: AutoSizedUint8Array, maybe we can just check the query for subs + // PREPARE +): { + query: Uint8Array + readSchema: ReaderSchema +} => { + query.length = 0 + + if (!ast.type) { + throw new Error('Query requires type') + } + + const typeDefs = getTypeDefs(schema) + const typeDef = typeDefs.get(ast.type) + + if (!typeDef) { + throw new Error('Type does not exist') + } + + const queryIdPos = query.reserveUint32() + + const ctx: Ctx = { + query, + readSchema: readSchema(), + locales: getReaderLocales(schema), + } + + if (ast.target) { + defaultSingle(ast, ctx, typeDef) + ctx.readSchema.type = ReaderSchemaEnum.single + } else { + defaultMultiple(ast, ctx, typeDef) + } + + query.pushUint64(schema.hash) + query.writeUint32(crc32(query.view), queryIdPos) + + // can use same buf for sub + return { + query: query.view.slice(), + readSchema: ctx.readSchema, + } +} diff --git a/src/db-server/blocks.ts b/src/db-server/blocks.ts index b6149e5c51..7504807a67 100644 --- a/src/db-server/blocks.ts +++ b/src/db-server/blocks.ts @@ -66,12 +66,12 @@ export async function loadCommon( db.addOpOnceListener(OpType.loadCommon, id, (buf: Uint8Array) => { const err = readInt32(buf, 0) if (err) { - // TODO read errlog - const errMsg = `Load failed: ${native.selvaStrerror(err)}` const errLog = DECODER.decode(buf.subarray(4)) + const errMsg = `Load failed: ${native.selvaStrerror(err)}\n${errLog}` - console.log(errLog) - db.emit('error', errMsg) + if (!errMsg.includes('ERR_SELVA ENOENT')) { + db.emit('error', errMsg) + } reject(new Error(errMsg)) } else { resolve() diff --git a/src/db-server/index.ts b/src/db-server/index.ts index c55c27de5c..8fc0cafed4 100644 --- a/src/db-server/index.ts +++ b/src/db-server/index.ts @@ -1,17 +1,13 @@ import native from '../native.js' import { rm } from 'node:fs/promises' -import { start, StartOpts } from './start.js' +import { realStart, start, StartOpts } from './start.js' import { migrate } from './migrate/index.js' import { debugServer } from '../utils/debug.js' import { DbShared } from '../shared/DbBase.js' -import { - setNativeSchema, - setSchemaOnServer, - writeSchemaFile, -} from './schema.js' +import { writeSchemaFile } from './schema.js' import { save, SaveOpts } from './blocks.js' -import { OpType, OpTypeEnum, OpTypeInverse } from '../zigTsExports.js' +import { OpType, OpTypeEnum } from '../zigTsExports.js' import { MAX_ID, type SchemaMigrateFns, @@ -19,16 +15,18 @@ import { } from '../schema/index.js' import { readUint32, writeUint32 } from '../utils/uint8.js' +const EXPIRE_BUF = new Uint8Array([0, 0, 0, 0, OpType.expire]) + export class DbServer extends DbShared { dbCtxExternal: any // pointer to zig dbCtx - migrating: number + migrating!: number saveInProgress: boolean = false fileSystemPath: string activeReaders = 0 // processing queries or other DB reads modifyQueue: Map = new Map() queryQueue: Map = new Map() - stopped: boolean // = true does not work + stopped!: boolean // = true does not work saveIntervalInSeconds?: number saveInterval?: NodeJS.Timeout delayInMs?: number @@ -152,6 +150,7 @@ export class DbServer extends DbShared { if (onceListeners) { console.log('💤 Query already staged dont exec again', id) } else { + native.modify(EXPIRE_BUF, this.dbCtxExternal) native.query(buf, this.dbCtxExternal) } }) @@ -173,6 +172,7 @@ export class DbServer extends DbShared { if (qIdListeners) { console.log('💤 Subscription already staged dont exec again', id) } else { + native.modify(EXPIRE_BUF, this.dbCtxExternal) native.query(query, this.dbCtxExternal) } @@ -182,18 +182,19 @@ export class DbServer extends DbShared { // allow 10 ids for special listeners on mod thread modifyCnt = 10 - modify(payload: Uint8Array): Promise { + modify(payload: Uint8Array): Promise { this.modifyCnt++ if (this.modifyCnt > MAX_ID) { this.modifyCnt = 10 } const id = this.modifyCnt++ writeUint32(payload, id, 0) + payload[4] = OpType.modify return new Promise((resolve) => { native.modify(payload, this.dbCtxExternal) this.addOpOnceListener(OpType.modify, id, (v) => { - const resultLen = readUint32(v, 0) - resolve(v.subarray(4, resultLen)) + const end = readUint32(v, 0) + resolve(v.subarray(4, end)) }) }) } @@ -202,7 +203,7 @@ export class DbServer extends DbShared { schema: SchemaOut, transformFns?: SchemaMigrateFns, ): Promise { - if (this.stopped || !this.dbCtxExternal) { + if (this.stopped) { throw new Error('Db is stopped') } @@ -219,9 +220,10 @@ export class DbServer extends DbShared { await migrate(this, this.schema, schema, transformFns) return this.schema.hash } - - setSchemaOnServer(this, schema) - await setNativeSchema(this, schema) + if (this.dbCtxExternal) { + throw new Error('Db is already running') + } + realStart(this, schema) await writeSchemaFile(this, schema) process.nextTick(() => { diff --git a/src/db-server/migrate/index.ts b/src/db-server/migrate/index.ts index 329bab2c00..f6330c8db6 100644 --- a/src/db-server/migrate/index.ts +++ b/src/db-server/migrate/index.ts @@ -5,7 +5,6 @@ import native from '../../native.js' import { DbServer } from '../index.js' import { fileURLToPath } from 'url' import { - setNativeSchema, setSchemaOnServer, writeSchemaFile, } from '../schema.js' @@ -123,7 +122,7 @@ export const migrate = async ( } setSchemaOnServer(tmpDb.server, toSchema) - await setNativeSchema(tmpDb.server, toSchema) + //await setNativeSchema(tmpDb.server, toSchema) if (abort()) { await tmpDb.destroy() diff --git a/src/db-server/schema.ts b/src/db-server/schema.ts index 0d4ba632a7..5e8b532b8d 100644 --- a/src/db-server/schema.ts +++ b/src/db-server/schema.ts @@ -1,49 +1,22 @@ import { DbServer } from './index.js' import { join } from 'node:path' import { writeFile } from 'node:fs/promises' -import native, { idGenerator } from '../native.js' -import { schemaToSelvaBuffer } from './schemaSelvaBuffer.js' -import { readUint32, writeUint32 } from '../utils/index.js' -import { OpType } from '../zigTsExports.js' -import { serialize, updateTypeDefs, type SchemaOut } from '../schema/index.js' +import { + LangCode, + Modify, + PropTypeSelva, + pushSelvaSchemaHeader, + pushSelvaSchemaMicroBuffer, +} from '../zigTsExports.js' +import { + BLOCK_CAPACITY_DEFAULT, + serialize, + updateTypeDefs, + type SchemaOut, +} from '../schema/index.js' import { SCHEMA_FILE } from '../index.js' - -const schemaOpId = idGenerator() - -async function getSchemaIds(db: DbServer): Promise { - const id = schemaOpId.next().value - const msg = new Uint8Array(5) - - writeUint32(msg, id, 0) - msg[4] = OpType.getSchemaIds - - return new Promise((resolve) => { - db.addOpOnceListener(OpType.getSchemaIds, id, (buf: Uint8Array) => { - const ids = new Uint32Array(buf.length / Uint32Array.BYTES_PER_ELEMENT) - const tmp = new Uint8Array(ids.buffer) - tmp.set(buf) - resolve(ids) - }) - native.query(msg, db.dbCtxExternal) - }) -} - -function setSchemaIds(db: DbServer, ids: Uint32Array): Promise { - const id = schemaOpId.next().value - - const msg = new Uint8Array(5 + ids.byteLength) - - writeUint32(msg, id, 0) - msg[4] = OpType.setSchemaIds - msg.set(new Uint8Array(ids.buffer, ids.byteOffset), 5) - - return new Promise((resolve) => { - db.addOpOnceListener(OpType.setSchemaIds, id, () => { - resolve() - }) - native.modify(msg, db.dbCtxExternal) - }) -} +import { getTypeDefs, propIndexOffset } from '../schema/defs/getTypeDefs.js' +import { AutoSizedUint8Array } from '../utils/AutoSizedUint8Array.js' export const setSchemaOnServer = async ( server: DbServer, @@ -66,61 +39,58 @@ export const writeSchemaFile = async (server: DbServer, schema: SchemaOut) => { } } -export async function createSelvaType( - server: DbServer, - typeId: number, - schema: Uint8Array, -): Promise { - const msg = new Uint8Array(5 + schema.byteLength) +export const makeNativeSchema = (schema: SchemaOut): Uint8Array => { + const buf = new AutoSizedUint8Array() + const typeDefs = getTypeDefs(schema) - writeUint32(msg, typeId, 0) + for (const typeDef of typeDefs.values()) { + let nrFixedFields = 1 + let nrVirtualFields = 0 - msg[4] = OpType.createType - msg.set(schema, 5) - return new Promise((resolve, reject) => { - server.addOpOnceListener(OpType.createType, typeId, (buf: Uint8Array) => { - const err = readUint32(buf, 0) - if (err) { - const errMsg = `Create type ${typeId} failed: ${native.selvaStrerror(err)}` - server.emit('error', errMsg) - reject(new Error(errMsg)) - } else { - resolve() + buf.pushUint16(typeDef.id) + const typeLenIndex = buf.reserveUint32() + const startIndex = buf.length + + for (const prop of typeDef.separate) { + const offset = propIndexOffset(prop) + if (offset < 0) { + nrFixedFields++ + } else if (offset > 0) { + nrVirtualFields++ } - server.keepRefAliveTillThisPoint(msg) + } + + pushSelvaSchemaHeader(buf, { + blockCapacity: typeDef.schema.blockCapacity || BLOCK_CAPACITY_DEFAULT, + nrFields: 1 + typeDef.separate.length, + nrFixedFields, + nrVirtualFields, + sdbVersion: 8, }) - native.modify(msg, server.dbCtxExternal) - }) -} -/** - * Set schema used in native code. - * This function should be only called when a new schema is set to an empty DB - * instance. If a `common.sdb` file is loaded then calling this function isn't - * necessary because `common.sdb` already contains the required schema. - */ -export const setNativeSchema = async (server: DbServer, schema: SchemaOut) => { - const types = Object.keys(server.schemaTypesParsed) - const s = schemaToSelvaBuffer(server.schemaTypesParsed) - let maxTid = 0 + // handle main + const mainLen = typeDef.main.reduce((len, { size }) => len + size, 0) + pushSelvaSchemaMicroBuffer(buf, { + type: PropTypeSelva.microBuffer, + len: mainLen, + hasDefault: 1, + }) - await Promise.all( - s.map(async (ab, i) => { - const type = server.schemaTypesParsed[types[i]] - maxTid = Math.max(maxTid, type.id) - try { - await createSelvaType(server, type.id, new Uint8Array(ab)) - } catch (err) { - throw new Error( - `Cannot update schema on selva (native) ${type.type} ${err.message}`, - ) + for (const prop of typeDef.main) { + if ('default' in prop.schema && prop.schema.default) { + prop.pushValue(buf, prop.schema.default, Modify.create, LangCode.none) + } else { + buf.fill(0, buf.length, buf.length + prop.size) } - }), - ) + } - await setSchemaIds(server, new Uint32Array(maxTid)) + // handle separate + for (const prop of typeDef.separate) { + prop.pushSelvaSchema(buf) + } - if (server.fileSystemPath) { - server.save({ skipDirtyCheck: true }).catch(console.error) + buf.writeUint32(buf.length - startIndex, typeLenIndex) } + + return buf.view } diff --git a/src/db-server/schemaSelvaBuffer.ts b/src/db-server/schemaSelvaBuffer.ts deleted file mode 100644 index 74acc66e83..0000000000 --- a/src/db-server/schemaSelvaBuffer.ts +++ /dev/null @@ -1,228 +0,0 @@ -import { - writeUint16, - writeUint32, -} from '../utils/index.js' -import native from '../native.js' -import { LangCode, PropType, PropTypeEnum } from '../zigTsExports.js' -import { - EMPTY_MICRO_BUFFER, - VECTOR_BASE_TYPE_SIZE_MAP, - type PropDef, - type PropDefEdge, - type SchemaTypeDef, -} from '../schema/index.js' -import { write as writeString } from '../db-client/string.js' -import { fillEmptyMain } from '../schema/def/fillEmptyMain.js' -import {Ctx} from '../db-client/modify/Ctx.js' - -const selvaFieldType: Readonly> = { - NULL: 0, - MICRO_BUFFER: 1, - STRING: 2, - TEXT: 3, - REFERENCE: 4, - REFERENCES: 5, - ALIAS: 8, - ALIASES: 9, - COLVEC: 10, -} - -const selvaTypeMap = new Uint8Array(32) // 1.2x faster than JS array -selvaTypeMap[PropType.microBuffer] = selvaFieldType.MICRO_BUFFER -selvaTypeMap[PropType.vector] = selvaFieldType.MICRO_BUFFER -selvaTypeMap[PropType.binary] = selvaFieldType.STRING -selvaTypeMap[PropType.cardinality] = selvaFieldType.STRING -selvaTypeMap[PropType.json] = selvaFieldType.STRING -selvaTypeMap[PropType.string] = selvaFieldType.STRING -selvaTypeMap[PropType.text] = selvaFieldType.TEXT -selvaTypeMap[PropType.reference] = selvaFieldType.REFERENCE -selvaTypeMap[PropType.references] = selvaFieldType.REFERENCES -selvaTypeMap[PropType.alias] = selvaFieldType.ALIAS -selvaTypeMap[PropType.aliases] = selvaFieldType.ALIASES -selvaTypeMap[PropType.colVec] = selvaFieldType.COLVEC - -const EDGE_FIELD_CONSTRAINT_FLAG_DEPENDENT = 0x01 - -const supportedDefaults = new Set([ - PropType.binary, - PropType.string, - PropType.text, - PropType.vector, - PropType.json, // same as binary (Uint8Array) -]) -const STRING_EXTRA_MAX = 10 - -function blockCapacity(blockCapacity: number): Uint8Array { - const buf = new Uint8Array(Uint32Array.BYTES_PER_ELEMENT) - writeUint32(buf, blockCapacity, 0) - return buf -} - -function sepPropCount(props: Array): number { - return props.filter((prop) => prop.separate).length -} - -function makeEdgeConstraintFlags(prop: PropDef): number { - let flags = 0 - - flags |= prop.dependent ? EDGE_FIELD_CONSTRAINT_FLAG_DEPENDENT : 0x00 - - return flags -} - -const propDefBuffer = ( - schema: { [key: string]: SchemaTypeDef }, - prop: PropDef, -): number[] => { - const type = prop.typeIndex - const selvaType = selvaTypeMap[type] - - if (prop.len && (type === PropType.microBuffer || type === PropType.vector)) { - const buf = new Uint8Array(4) - - buf[0] = selvaType - writeUint16(buf, prop.len, 1) - if (prop.default) { - buf[3] = 1 // has default - return [...buf, ...prop.default] - } else { - buf[3] = 0 // has default - return [...buf] - } - } else if (prop.len && type === PropType.colVec) { - const buf = new Uint8Array(5) - - buf[0] = selvaType - const baseSize = VECTOR_BASE_TYPE_SIZE_MAP[prop.vectorBaseType!] - - writeUint16(buf, prop.len / baseSize, 1) // elements - writeUint16(buf, baseSize, 3) // element size - return [...buf] - } else if (type === PropType.reference || type === PropType.references) { - const buf = new Uint8Array(11) - const dstType: SchemaTypeDef = schema[prop.inverseTypeName!] - - buf[0] = selvaType // field type - buf[1] = makeEdgeConstraintFlags(prop) // flags - writeUint16(buf, dstType.id, 2) // dst_node_type - buf[4] = prop.inversePropNumber! // inverse_field - writeUint16(buf, prop.edgeNodeTypeId ?? 0, 5) // edge_node_type - writeUint32(buf, prop.referencesCapped ?? 0, 7) - - return [...buf] - } else if ( - type === PropType.string || - type === PropType.binary || - type === PropType.cardinality || - type === PropType.json - ) { - if (prop.default && supportedDefaults.has(type)) { - const defaultValue = typeof prop.default === 'string' - ? prop.default.normalize('NFKD') - : type === PropType.json - ? JSON.stringify(prop.default) - : prop.default - const defaultLen = defaultValue instanceof Uint8Array - ? defaultValue.byteLength - : 2 * native.stringByteLength(defaultValue) + STRING_EXTRA_MAX - let buf = new Uint8Array(6 + defaultLen) - - buf[0] = selvaType - buf[1] = prop.len < 50 ? prop.len : 0 - const l = (defaultValue instanceof Uint8Array) - ? (buf.set(defaultValue, 6), defaultLen) - : writeString({ buf } as Ctx, defaultValue, 6, LangCode.none, false) - if (l != buf.length) { - buf = buf.subarray(0, 6 + l) - } - writeUint32(buf, l, 2) // default len - - return [...buf] - } else { - const buf = new Uint8Array(6) - - buf[0] = selvaType - buf[1] = prop.len < 50 ? prop.len : 0 - writeUint32(buf, 0, 2) // no default - - return [...buf] - } - } else if (type === PropType.text) { - const fs: number[] = [selvaType, Object.keys(prop.default).length] - // [ type, nrDefaults, [len, default], [len, default]...] - - for (const langName in prop.default) { - const lang = LangCode[langName] - const value = prop.default[langName].normalize('NFKD') - const tmpLen = 4 + 2 * native.stringByteLength(value) + STRING_EXTRA_MAX - let buf = new Uint8Array(tmpLen) - - const l = writeString({ buf } as Ctx, value, 4, lang, false) - if (l != buf.length) { - buf = buf.subarray(0, 4 + l) - } - writeUint32(buf, l, 0) // length of the default - fs.push(...buf) - } - - return fs - } - return [selvaType] -} - -export function schemaToSelvaBuffer(schema: { - [key: string]: SchemaTypeDef -}): ArrayBuffer[] { - return Object.values(schema).map((t) => { - const props: PropDef[] = Object.values(t.props) - const rest: PropDef[] = [] - const nrFields = 1 + sepPropCount(props) - let nrFixedFields = 1 - let virtualFields = 0 - - if (nrFields >= 250) { - throw new Error('Too many fields') - } - - const mainLen = t.mainLen === 0 ? 1 : t.mainLen - const main = { - ...EMPTY_MICRO_BUFFER, - default: fillEmptyMain(props, mainLen), - len: mainLen, - } - - for (const f of props) { - if (!f.separate) { - continue - } - - if (f.default && supportedDefaults.has(f.typeIndex)) { - nrFixedFields++ - } else if ( - f.typeIndex === PropType.reference || - f.typeIndex === PropType.references - ) { - nrFixedFields++ - } else if ( - f.typeIndex === PropType.alias || - f.typeIndex === PropType.aliases || - f.typeIndex === PropType.colVec - ) { - // We assume that these are always the last props! - virtualFields++ - } - rest.push(f) - } - - rest.sort((a, b) => a.prop - b.prop) - return Uint8Array.from([ - ...blockCapacity(t.blockCapacity), // u32 blockCapacity - nrFields, // u8 nrFields - nrFixedFields, // u8 nrFixedFields - virtualFields, // u8 nrVirtualFields - 8, // u8 version (generally follows the sdb version) - ...propDefBuffer(schema, main), - ...rest.map((f) => propDefBuffer(schema, f)).flat(1), - ]).buffer - }) -} diff --git a/src/db-server/start.ts b/src/db-server/start.ts index 6cdbc5ae5e..3aab6a4f6e 100644 --- a/src/db-server/start.ts +++ b/src/db-server/start.ts @@ -5,14 +5,15 @@ import { rm, mkdir, readFile } from 'node:fs/promises' import { join } from 'node:path' import { loadCommon, } from './blocks.js' import { readUint32, wait } from '../utils/index.js' -import { setSchemaOnServer } from './schema.js' +import { setSchemaOnServer, makeNativeSchema } from './schema.js' import { OpTypeEnum, BridgeResponseEnum, BridgeResponse, } from '../zigTsExports.js' import { deSerialize } from '../schema/serialize.js' -import { SCHEMA_FILE, SCHEMA_FILE_DEPRECATED } from '../index.js' +import { SCHEMA_FILE } from '../index.js' +import { SchemaOut } from '../schema.js' export type StartOpts = { clean?: boolean @@ -71,16 +72,7 @@ const handleModifyResponse = (db: DbServer, arr: ArrayBuffer) => { } } -export async function start(db: DbServer, opts?: StartOpts) { - const path = db.fileSystemPath - const noop = () => {} - - if (opts?.clean) { - await rm(path, { recursive: true, force: true }).catch(noop) - } - - await mkdir(path, { recursive: true }).catch(noop) - +export async function realStart(db: DbServer, schema: SchemaOut) { let nrThreads: number nrThreads = ((nrThreads = availableParallelism()), nrThreads < 2 ? 2 : nrThreads - 1) @@ -94,37 +86,51 @@ export async function start(db: DbServer, opts?: StartOpts) { } else if (id === BridgeResponse.flushModify) { handleModifyResponse(db, buffer) } - }, db.fileSystemPath, nrThreads) + }, db.fileSystemPath, nrThreads, makeNativeSchema(schema)) - // Load the common dump try { + setSchemaOnServer(db, schema) await loadCommon(db) - - // Load schema - const schema = await readFile(join(path, SCHEMA_FILE)).catch(noop) - if (schema) { - const s = deSerialize(schema) - setSchemaOnServer(db, s) - } else { - const schemaJson = await readFile(join(path, SCHEMA_FILE_DEPRECATED)) - if (schemaJson) { - setSchemaOnServer(db, JSON.parse(schemaJson.toString())) - } - } } catch (e) { - console.error(e.message) + if (!e.message.includes('ERR_SELVA ENOENT')) { + throw e + } } - // use timeout - if (db.saveIntervalInSeconds && db.saveIntervalInSeconds > 0) { - db.saveInterval ??= setInterval(() => { - db.save() - }, db.saveIntervalInSeconds * 1e3) + if (db.fileSystemPath) { + db.save({ skipDirtyCheck: true }).catch(console.error) + + // use timeout + if (db.saveIntervalInSeconds && db.saveIntervalInSeconds > 0) { + db.saveInterval ??= setInterval(() => { + db.save() + }, db.saveIntervalInSeconds * 1e3) + } } if (db.schema) { db.emit('schema', db.schema) } +} + +export async function start(db: DbServer, opts?: StartOpts) { + const path = db.fileSystemPath + const noop = () => {} + + if (opts?.clean) { + await rm(path, { recursive: true, force: true }).catch(noop) + } + + await mkdir(path, { recursive: true }).catch(noop) + + try { + const schema = await readFile(join(path, SCHEMA_FILE)) + realStart(db, deSerialize(schema)) + } catch (e) { + if (e.code !== 'ENOENT') { + throw new Error('Schema read failed', { cause: e }) + } + } if (opts?.delayInMs) { db.delayInMs = opts.delayInMs diff --git a/src/functions/client.ts b/src/functions/client.ts index f7f7619980..d5587bb977 100644 --- a/src/functions/client.ts +++ b/src/functions/client.ts @@ -9,8 +9,8 @@ import type { DbClient } from '../db-client/index.js' export abstract class BasedFunctionClient { server: any - db: DbClient - dbs: Record + db!: DbClient + dbs!: Record abstract call(name: string, payload?: any, ctx?: Context): Promise diff --git a/src/index.ts b/src/index.ts index 0706436eb6..4d3fcfe130 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,12 +1,21 @@ -import { stringCompress } from './db-client/string.js' +// import { stringCompress } from './db-client/string.js' import { DbServer } from './db-server/index.js' import { DbClient } from './db-client/index.js' import { debugMode, debugServer } from './utils/debug.js' import { getDefaultHooks } from './db-client/hooks.js' import { Emitter } from './shared/Emitter.js' import wait from './utils/wait.js' -export { stringCompress } +// export { stringCompress } export { DbClient, DbServer } +export type { + BasedCreatePromise, + BasedUpdatePromise, + BasedDeletePromise, + BasedUpsertPromise, + BasedInsertPromise, + ModifyOpts, +} from './db-client/index.js' +export type { InferPayload, InferTarget } from './db-client/modify/types.js' export { xxHash64 } from './db-client/xxHash64.js' export { crc32 } from './db-client/crc32.js' export { default as createHash } from './db-server/dbHash.js' @@ -15,13 +24,14 @@ export * from './db-client/query/query.js' export * from './db-client/query/BasedDbQuery.js' export * from './db-client/query/BasedQueryResponse.js' export * from './db-client/hooks.js' +export { BasedModify } from './db-client/modify/index.js' -export const SCHEMA_FILE_DEPRECATED = 'schema.json' export const SCHEMA_FILE = 'schema.bin' export const COMMON_SDB_FILE = 'common.sdb' export type BasedDbOpts = { path: string + /** Minimum: 256 */ maxModifySize?: number debug?: boolean | 'server' | 'client' saveIntervalInSeconds?: number @@ -64,9 +74,9 @@ export class BasedDb extends Emitter { return this.client.create.apply(this.client, arguments) } - copy: DbClient['copy'] = function (this: BasedDb) { - return this.client.copy.apply(this.client, arguments) - } + // copy: DbClient['copy'] = function (this: BasedDb) { + // return this.client.copy.apply(this.client, arguments) + // } update: DbClient['update'] = function (this: BasedDb) { return this.client.update.apply(this.client, arguments) @@ -84,9 +94,9 @@ export class BasedDb extends Emitter { return this.client.delete.apply(this.client, arguments) } - expire: DbClient['expire'] = function (this: BasedDb) { - return this.client.expire.apply(this.client, arguments) - } + // expire: DbClient['expire'] = function (this: BasedDb) { + // return this.client.expire.apply(this.client, arguments) + // } query: DbClient['query'] = function (this: BasedDb) { return this.client.query.apply(this.client, arguments) @@ -135,7 +145,7 @@ export class BasedDb extends Emitter { await this.isModified() // Tmp fix: Gives node time to GC existing buffers else it can incorrectly re-asign to mem // Todo: clear all active queries, queues ETC - await wait(Math.max(this.client.flushTime + 10, 10)) + await wait(Math.max(this.client.modifyCtx.flushTime + 10, 10)) this.client.destroy() await this.server.destroy() } diff --git a/src/native.ts b/src/native.ts index 211ece8bf6..e8555a29bc 100644 --- a/src/native.ts +++ b/src/native.ts @@ -35,10 +35,10 @@ const native = { return db.modify(q, dbCtx) }, - start: (bridge: (id: number, payload: any) => void, fsPath: string, nrThreads: number) => { + start: (bridge: (id: number, payload: any) => void, fsPath: string, nrThreads: number, selvaSchema: Uint8Array) => { const fsPathBuf = new Uint8Array(db.stringByteLength(fsPath)) ENCODER.encodeInto(fsPath, fsPathBuf) - return db.start(bridge, fsPathBuf, nrThreads) + return db.start(bridge, fsPathBuf, nrThreads, selvaSchema) }, stop: (dbCtx: any) => { diff --git a/src/protocol/db-read/aggregate.ts b/src/protocol/db-read/aggregate.ts index 3e8f6475ba..16c83f7b23 100644 --- a/src/protocol/db-read/aggregate.ts +++ b/src/protocol/db-read/aggregate.ts @@ -140,8 +140,9 @@ const readAggValues = ( const val = readFn(result, baseOffset + agg.resultPos) - const pathSuffix = - agg.type === AggFunction.count ? [] : [AggFunctionInverse[agg.type]] + const typeName = AggFunctionInverse[agg.type] + + const pathSuffix = agg.type === AggFunction.count ? [] : [typeName] // MV: check for edgesagg.path[1][0] == '$` setByPath(targetObject, [...agg.path, ...pathSuffix], val) diff --git a/src/protocol/db-read/main.ts b/src/protocol/db-read/main.ts index 80969d38ef..237c39e1e6 100644 --- a/src/protocol/db-read/main.ts +++ b/src/protocol/db-read/main.ts @@ -20,6 +20,7 @@ const readMainValue = ( item: Item, ) => { const typeIndex = prop.typeIndex + if (typeIndex === PropType.timestamp) { addProp(prop, readInt64(result, i), item) } else if (typeIndex === PropType.number) { @@ -34,7 +35,10 @@ const readMainValue = ( } else { addProp(prop, prop.enum![result[i] - 1], item) } - } else if (typeIndex === PropType.string) { + } else if ( + typeIndex === PropType.string || + typeIndex === PropType.stringFixed + ) { const len = result[i] i++ const value = len === 0 ? '' : readUtf8(result, i, len) @@ -48,12 +52,15 @@ const readMainValue = ( } else { addProp(prop, value, item) } - } else if (typeIndex === PropType.json) { + } else if (typeIndex === PropType.json || typeIndex === PropType.jsonFixed) { const len = result[i] i++ const value = len === 0 ? null : global.JSON.parse(readUtf8(result, i, len)) addProp(prop, value, item) - } else if (typeIndex === PropType.binary) { + } else if ( + typeIndex === PropType.binary || + typeIndex === PropType.binaryFixed + ) { const len = result[i] i++ const value = len === 0 ? new Uint8Array(0) : result.subarray(i, i + len) diff --git a/src/protocol/db-read/prop.ts b/src/protocol/db-read/prop.ts index e26bfea113..385db3db71 100644 --- a/src/protocol/db-read/prop.ts +++ b/src/protocol/db-read/prop.ts @@ -4,6 +4,7 @@ import { addProp, addLangProp } from './addProps.js' import { readString } from './string.js' import { readVector } from './vector.js' import { PropType } from '../../zigTsExports.js' +import { VECTOR_BASE_TYPE_SIZE_MAP } from '../../schema.js' const readStringProp = ( prop: ReaderPropDef, @@ -83,9 +84,10 @@ export const readProp = ( prop.typeIndex === PropType.vector || prop.typeIndex === PropType.colVec ) { - const tmp = result.slice(i, i + prop.len!) // maybe align? + const vecSize = prop.len! * VECTOR_BASE_TYPE_SIZE_MAP[prop.vectorBaseType!] + const tmp = result.slice(i, i + vecSize) // maybe align? addProp(prop, readVector(prop, tmp), item) - i += prop.len! + i += vecSize } return i } diff --git a/src/protocol/db-read/read.ts b/src/protocol/db-read/read.ts index df69f40204..1c995f0067 100644 --- a/src/protocol/db-read/read.ts +++ b/src/protocol/db-read/read.ts @@ -18,6 +18,7 @@ import { PropType, readIncludeResponseMeta, ReadOp, + ReadOpInverse, } from '../../zigTsExports.js' export * from './types.js' @@ -109,7 +110,9 @@ const references: ReadInstruction = (q, result, i, item) => { } const edge: ReadInstruction = (q, result, i, item) => { - return readInstruction(result[i], q.edges!, result, i + 1, item) + const size = readUint32(result, i) + i += 4 + return readProps(q.edges!, result, i, i + size, item) } const readInstruction = ( @@ -119,7 +122,6 @@ const readInstruction = ( i: number, item: Item, ): number => { - console.log(result) if (instruction === ReadOp.meta) { return meta(q, result, i, item) } else if (instruction === ReadOp.aggregation) { @@ -166,6 +168,7 @@ export const resultToObject = ( result: Uint8Array, end: number, offset: number = 0, + items: AggItem | [Item] = [], ) => { if (q.aggregate) { return readAggregate(q, result, 0, result.byteLength - 4) @@ -177,10 +180,9 @@ export const resultToObject = ( if (q.type === ReaderSchemaEnum.single) { return null } - return [] + return items } - let items: AggItem | [Item] = [] let i = 5 + offset const readHook = q.hook diff --git a/src/protocol/db-read/types.ts b/src/protocol/db-read/types.ts index ced25e63ee..dd264fad17 100644 --- a/src/protocol/db-read/types.ts +++ b/src/protocol/db-read/types.ts @@ -1,5 +1,6 @@ import type { SchemaHooks } from '../../schema/index.js' import type { PropTypeEnum, VectorBaseTypeEnum } from '../../zigTsExports.js' +import type { TypedArray } from '../../schema.js' export type Item = { id: number @@ -17,16 +18,6 @@ export type Meta = { export type AggItem = Partial -export type TypedArray = - | Int8Array - | Uint8Array - | Int16Array - | Uint16Array - | Int32Array - | Uint32Array - | Float32Array - | Float64Array - export enum ReaderSchemaEnum { edge = 1, default = 2, @@ -77,14 +68,12 @@ export type ReaderGroupBy = { typeIndex: PropTypeEnum stepRange?: number stepType?: boolean - display?: Intl.DateTimeFormat // find a way for this -- shitty + display?: Intl.DateTimeFormat enum?: any[] } -// Move these types to seperate pkg including query def agg export type ReaderSchema = { readId: number - // maybe current read id that you add props: { [prop: string]: ReaderPropDef } main: { props: { [start: string]: ReaderPropDef }; len: number } type: ReaderSchemaEnum diff --git a/src/schema/def/typeDef.ts b/src/schema/def/typeDef.ts index 25e8cf4452..1102e220ed 100644 --- a/src/schema/def/typeDef.ts +++ b/src/schema/def/typeDef.ts @@ -30,13 +30,14 @@ import { fillEmptyMain, isZeroes } from './fillEmptyMain.js' import type { SchemaType } from '../schema/type.js' import { PropType } from '../../zigTsExports.js' import type { SchemaLocales } from '../schema/locales.js' +import { getTypeDefs } from '../defs/getTypeDefs.js' export const updateTypeDefs = (schema: SchemaOut) => { const schemaTypesParsed: { [key: string]: SchemaTypeDef } = {} const schemaTypesParsedById: { [id: number]: SchemaTypeDef } = {} let typeIdCnt = 1 - for (const typeName in schema.types) { + for (const typeName of Object.keys(schema.types).sort()) { const type = schema.types[typeName] const locales = schema.locales ?? { en: {} } const result = createEmptyDef(typeName, type, locales) @@ -85,11 +86,8 @@ export const updateTypeDefs = (schema: SchemaOut) => { if (prop.edges) { const edgeTypeName = `_${[`${schemaType.type}_${prop.path.join('_')}`, `${dstType.type}_${dstType.props[prop.inversePropName as string].path.join('_')}`].sort().join(':')}` - // console.log(edgeTypeName, Object.keys(schemaTypesParsed)) - if (!schemaTypesParsed[edgeTypeName]) { // make it - // console.log('have to make edge type') //prop.edges, schema.types // const type = schema.types[edgeTypeName] @@ -115,9 +113,11 @@ export const updateTypeDefs = (schema: SchemaOut) => { } const edgeType = schemaTypesParsed[edgeTypeName] + prop.edgeNodeTypeId = edgeType.id dstType.props[prop.inversePropName as string].edgeNodeTypeId = edgeType.id + // prop.edgeType = edgeType } else { prop.edgeNodeTypeId = 0 } @@ -127,6 +127,17 @@ export const updateTypeDefs = (schema: SchemaOut) => { } } + // A hack to sync the type and prop ids: + const newDefs = getTypeDefs(schema) + for (const [type, typeDef] of newDefs) { + const oldTypeDef = schemaTypesParsed[type] + if (oldTypeDef) { + for (const path in oldTypeDef.props) { + oldTypeDef.props[path].prop = typeDef.props.get(path)!.id + } + } + } + return { schemaTypesParsed, schemaTypesParsedById } } @@ -268,7 +279,7 @@ const createSchemaTypeDef = ( prop.typeIndex === PropType.colVec ) { prop.vectorBaseType = schemaVectorBaseTypeToEnum( - ('baseType' in schemaProp && schemaProp.baseType) || 'number', + ('baseType' in schemaProp && schemaProp.baseType) || 'float64', ) } @@ -281,9 +292,12 @@ const createSchemaTypeDef = ( } if (schemaProp.type === 'enum') { + // @ts-ignore prop.enum = Array.isArray(schemaProp) ? schemaProp : schemaProp.enum prop.reverseEnum = {} + // @ts-ignore for (let i = 0; i < prop.enum.length; i++) { + // @ts-ignore prop.reverseEnum[prop.enum[i]] = i } } else if (schemaProp.type === 'references') { diff --git a/src/schema/def/types.ts b/src/schema/def/types.ts index 13ffa6680a..f0d6d12d97 100644 --- a/src/schema/def/types.ts +++ b/src/schema/def/types.ts @@ -15,14 +15,18 @@ import type { SchemaLocales } from '../schema/locales.js' export type PropDef = { __isPropDef: true schema: SchemaProp - prop: number // (0-250) + /** 0-250 */ + prop: number typeIndex: PropTypeEnum separate: boolean path: string[] start: number - len: number // bytes or count - compression?: 0 | 1 // 0 == none , 1 == standard deflate + /** bytes or count */ + len: number + /** 0 == none , 1 == standard deflate */ + compression?: 0 | 1 enum?: any[] + /** The node is deleted if this reference(s) prop becomes empty. */ dependent?: boolean // default here? validation: Validation @@ -45,6 +49,7 @@ export type PropDef = { hasDefaultEdges?: boolean reverseEnum?: { [key: string]: number } edgesSeperateCnt?: number + edgeType?: SchemaTypeDef edges?: { [key: string]: PropDefEdge } diff --git a/src/schema/def/utils.ts b/src/schema/def/utils.ts index eced26bf36..ec1e90a822 100644 --- a/src/schema/def/utils.ts +++ b/src/schema/def/utils.ts @@ -53,7 +53,7 @@ export const propIsNumerical = (prop: PropDef | PropDefEdge) => { export const schemaVectorBaseTypeToEnum = ( vector: SchemaVector['baseType'], ): VectorBaseTypeEnum => { - if (vector === 'number' || vector === undefined) { + if (vector === undefined) { return VectorBaseType.float64 } return VectorBaseTypeInverse[vector] @@ -110,15 +110,10 @@ export const sortMainProps = ( ) => { const sizeA = REVERSE_SIZE_MAP[a.typeIndex] const sizeB = REVERSE_SIZE_MAP[b.typeIndex] - if (sizeA === 8) { - return -1 - } - if (sizeA === 4 && sizeB !== 8) { - return -1 - } - if (sizeA === sizeB) { - return 0 - } + if (sizeA === 8) return -1 + if (sizeA === 4 && sizeB !== 8) return -1 + if (sizeA === 2 && sizeB !== 4 && sizeB !== 8) return -1 + if (sizeA === sizeB) return 0 return 1 } diff --git a/src/schema/def/validation.ts b/src/schema/def/validation.ts index 6e4ed3d77e..847ce00939 100644 --- a/src/schema/def/validation.ts +++ b/src/schema/def/validation.ts @@ -5,6 +5,7 @@ import type { SchemaTimestamp } from '../schema/timestamp.js' import type { SchemaNumber } from '../schema/number.js' import type { SchemaEnum } from '../schema/enum.js' import { + isVector, MAX_ID, MIN_ID, type SchemaObject, @@ -361,11 +362,7 @@ export const VALIDATION_MAP: Record = { return true }, [PropType.vector]: (value) => { - // Array should be supported - if (!(value instanceof Float32Array)) { - return false - } - return true + return isVector(value) }, // @ts-ignore [PropType.text]: null, diff --git a/src/schema/defs/getTypeDefs.ts b/src/schema/defs/getTypeDefs.ts new file mode 100644 index 0000000000..763abfcdc2 --- /dev/null +++ b/src/schema/defs/getTypeDefs.ts @@ -0,0 +1,219 @@ +import { + type SchemaOut, + type SchemaProp, + type SchemaPropHooks, + type SchemaProps, + type SchemaType, +} from '../../schema.js' +import { getByPath, setByPath } from '../../utils/path.js' +import { PropType } from '../../zigTsExports.js' +import { defs, type PropDef, type PropTree, type TypeDef } from './index.js' + +const mainSorter = (a, b) => { + if (a.size === 8) return -1 + if (a.size === 4 && b.size !== 8) return -1 + if (a.size === 2 && b.size !== 4 && b.size !== 8) return -1 + if (a.size === b.size) return 0 + return 1 +} + +export const propIndexOffset = (prop: PropDef): number => { + switch (prop.type) { + // We pack default on the beginning, for smallest possible mem + case PropType.microBuffer: + case PropType.vector: + // microbuffers first + return 'default' in prop.schema ? -600 : 0 + case PropType.string: + case PropType.binary: + case PropType.json: + // then strings + return 'default' in prop.schema ? -500 : 0 + // then text + case PropType.text: + return 'default' in prop.schema ? -400 : 0 + // References go behind the defaults + case PropType.references: + case PropType.reference: + return -300 + // Aliases and colVec go last + case PropType.alias: + case PropType.aliases: // TODO remove ALIASES + case PropType.colVec: + return 300 + default: + return 0 + } +} + +const separateSorter = (a: PropDef, b: PropDef) => + propIndexOffset(a) - propIndexOffset(b) + +const addPropDef = ( + prop: SchemaProp, + path: string[], + typeDef: TypeDef, +) => { + const Def = defs[prop.type] + if (!Def) { + throw new Error('Unknown def') + } + + const def: PropDef = new Def(prop, path, typeDef) + if (def.size) { + typeDef.main.push(def) + } else { + typeDef.separate.push(def) + } + return def +} + +const getTypeDef = ( + name: string, + schema: SchemaType, + schemaRoot: SchemaOut, +): TypeDef => { + const { props } = schema + const typeDef: TypeDef = { + id: 0, + name, + separate: [], + props: new Map(), + main: [], + tree: { path: [], schema, props: new Map(), required: [] }, + schema, + schemaRoot, + propHooks: { + create: [], + update: [], + read: [], + search: [], + include: [], + filter: [], + groupBy: [], + aggregate: [], + }, + } + + const walk = ( + props: SchemaProps, + pPath: string[], + tree: TypeDef['tree'], + ): boolean | undefined => { + for (const key in props) { + const prop = props[key] + const path = [...pPath, key] + let required = prop.required + let def: PropTree | PropDef + if (prop.type === 'object') { + def = { + path, + schema: prop, + props: new Map(), + required: [], + } + if (walk(prop.props, path, def)) { + required = true + } + tree.props.set(key, def) + } else { + def = addPropDef(prop, path, typeDef) + typeDef.props.set(path.join('.'), def) + tree.props.set(key, def) + } + if (required) { + tree.required.push(key) + } + if (prop.hooks) { + for (const key in typeDef.propHooks) { + if (prop.hooks[key]) { + typeDef.propHooks[key].push(def) + } + } + } + } + return !!tree.required.length + } + + walk(props, [], typeDef.tree) + + return typeDef +} + +const cache = new WeakMap() +export const getTypeDefs = (schema: SchemaOut): Map => { + const cached = cache.get(schema) + if (cached) return cached + const typeDefs = new Map( + Object.entries(schema.types) + .sort() + .map(([name, type]) => [name, getTypeDef(name, type, schema)]), + ) + + // -------- connect references, add edges and assign ids -------- + let typeId = 1 + for (const [typeName, typeDef] of typeDefs) { + typeDef.id = typeId++ + for (const [propPath, def] of typeDef.props) { + const prop = + def.schema.type === 'references' ? def.schema.items : def.schema + if (prop.type !== 'reference') continue + def.ref = typeDefs.get(prop.ref)! + if (prop.prop) { + def.refProp = def.ref.props.get(prop.prop)! + } else { + def.refProp = addPropDef( + { + type: 'references', + items: { + type: 'reference', + ref: typeName, + prop: propPath, + }, + }, + [`${typeName}.${propPath}`], + def.ref, + ) + def.refProp.ref = typeDef + def.refProp.refProp = def + } + const inverseEdges = def.refProp.edges + if (inverseEdges) { + def.edges = inverseEdges + continue + } + let edges: undefined | Record> + for (const edge in prop) { + if (edge[0] !== '$') continue + edges ??= {} + edges[edge] = prop[edge] + } + if (edges) { + const edgeTypeName = `_${typeName}.${propPath}` + def.edges = getTypeDef(edgeTypeName, { props: edges }, schema) + typeDefs.set(edgeTypeName, def.edges) + } + } + } + + for (const [, typeDef] of typeDefs) { + // -------- sort and assign main -------- + typeDef.main.sort(mainSorter) + let start = 0 + for (const prop of typeDef.main) { + prop.start = start + start += prop.size + } + + // -------- sort and assign separate --------- + typeDef.separate.sort(separateSorter) + let propId = 1 + for (const prop of typeDef.separate) { + prop.id = propId++ + } + } + + // ----------- add to cache -------- + cache.set(schema, typeDefs) + return typeDefs +} diff --git a/src/schema/defs/index.ts b/src/schema/defs/index.ts new file mode 100644 index 0000000000..ec891909f1 --- /dev/null +++ b/src/schema/defs/index.ts @@ -0,0 +1,92 @@ +import type { + SchemaHooks, + SchemaObject, + SchemaOut, + SchemaProp, + SchemaType, +} from '../../schema.js' +import type { + LangCodeEnum, + ModifyEnum, + PropTypeEnum, +} from '../../zigTsExports.js' +import type { AutoSizedUint8Array } from '../../utils/AutoSizedUint8Array.js' +import * as references from './props/references.js' +import * as fixed from './props/fixed.js' +import * as alias from './props/alias.js' +import * as binary from './props/binary.js' +import * as cardinality from './props/cardinality.js' +import * as strings from './props/strings.js' +import * as vector from './props/vector.js' + +export type PropTree = { + props: Map + required: string[] + path: string[] + schema: SchemaObject | SchemaType +} + +export type TypeDef = { + id: number + name: string + main: PropDef[] + separate: PropDef[] + props: Map + tree: PropTree + schema: SchemaType + schemaRoot: SchemaOut + propHooks: Record +} + +export type PropDef = { + id: number + type: PropTypeEnum + start: number + path: string[] + size: number + schema: SchemaProp + edges?: TypeDef + ref?: TypeDef + refProp?: PropDef + typeDef: TypeDef + isEdge: boolean + pushValue( + buf: AutoSizedUint8Array, + value: unknown, + op: ModifyEnum, + lang?: LangCodeEnum, + ): void + + write( + buf: Uint8Array, + val: any, + offset: number, + op?: ModifyEnum, + lang?: LangCodeEnum, + ): void + + pushSelvaSchema(buf: AutoSizedUint8Array): void + validate(val: unknown, lang?: LangCodeEnum): void +} + +export const isPropDef = (p: any): p is PropDef => { + return p && 'pushValue' in p && typeof p.pushValue === 'function' +} + +export type PropDefClass = { + new (schema: SchemaProp, path: string[], typeDef: TypeDef): PropDef +} + +export const defs: Record< + Exclude['type'], 'object'>, + PropDefClass +> = { + ...references, + ...fixed, + ...alias, + ...binary, + ...cardinality, + ...strings, + ...vector, + enum: fixed.enum_, +} diff --git a/src/schema/defs/props/alias.ts b/src/schema/defs/props/alias.ts new file mode 100644 index 0000000000..99bd0d2019 --- /dev/null +++ b/src/schema/defs/props/alias.ts @@ -0,0 +1,22 @@ +import { PropType, PropTypeSelva } from '../../../zigTsExports.js' +import type { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import { BasePropDef } from './base.js' + +export const alias = class Alias extends BasePropDef { + override type = PropType.alias + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is string { + if (typeof value !== 'string') { + throw new Error('Invalid type for alias ' + this.path.join('.')) + } + if (!value.trim()) { + throw new Error('Invalid alias ' + this.path.join('.')) + } + buf.pushString(value) + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + buf.pushUint8(PropTypeSelva.alias) + } +} diff --git a/src/schema/defs/props/base.ts b/src/schema/defs/props/base.ts new file mode 100644 index 0000000000..112b03034b --- /dev/null +++ b/src/schema/defs/props/base.ts @@ -0,0 +1,58 @@ +import type { SchemaProp } from '../../../schema.js' +import { + LangCode, + Modify, + PropType, + type LangCodeEnum, + type ModifyEnum, + type PropTypeEnum, +} from '../../../zigTsExports.js' +import { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import type { PropDef, TypeDef } from '../index.js' + +let writeBuf: AutoSizedUint8Array +let validateBuf: AutoSizedUint8Array +export class BasePropDef implements PropDef { + constructor(schema: SchemaProp, path: string[], typeDef: TypeDef) { + this.schema = schema + this.path = path + this.typeDef = typeDef + } + id = 0 + start = 0 + size = 0 + type: PropTypeEnum = PropType.null + schema: SchemaProp + path: string[] + isEdge: boolean = false + typeDef: TypeDef + pushValue( + buf: AutoSizedUint8Array, + value: unknown, + op: ModifyEnum, + lang: LangCodeEnum, + ): void { + // To be implemented by subclasses + } + write( + buf: Uint8Array, + value: unknown, + offset: number, + op: ModifyEnum = Modify.create, + lang: LangCodeEnum = LangCode.none, + ): void { + writeBuf ??= new AutoSizedUint8Array(0, 0, buf) + writeBuf.data = buf + writeBuf.length = offset + writeBuf.maxLength = buf.length + this.pushValue(writeBuf, value, op, lang) + } + validate(value: unknown, lang: LangCodeEnum = LangCode.none) { + validateBuf ??= new AutoSizedUint8Array() + validateBuf.length = 0 + this.pushValue(writeBuf, value, Modify.create, lang) + } + pushSelvaSchema(buf: AutoSizedUint8Array): void { + // To be implemented by subclasses + } +} diff --git a/src/schema/defs/props/binary.ts b/src/schema/defs/props/binary.ts new file mode 100644 index 0000000000..ef2b5d485b --- /dev/null +++ b/src/schema/defs/props/binary.ts @@ -0,0 +1,61 @@ +import native from '../../../native.js' +import { NOT_COMPRESSED } from '../../../protocol/index.js' +import type { SchemaBinary, SchemaString } from '../../../schema.js' +import { + PropType, + type PropTypeEnum, + PropTypeSelva, + pushSelvaSchemaString, + LangCode, +} from '../../../zigTsExports.js' +import type { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import { BasePropDef } from './base.js' +import type { TypeDef } from '../index.js' +import { validateMaxBytes } from './utils.js' + +export const binary = class Binary extends BasePropDef { + constructor(prop: SchemaString, path: string[], typeDef: TypeDef) { + super(prop, path, typeDef) + if (prop.maxBytes && prop.maxBytes < 61) { + this.size = prop.maxBytes + 1 + } + if (this.size) { + this.type = PropType.binaryFixed + this.pushValue = this.pushFixedValue + } + } + override type: PropTypeEnum = PropType.binary + declare schema: SchemaBinary + override validate(value: unknown): asserts value is Uint8Array { + if (!(value instanceof Uint8Array)) { + throw new Error('Invalid type for binary ' + this.path.join('.')) + } + validateMaxBytes(value.byteLength, this.schema, this.path) + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is Uint8Array { + this.validate(value) + const crc = native.crc32(value) + buf.pushUint8(LangCode.none) + buf.pushUint8(NOT_COMPRESSED) + buf.set(value, buf.length) + buf.pushUint32(crc) + } + pushFixedValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is Uint8Array { + this.validate(value) + buf.pushUint8(value.byteLength) + buf.set(value, buf.length) + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + pushSelvaSchemaString(buf, { + type: PropTypeSelva.string, + fixedLenHint: this.schema.maxBytes ?? 0, + defaultLen: 0, + }) + } +} diff --git a/src/schema/defs/props/cardinality.ts b/src/schema/defs/props/cardinality.ts new file mode 100644 index 0000000000..36cc1cb489 --- /dev/null +++ b/src/schema/defs/props/cardinality.ts @@ -0,0 +1,52 @@ +import type { SchemaCardinality } from '../../../schema.js' +import { ENCODER } from '../../../utils/uint8.js' +import { + pushModifyCardinalityHeader, + PropType, + PropTypeSelva, + pushSelvaSchemaString, +} from '../../../zigTsExports.js' +import { xxHash64 } from '../../../db-client/xxHash64.js' +import type { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import { BasePropDef } from './base.js' +import type { TypeDef } from '../index.js' + +export const cardinality = class Cardinality extends BasePropDef { + constructor(prop: SchemaCardinality, path: string[], typeDef: TypeDef) { + super(prop, path, typeDef) + this.sparse = prop.mode === 'sparse' + this.precision = prop.precision ?? 8 + } + sparse: boolean + precision: number + override type = PropType.cardinality + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is any { + if (!Array.isArray(value)) { + value = [value] + } + + const items = value as any[] + if (items.length === 0) return + pushModifyCardinalityHeader(buf, this) + for (const item of items) { + if (typeof item === 'string') { + buf.reserveUint64() + xxHash64(ENCODER.encode(item), buf.data, buf.length - 8) + } else if (item instanceof Uint8Array && item.byteLength === 8) { + buf.set(item, buf.length) + } else { + throw new Error('Invalid value for cardinality ' + this.path.join('.')) + } + } + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + pushSelvaSchemaString(buf, { + type: PropTypeSelva.string, + fixedLenHint: 0, + defaultLen: 0, + }) + } +} diff --git a/src/schema/defs/props/fixed.ts b/src/schema/defs/props/fixed.ts new file mode 100644 index 0000000000..63e58c5a0e --- /dev/null +++ b/src/schema/defs/props/fixed.ts @@ -0,0 +1,227 @@ +import type { + EnumItem, + SchemaEnum, + SchemaNumber, + SchemaProp, +} from '../../../schema.js' +import { convertToTimestamp } from '../../../utils/timestamp.js' +import { + PropType, + type PropTypeEnum, + type ModifyEnum, + type LangCodeEnum, +} from '../../../zigTsExports.js' +import type { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import { BasePropDef } from './base.js' +import type { TypeDef } from '../index.js' + +export const number = class Number extends BasePropDef { + constructor(schema: SchemaNumber, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + if (schema.min !== undefined) this.min = schema.min + if (schema.max !== undefined) this.max = schema.max + } + override type: PropTypeEnum = PropType.number + override size = 8 + min = -globalThis.Number.MAX_VALUE + max = globalThis.Number.MAX_VALUE + override validate(value: unknown): asserts value is number { + if (typeof value !== 'number' || !globalThis.Number.isFinite(value)) { + throw new Error( + `Invalid type for ${this.schema.type} ${this.path.join('.')}`, + ) + } + if (this.min !== undefined && value < this.min) { + throw new Error( + `Value ${value} is smaller than min ${this.min} for ${this.path.join( + '.', + )}`, + ) + } + if (this.max !== undefined && value > this.max) { + throw new Error( + `Value ${value} is larger than max ${this.max} for ${this.path.join( + '.', + )}`, + ) + } + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is number { + this.validate(value) + buf.pushDoubleLE(value) + } +} + +export const timestamp = class Timestamp extends number { + override type: PropTypeEnum = PropType.timestamp + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is number | string { + const ts = convertToTimestamp(value as any) + this.validate(ts) + buf.pushInt64(ts) + } +} + +class integer extends number { + override validate(value: unknown): asserts value is number { + super.validate(value) + if (!Number.isInteger(value)) { + throw new Error( + `Invalid type for ${this.schema.type} ${this.path.join('.')}`, + ) + } + } +} + +export const uint8 = class Uint8 extends integer { + constructor(schema: SchemaNumber, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + if (schema.min === undefined) this.min = 0 + else if (schema.min < 0) this.min = 0 + if (schema.max === undefined) this.max = 255 + else if (schema.max > 255) this.max = 255 + } + override type: PropTypeEnum = PropType.uint8 + override size = 1 + override validate(value: unknown): asserts value is number { + super.validate(value) + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is number { + this.validate(value) + buf.pushUint8(value) + } +} + +export const int8 = class Int8 extends uint8 { + constructor(schema: SchemaNumber, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + if (schema.min === undefined) this.min = -128 + else if (schema.min < -128) this.min = -128 + if (schema.max === undefined) this.max = 127 + else if (schema.max > 127) this.max = 127 + } + override type = PropType.int8 +} + +export const uint16 = class Uint16 extends integer { + constructor(schema: SchemaNumber, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + if (schema.min === undefined) this.min = 0 + else if (schema.min < 0) this.min = 0 + if (schema.max === undefined) this.max = 65535 + else if (schema.max > 65535) this.max = 65535 + } + override type: PropTypeEnum = PropType.uint16 + override size = 2 + override validate(value: unknown): asserts value is number { + super.validate(value) + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is number { + this.validate(value) + buf.pushUint16(value) + } +} + +export const int16 = class Int16 extends uint16 { + constructor(schema: SchemaNumber, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + if (schema.min === undefined) this.min = -32768 + else if (schema.min < -32768) this.min = -32768 + if (schema.max === undefined) this.max = 32767 + else if (schema.max > 32767) this.max = 32767 + } + override type = PropType.int16 +} + +export const uint32 = class Uint32 extends integer { + constructor(schema: SchemaNumber, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + if (schema.min === undefined) this.min = 0 + else if (schema.min < 0) this.min = 0 + if (schema.max === undefined) this.max = 4294967295 + else if (schema.max > 4294967295) this.max = 4294967295 + } + override type: PropTypeEnum = PropType.uint32 + override size = 4 + override validate(value: unknown): asserts value is number { + super.validate(value) + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is number { + this.validate(value) + buf.pushUint32(value) + } +} + +export const int32 = class Int32 extends uint32 { + constructor(schema: SchemaNumber, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + if (schema.min === undefined) this.min = -2147483648 + else if (schema.min < -2147483648) this.min = -2147483648 + if (schema.max === undefined) this.max = 2147483647 + else if (schema.max > 2147483647) this.max = 2147483647 + } + override type = PropType.int32 +} + +export const enum_ = class Enum extends BasePropDef { + constructor(prop: SchemaEnum, path: string[], typeDef: TypeDef) { + super(prop, path, typeDef) + prop.enum.forEach((val, i) => { + const byte = i + 1 + this.enum[byte] = val + this.vals.set(val, byte) + }) + } + + override type = PropType.enum + override size = 1 + + enum: Record = {} + vals = new Map() + override validate(value: unknown): asserts value is EnumItem { + if (typeof value !== 'string' && typeof value !== 'number') { + throw new Error('Invalid type for enum ' + this.path.join('.')) + } + if (!this.vals.has(value)) { + throw new Error(`Invalid enum value ${value} for ${this.path.join('.')}`) + } + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is EnumItem { + this.validate(value) + buf.pushUint8(this.vals.get(value) ?? 0) + } +} + +export const boolean = class Boolean extends BasePropDef { + override type = PropType.boolean + override size = 1 + override validate(value: unknown): asserts value is boolean { + if (typeof value !== 'boolean') { + throw new Error('Invalid type for boolean ' + this.path.join('.')) + } + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is boolean { + this.validate(value) + buf.pushUint8(~~value) + } +} diff --git a/src/schema/defs/props/references.ts b/src/schema/defs/props/references.ts new file mode 100644 index 0000000000..1f4072968d --- /dev/null +++ b/src/schema/defs/props/references.ts @@ -0,0 +1,344 @@ +import { + Modify, + ModifyReferences, + PropType, + PropTypeSelva, + pushModifyReferenceMetaHeader, + pushModifyReferencesHeader, + pushModifyReferencesMetaHeader, + pushSelvaSchemaRef, + writeModifyReferenceMetaHeaderProps, + writeModifyReferencesHeaderProps, + writeModifyReferencesMetaHeaderProps, + type LangCodeEnum, + type ModifyEnum, + type PropTypeEnum, +} from '../../../zigTsExports.js' +import { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import type { + SchemaProp, + SchemaReference, + SchemaReferences, +} from '../../../schema.js' +import { BasePropDef } from './base.js' +import type { PropDef, TypeDef } from '../index.js' +import { + BasedModify, + getRealId, + getTmpId, +} from '../../../db-client/modify/index.js' +import { serializeProps } from '../../../db-client/modify/props.js' + +type Edges = Record<`${string}`, unknown> | undefined + +const getEdges = (obj: Record): Edges => { + let edges: Edges + for (const i in obj) { + if (i[0] === '$' && i !== '$index') { + edges ??= {} + edges[i] = obj[i] + } + } + return edges +} + +const serializeIds = ( + buf: AutoSizedUint8Array, + ids: number[], + offset: number, +): number => { + let i = offset + // one extra for padding + buf.pushUint32(0) + for (; i < ids.length; i++) { + const id = getRealId(ids[i]) + if (!id) break + buf.pushUint32(id) + } + return i +} + +const serializeTmpIds = ( + buf: AutoSizedUint8Array, + items: BasedModify[], + offset: number, +): undefined | any => { + let i = offset + // one extra for padding + buf.pushUint32(0) + for (; i < items.length; i++) { + const tmpId = getTmpId(items[i]) + if (tmpId === undefined) break + buf.pushUint32(tmpId) + } + + return i +} + +const serializeIdsAndMeta = ( + buf: AutoSizedUint8Array, + items: any[], + op: ModifyEnum, + offset: number, + lang: LangCodeEnum, + edgesType?: TypeDef, +): number => { + let i = offset + const start = buf.reserveUint32() + + for (; i < items.length; i++) { + const item = items[i] + if (!isValidRefObj(item)) { + break + } + const realId = getRealId(item.id) + const id = realId || getTmpId(item.id) + if (id === undefined) { + break + } + const index = pushModifyReferencesMetaHeader(buf, { + id: id, + isTmp: !realId, + withIndex: '$index' in item, + index: item.$index ?? -1, + size: 0, + }) + + if (edgesType) { + const edges = getEdges(item) + if (edges) { + const start = buf.length + serializeProps(edgesType.tree, edges, buf, op, lang) + writeModifyReferencesMetaHeaderProps.size( + buf.data, + buf.length - start, + index, + ) + } + } + } + + // store the amount of refs (for prealloc) + buf.writeUint32(i - offset, start) + + return i +} + +const isValidRefObj = (item: any) => { + if (typeof item === 'object' && item !== null) { + return getRealId(item.id) || getTmpId(item.id) !== undefined + } +} + +const setReferences = ( + buf: AutoSizedUint8Array, + value: any[], + prop: BasePropDef & { edges?: TypeDef }, + op: ModifyEnum, + lang: LangCodeEnum, +) => { + if (!Array.isArray(value)) { + throw new Error('References value must be an array') + } + let offset = 0 + const len = value.length + while (offset < len) { + const item = value[offset] + if (getRealId(item)) { + const index = pushModifyReferencesHeader(buf, { + op: ModifyReferences.ids, + size: 0, + }) + const start = buf.length + offset = serializeIds(buf, value, offset) + writeModifyReferencesHeaderProps.size(buf.data, buf.length - start, index) + } else if (getTmpId(item) !== undefined) { + const index = pushModifyReferencesHeader(buf, { + op: ModifyReferences.tmpIds, + size: 0, + }) + const start = buf.length + offset = serializeTmpIds(buf, value, offset) + writeModifyReferencesHeaderProps.size(buf.data, buf.length - start, index) + } else if (isValidRefObj(item)) { + const index = pushModifyReferencesHeader(buf, { + op: ModifyReferences.idsWithMeta, + size: 0, + }) + const start = buf.length + offset = serializeIdsAndMeta(buf, value, op, offset, lang, prop.edges) + writeModifyReferencesHeaderProps.size(buf.data, buf.length - start, index) + } else if (item instanceof BasedModify) { + throw item + } else if (typeof item === 'object' && item?.id instanceof BasedModify) { + throw item.id + } else { + throw 'bad ref!' + } + } +} + +const deleteReferences = (buf: AutoSizedUint8Array, value: any[]) => { + if (!Array.isArray(value)) { + throw new Error('References value must be an array') + } + let offset = 0 + while (offset < value.length) { + const item = value[offset] + if (getRealId(item)) { + const index = pushModifyReferencesHeader(buf, { + op: ModifyReferences.delIds, + size: 0, + }) + const start = buf.length + offset = serializeIds(buf, value, offset) + writeModifyReferencesHeaderProps.size(buf.data, buf.length - start, index) + } else if (getTmpId(item) !== undefined) { + const index = pushModifyReferencesHeader(buf, { + op: ModifyReferences.delTmpIds, + size: 0, + }) + const start = buf.length + offset = serializeTmpIds(buf, value, offset) + writeModifyReferencesHeaderProps.size(buf.data, buf.length - start, index) + } else if (item instanceof BasedModify) { + throw item + } else { + throw 'bad ref' + } + } +} + +export const references = class References extends BasePropDef { + override type: PropTypeEnum = PropType.references + declare schema: SchemaReferences + declare ref: TypeDef + declare refProp: PropDef + declare edges?: TypeDef + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + op: ModifyEnum, + lang: LangCodeEnum, + ): asserts value is any { + if (typeof value !== 'object' || value === null) { + throw new Error('References value must be an object and not null') + } + + const val = value as { + add?: any[] + update?: any[] + delete?: any[] + } + + if (Array.isArray(value)) { + if (op === Modify.update) { + buf.push(ModifyReferences.clear) + } + setReferences(buf, value, this, op, lang) + } + if (val.add) { + setReferences(buf, val.add, this, op, lang) + } + if (val.update) { + setReferences(buf, val.update, this, op, lang) + } + if (val.delete) { + deleteReferences(buf, val.delete) + } + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + pushSelvaSchemaRef(buf, { + type: PropTypeSelva.references, + flags: makeEdgeConstraintFlags(this.schema.items), + dstNodeType: this.ref.id, + inverseField: this.refProp.id, + edgeNodeType: this.edges?.id ?? 0, + capped: this.schema.capped ?? 0, + }) + } +} + +export const reference = class Reference extends BasePropDef { + override type: PropTypeEnum = PropType.reference + declare schema: SchemaReference + declare ref: TypeDef + declare refProp: PropDef + declare edges?: TypeDef + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + op: ModifyEnum, + lang: LangCodeEnum, + ): asserts value is any { + const id = getRealId(value) + if (id) { + pushModifyReferenceMetaHeader(buf, { + id, + isTmp: false, + size: 0, + }) + return + } + const tmpId = getTmpId(value) + if (tmpId !== undefined) { + pushModifyReferenceMetaHeader(buf, { + id: tmpId, + isTmp: true, + size: 0, + }) + return + } + + if (value instanceof BasedModify) { + throw value + } + + if (typeof value === 'object' && value !== null) { + const val = value as { id: any } + const realId = getRealId(val.id) + const id = realId || getTmpId(val.id) + if (id !== undefined) { + const index = pushModifyReferenceMetaHeader(buf, { + id, + isTmp: !realId, + size: 0, + }) + const prop = this + if (prop.edges) { + const edges = getEdges(val) + if (edges) { + const start = buf.length + serializeProps(prop.edges.tree, edges, buf, op, lang) + writeModifyReferenceMetaHeaderProps.size( + buf.data, + buf.length - start, + index, + ) + } + } + return + } + + if (val.id instanceof BasedModify) { + throw val.id + } + } + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + pushSelvaSchemaRef(buf, { + type: PropTypeSelva.reference, + flags: makeEdgeConstraintFlags(this.schema), + dstNodeType: this.ref.id, + inverseField: this.refProp.id, + edgeNodeType: this.edges?.id ?? 0, + capped: 0, + }) + } +} + +function makeEdgeConstraintFlags(schema: SchemaReference): number { + let flags = 0 + flags |= schema.dependent ? 0x01 : 0x00 + return flags +} diff --git a/src/schema/defs/props/strings.ts b/src/schema/defs/props/strings.ts new file mode 100644 index 0000000000..bc4085b174 --- /dev/null +++ b/src/schema/defs/props/strings.ts @@ -0,0 +1,201 @@ +import native from '../../../native.js' +import { COMPRESSED, NOT_COMPRESSED } from '../../../protocol/index.js' +import type { SchemaString, SchemaText } from '../../../schema.js' +import { + PropType, + type LangCodeEnum, + type PropTypeEnum, + type ModifyEnum, + PropTypeSelva, + pushSelvaSchemaString, + pushSelvaSchemaText, + LangCode, + writeSelvaSchemaStringProps, +} from '../../../zigTsExports.js' +import type { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import { BasePropDef } from './base.js' +import type { TypeDef } from '../index.js' +import { validateMaxBytes } from './utils.js' + +function validateString( + value: unknown, + prop: { min?: number; max?: number }, + path: string[], +): asserts value is string { + if (typeof value !== 'string') { + throw new Error('Invalid type for string ' + path.join('.')) + } + if (prop.min !== undefined && value.length < prop.min) { + throw new Error( + `Length ${value.length} is smaller than min ${prop.min} for ${path.join( + '.', + )}`, + ) + } + if (prop.max !== undefined && value.length > prop.max) { + throw new Error( + `Length ${value.length} is larger than max ${prop.max} for ${path.join( + '.', + )}`, + ) + } +} + +export const string = class String extends BasePropDef { + constructor(prop: SchemaString, path: string[], typeDef: TypeDef) { + super(prop, path, typeDef) + if (prop.maxBytes && prop.maxBytes < 61) { + // TODO explain why 61 bytes (1 byte is for size but why 60 byte and not 47 or 63? */ + this.size = prop.maxBytes + 1 + } else if (prop.max && prop.max < 31) { + // TODO Explain why this is here + this.size = prop.max * 2 + 1 + } + if (this.size) { + this.type = PropType.stringFixed + this.pushValue = this.pushFixedValue + } else if (prop.compression === 'none') { + this.deflate = false + } + } + deflate = true + declare schema: SchemaString + override type: PropTypeEnum = PropType.string + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + _op?: ModifyEnum, + lang: LangCodeEnum = LangCode.none, + ): asserts value is string | Uint8Array { + if (value instanceof Uint8Array) { + buf.pushUint32(value.byteLength) + buf.set(value, buf.length) + } else { + validateString(value, this.schema, this.path) + const normalized = value.normalize('NFKD') + buf.pushUint8(lang) + if (this.deflate && normalized.length > 200) { + buf.pushUint8(COMPRESSED) + const sizePos = buf.reserveUint32() + const stringPos = buf.length + const written = buf.pushString(normalized) + buf.ensure(buf.length + written) + buf.data.copyWithin(buf.length, buf.length - written, buf.length) + const size = native.compress(buf.data, stringPos, written) + if (size !== 0) { + buf.writeUint32(written, sizePos) + buf.length = stringPos + size + validateMaxBytes(size, this.schema, this.path) + const crc = native.crc32(buf.subarray(stringPos)) + buf.pushUint32(crc) + return + } + buf.length = sizePos - 1 + } + buf.pushUint8(NOT_COMPRESSED) + const written = buf.pushString(normalized) + validateMaxBytes(written, this.schema, this.path) + const crc = native.crc32(buf.subarray(buf.length - written)) + buf.pushUint32(crc) + } + } + + pushFixedValue( + buf: AutoSizedUint8Array, + value: unknown, + ): asserts value is string { + validateString(value, this.schema, this.path) + const size = native.stringByteLength(value) + validateMaxBytes(size, this.schema, this.path) + buf.pushUint8(size) + buf.pushString(value) + const padEnd = this.size - size - 1 + if (padEnd) { + buf.fill(0, buf.length, buf.length + padEnd) + } + } + + override pushSelvaSchema(buf: AutoSizedUint8Array) { + const index = pushSelvaSchemaString(buf, { + type: PropTypeSelva.string, + fixedLenHint: this.schema.maxBytes ?? 0, // Note that selva doesn't do actual validation + defaultLen: 0, // TODO also check that defaultLen <= maxBytes + }) + if (this.schema.default) { + const start = buf.length + this.pushValue(buf, this.schema.default) + writeSelvaSchemaStringProps.defaultLen( + buf.data, + buf.length - start, + index, + ) + } + } +} + +export const text = class Text extends string { + override type = PropType.text + // @ts-ignore + declare schema: SchemaText + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + op?: ModifyEnum, + lang: LangCodeEnum = LangCode.none, + ) { + if (typeof value === 'string') { + if (lang === LangCode.none) { + throw new Error( + `Invalid type, text needs to be an object ${this.path.join('.')}`, + ) + } + const index = buf.reserveUint32() + const start = buf.length + super.pushValue(buf, value, op, lang) + buf.writeUint32(buf.length - start, index) + } else if (typeof value === 'object' && value !== null) { + if (Array.isArray(value)) { + throw new Error('Invalid type for text ' + this.path.join('.')) + } + for (const key in value) { + if (!(key in LangCode)) { + throw new Error( + `Invalid locale ${key} for text ${this.path.join('.')}`, + ) + } + const index = buf.reserveUint32() + const start = buf.length + super.pushValue(buf, value[key], op, LangCode[key]) + buf.writeUint32(buf.length - start, index) + } + } else { + throw new Error('Invalid type for text ' + this.path.join('.')) + } + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + if (this.schema.default) { + pushSelvaSchemaText(buf, { + type: PropTypeSelva.text, + nrDefaults: Object.keys(this.schema.default).length, + }) + this.pushValue(buf, this.schema.default) + } else { + pushSelvaSchemaText(buf, { + type: PropTypeSelva.text, + nrDefaults: 0, + }) + } + } +} + +export const json = class Json extends string { + override type = PropType.json + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + op?: ModifyEnum, + lang: LangCodeEnum = LangCode.none, + ) { + super.pushValue(buf, JSON.stringify(value), op, lang) + } +} diff --git a/src/schema/defs/props/utils.ts b/src/schema/defs/props/utils.ts new file mode 100644 index 0000000000..6daf82778f --- /dev/null +++ b/src/schema/defs/props/utils.ts @@ -0,0 +1,15 @@ +export const validateMaxBytes = ( + bytes: number, + prop: { maxBytes?: number }, + path: string[], +) => { + if (prop.maxBytes !== undefined) { + if (bytes > prop.maxBytes) { + throw new Error( + `Byte length ${bytes} is larger than maxBytes ${ + prop.maxBytes + } for ${path.join('.')}`, + ) + } + } +} diff --git a/src/schema/defs/props/vector.ts b/src/schema/defs/props/vector.ts new file mode 100644 index 0000000000..198e80975c --- /dev/null +++ b/src/schema/defs/props/vector.ts @@ -0,0 +1,113 @@ +import { + VECTOR_BASE_TYPE_SIZE_MAP, + type SchemaVector, +} from '../../../schema.js' +import { vectorBaseType2TypedArray } from '../../../schema/schema/vector.js' +import { + PropType, + type LangCodeEnum, + type PropTypeEnum, + type ModifyEnum, + PropTypeSelva, + pushSelvaSchemaColvec, + pushSelvaSchemaMicroBuffer, + VectorBaseType, +} from '../../../zigTsExports.js' +import type { AutoSizedUint8Array } from '../../../utils/AutoSizedUint8Array.js' +import { BasePropDef } from './base.js' +import type { TypeDef } from '../index.js' +import { TypedArray } from '../../../schema/index.js' + +export const vector = class Vector extends BasePropDef { + constructor(schema: SchemaVector, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + this.vectorSize = + schema.size * VECTOR_BASE_TYPE_SIZE_MAP[VectorBaseType[schema.baseType]] + } + vectorSize: number + override type: PropTypeEnum = PropType.vector + override validate(value: unknown): asserts value is TypedArray { + const t = vectorBaseType2TypedArray[this.schema['baseType']] + if (!(value instanceof t)) { + throw new Error(`Not a ${t.name}`) + } + if ((value as TypedArray).byteLength > this.vectorSize) { + throw new Error('Vector too long') + } + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + _op?: ModifyEnum, + _lang?: LangCodeEnum, + ): asserts value is any { + this.validate(value) + const v = new Uint8Array(value.buffer).subarray( + 0, + Math.min(value.byteLength, this.vectorSize), + ) + buf.set(v, buf.length) + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + const defaultValue = this.schema['default'] + pushSelvaSchemaMicroBuffer(buf, { + type: PropTypeSelva.microBuffer, + len: this.vectorSize, + hasDefault: ~~!!defaultValue, + }) + if (defaultValue) { + const v = new Uint8Array(defaultValue.buffer, 0, this.vectorSize) + buf.set(v, buf.length) + } + } +} + +// This will become similar to Main BUFFER +// and it can use it if there is an option used like "appendOnly: true" on the type +// then we can switch to colvec for all main buffer props +// if there are no var props we can iterate straight trough the colvec list using another iterator +export const colvec = class ColVec extends BasePropDef { + constructor(schema: SchemaVector, path: string[], typeDef: TypeDef) { + super(schema, path, typeDef) + this.compSize = VECTOR_BASE_TYPE_SIZE_MAP[VectorBaseType[schema.baseType]] + this.vecLen = schema.size + } + compSize: number + vecLen: number + override type = PropType.colVec + override validate(value: unknown): asserts value is Uint8Array { + const t = vectorBaseType2TypedArray[this.schema['baseType']] + if (!(value instanceof t)) { + throw new Error(`Not a ${t.name}`) + } + if ((value as TypedArray).byteLength > this.vecLen * this.compSize) { + throw new Error('Vector too long') + } + } + override pushValue( + buf: AutoSizedUint8Array, + value: unknown, + _op: ModifyEnum, + _lang: LangCodeEnum, + ): asserts value is any { + this.validate(value) + const v = new Uint8Array(value.buffer).subarray( + 0, + Math.min(value.byteLength, this.vecLen * this.compSize), + ) + buf.set(v, buf.length) + } + override pushSelvaSchema(buf: AutoSizedUint8Array) { + const defaultValue = this.schema['default'] + pushSelvaSchemaColvec(buf, { + type: PropTypeSelva.colVec, + vecLen: this.vecLen, + compSize: this.compSize, + hasDefault: ~~!!defaultValue, + }) + if (defaultValue) { + const v = new Uint8Array(defaultValue.buffer, 0, this.vecLen * this.compSize) + buf.set(v, buf.length) + } + } +} diff --git a/src/schema/index.ts b/src/schema/index.ts index f98fd52034..88fe38d063 100644 --- a/src/schema/index.ts +++ b/src/schema/index.ts @@ -13,7 +13,12 @@ import { BLOCK_CAPACITY_DEFAULT, } from './def/types.js' import { propIsNumerical } from './def/utils.js' -import { parseSchema, type SchemaIn, type SchemaOut } from './schema/schema.js' +import { + parseSchema, + type SchemaIn, + type SchemaOut, + type StrictSchema, +} from './schema/schema.js' export * from './schema/alias.js' export * from './schema/base.js' @@ -40,8 +45,21 @@ export * from './serialize.js' export * from './infer.js' export * as semver from './semver/mod.js' -export const parse = (schema: SchemaIn): { schema: SchemaOut } => ({ - schema: parseSchema(schema), +export type TypedArray = + | Int8Array + | Uint8Array + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array + +// eslint-disable-next-line +export const parse = ( + schema: StrictSchema, +): { schema: SchemaOut } => ({ + schema: parseSchema(schema as any) as unknown as SchemaOut, }) export const MAX_ID = 4294967295 export const MIN_ID = 1 diff --git a/src/schema/infer.ts b/src/schema/infer.ts index 05d718e91c..1a052ab7bc 100644 --- a/src/schema/infer.ts +++ b/src/schema/infer.ts @@ -1,16 +1,6 @@ +import type { TypedArray } from './index.js' import type { Schema } from './schema/schema.js' -type TypedArray = - | Uint8Array - | Float32Array - | Uint8Array - | Int16Array - | Uint16Array - | Int32Array - | Uint32Array - | Float32Array - | Float64Array - // Map schema types to TypeScript types type TypeMap = { string: string diff --git a/src/schema/schema/alias.ts b/src/schema/schema/alias.ts index d3dc1f9ae6..d44df0bb1b 100644 --- a/src/schema/schema/alias.ts +++ b/src/schema/schema/alias.ts @@ -1,11 +1,14 @@ +import { assert } from './shared.js' import { parseString, type SchemaString } from './string.js' -export type SchemaAlias = Omit & { +export type SchemaAlias = Omit & { type: 'alias' + default?: never } export const parseAlias = (def: Record): SchemaAlias => { def.type = 'string' + assert(def.default === undefined, 'Default alias not allowed') const { type, ...rest } = parseString(def) - return { type: 'alias', ...rest } + return { type: 'alias', ...rest } as SchemaAlias } diff --git a/src/schema/schema/base.ts b/src/schema/schema/base.ts index a219967a28..77f2af847e 100644 --- a/src/schema/schema/base.ts +++ b/src/schema/schema/base.ts @@ -26,6 +26,7 @@ const isValidation = (v: unknown): v is Validation => isFunction(v) export const parseBase = >( def: Record, result: T, + skipValidation = false, ): T => { assert( def.required === undefined || isBoolean(def.required), @@ -53,7 +54,7 @@ export const parseBase = >( assertExpectedProps(result, def) - if ('default' in result && result.default !== undefined) { + if (!skipValidation && 'default' in result && result.default !== undefined) { // @ts-ignore const validation = getValidator(result) // @ts-ignore diff --git a/src/schema/schema/enum.ts b/src/schema/schema/enum.ts index 760ed91c97..86369bde49 100644 --- a/src/schema/schema/enum.ts +++ b/src/schema/schema/enum.ts @@ -11,7 +11,7 @@ export type EnumItem = string | number export type SchemaEnum = Base & RequiredIfStrict<{ type: 'enum' }, strict> & { default?: EnumItem - enum: EnumItem[] + enum: EnumItem[] | readonly EnumItem[] } const isEnumItem = (v: unknown): v is EnumItem => diff --git a/src/schema/schema/hooks.ts b/src/schema/schema/hooks.ts index 77e708293b..12e6e2db1f 100644 --- a/src/schema/schema/hooks.ts +++ b/src/schema/schema/hooks.ts @@ -1,12 +1,13 @@ import { isFunction, isRecord } from './shared.js' +// TODO remove these type BasedDbQuery = any type Operator = any -export type SchemaHooks = { - create?: (payload: Record) => void | Record - update?: (payload: Record) => void | Record - read?: (result: Record) => void | null | Record +export type SchemaHooks> = { + create?: (payload: Payload) => void | Payload + update?: (payload: Payload) => void | Payload + read?: (result: Payload) => void | null | Payload search?: (query: BasedDbQuery, fields: Set) => void include?: ( query: BasedDbQuery, diff --git a/src/schema/schema/locales.ts b/src/schema/schema/locales.ts index 835a466685..3707ea51ca 100644 --- a/src/schema/schema/locales.ts +++ b/src/schema/schema/locales.ts @@ -1,6 +1,6 @@ import { LangCode } from '../../zigTsExports.js' import { assert, isBoolean, isRecord, type RequiredIfStrict } from './shared.js' -type LangName = keyof typeof LangCode +export type LangName = keyof typeof LangCode export type SchemaLocale = RequiredIfStrict< { diff --git a/src/schema/schema/payload.ts b/src/schema/schema/payload.ts new file mode 100644 index 0000000000..bec3d70ce5 --- /dev/null +++ b/src/schema/schema/payload.ts @@ -0,0 +1,125 @@ +import type { TypedArray } from '../index.js' + +type BasedModify = any // Mock BasedModify to avoid circular dependency + +type NumInc = number | { increment: number } + +type TypeMap = { + string: string + number: NumInc + int8: NumInc + uint8: NumInc + int16: NumInc + uint16: NumInc + int32: NumInc + uint32: NumInc + boolean: boolean + text: string | Record + json: any + timestamp: NumInc | string | Date + binary: Uint8Array + alias: string + vector: TypedArray + colvec: TypedArray + cardinality: string | string[] +} + +type EdgeKeys = keyof T extends infer K + ? K extends string + ? string extends K + ? never + : K extends `$${string}` + ? K + : never + : never + : never + +type InferEdgeProps< + Prop, + Types, + Locales extends Record = Record, +> = { + [K in EdgeKeys]?: Prop[K] extends keyof TypeMap + ? TypeMap[Prop[K]] + : InferProp +} + +type InferRefValue< + Prop, + Types, + Locales extends Record = Record, +> = + | number + | BasedModify + | (EdgeKeys extends never + ? { id: number | BasedModify } + : { id: number | BasedModify } & InferEdgeProps< + Prop, + Types, + Locales + >) + +type InferReferences< + Prop, + Types, + Locales extends Record = Record, +> = + | InferRefValue[] + | { + add?: Prettify>[] + update?: Prettify>[] + delete?: (number | BasedModify)[] + } + +type InferProp< + Prop, + Types, + Locales extends Record = Record, +> = Prop extends { type: 'text' } + ? string | Partial> + : Prop extends { type: 'object'; props: infer P } + ? InferType + : Prop extends { type: infer T extends keyof TypeMap } + ? TypeMap[T] + : Prop extends { enum: infer E extends readonly any[] } + ? E[number] + : Prop extends { ref: string } + ? Prettify> + : Prop extends { items: { ref: string } } + ? Prettify> + : Prop extends keyof TypeMap + ? TypeMap[Prop] + : never + +type Prettify = Target extends any + ? Target extends (infer U)[] + ? Prettify[] + : Target extends object + ? { + -readonly [K in keyof Target]: Target[K] + } + : Target + : never + +export type InferType< + Props, + Types, + Locales extends Record = Record, +> = { + [K in keyof Props as Props[K] extends { required: true } + ? K + : never]: InferProp +} & { + [K in keyof Props as Props[K] extends { required: true } + ? never + : K]?: InferProp | null +} + +export type InferPayload< + S extends { types: any; locales?: any }, + T extends keyof S['types'], +> = InferType< + S['types'][T]['props'], + S['types'], + S['locales'] extends Record ? S['locales'] : {} +> diff --git a/src/schema/schema/prop.ts b/src/schema/schema/prop.ts index af2db158a1..ca7649ff28 100644 --- a/src/schema/schema/prop.ts +++ b/src/schema/schema/prop.ts @@ -47,6 +47,7 @@ export type SchemaPropShorthand = | 'cardinality' | NumberType | EnumItem[] + | readonly EnumItem[] export type SchemaProp = | SchemaPropObj diff --git a/src/schema/schema/reference.ts b/src/schema/schema/reference.ts index 905f6274bf..46c6f552de 100644 --- a/src/schema/schema/reference.ts +++ b/src/schema/schema/reference.ts @@ -10,21 +10,30 @@ import { import { parseProp, type SchemaProp } from './prop.js' import type { SchemaReferences } from './references.js' import type { SchemaOut } from './schema.js' +import type { SchemaAlias } from './alias.js' -type EdgeExcludedProps = 'prop' | `$${string}` +type ReferenceProps = nested extends true + ? { prop?: never; dependent?: never; [edge: `$${string}`]: never } + : { + prop: string + /** The node is deleted if this reference(s) prop becomes empty. */ + dependent?: boolean + [edge: `$${string}`]: + | Exclude< + SchemaProp, + | SchemaReferences + | SchemaReference + | SchemaAlias + | 'alias' + > + | SchemaReferences + | SchemaReference + } -export type SchemaReference = Base & +export type SchemaReference = Base & RequiredIfStrict<{ type: 'reference' }, strict> & { ref: string - } & { - prop: string - dependent?: boolean - [edge: `$${string}`]: - | Exclude, SchemaReferences> - | (Omit, 'items'> & { - items: Omit, EdgeExcludedProps> - }) - } + } & ReferenceProps let parsingEdges: boolean export const parseReference = ( @@ -55,11 +64,19 @@ export const parseReference = ( } parsingEdges = true - for (const key in def) { - if (key.startsWith('$')) { - result[key] = parseProp(def[key], locales) + try { + for (const key in def) { + if (key.startsWith('$')) { + const edge = parseProp(def[key], locales) + assert(edge.type !== 'alias', 'Edge alias not allowed') + result[key] = edge + } } + } catch (e) { + parsingEdges = false + throw e } + parsingEdges = false if (fromReferences) { deleteUndefined(result) diff --git a/src/schema/schema/references.ts b/src/schema/schema/references.ts index 86e4a9398b..741fef9e6c 100644 --- a/src/schema/schema/references.ts +++ b/src/schema/schema/references.ts @@ -1,12 +1,12 @@ import { parseBase, type Base } from './base.js' import { parseReference, type SchemaReference } from './reference.js' import type { SchemaOut } from './schema.js' -import { assert, isRecord, type RequiredIfStrict } from './shared.js' +import { assert, isNatural, isRecord, type RequiredIfStrict } from './shared.js' -export type SchemaReferences = Base & +export type SchemaReferences = Base & RequiredIfStrict<{ type: 'references' }, strict> & { capped?: number - items: Omit, keyof Base> + items: Omit, keyof Base> } export const parseReferences = ( @@ -14,8 +14,13 @@ export const parseReferences = ( locales: SchemaOut['locales'], ): SchemaReferences => { assert(isRecord(def.items), 'Items should be record') + assert( + def.capped === undefined || isNatural(def.capped), + 'Capped should be a number', + ) return parseBase(def, { type: 'references', + capped: def.capped, items: parseReference(def.items, locales, true), }) } diff --git a/src/schema/schema/schema.ts b/src/schema/schema/schema.ts index 7531f7c57e..0be0156de8 100644 --- a/src/schema/schema/schema.ts +++ b/src/schema/schema/schema.ts @@ -2,17 +2,18 @@ import { assert, assertExpectedProps, deleteUndefined, - isBoolean, isFunction, isRecord, isString, type RequiredIfStrict, } from './shared.js' +import { type LangName, type SchemaLocale } from './locales.js' import { parseType, type SchemaType } from './type.js' import { inspect } from 'node:util' import { postParseRefs } from './reference.js' import hash from '../../hash/hash.js' import { parseLocales, type SchemaLocales } from './locales.js' +import { type SchemaHooks } from './hooks.js' export type SchemaTypes = Record> export type SchemaMigrateFn = ( node: Record, @@ -33,6 +34,194 @@ export type Schema = { export type SchemaIn = Schema | Schema export type SchemaOut = Schema +type NormalizeProp = T extends string + ? { type: T } + : T extends readonly (infer U)[] + ? { type: 'enum'; enum: U[] } + : T extends { type: 'object'; props: infer P } + ? Omit & { + type: 'object' + props: { [K in keyof P]: NormalizeProp } + } + : T extends { props: infer P } + ? Omit & { + type: 'object' + props: { [K in keyof P]: NormalizeProp } + } + : T extends { items: infer I } + ? Omit & { type: 'references'; items: NormalizeProp } + : T extends { ref: string } + ? { + [K in keyof T]: K extends `$${string}` + ? NormalizeProp + : T[K] + } & { type: 'reference' } + : T extends { enum: readonly any[] } + ? T & { type: 'enum' } + : T + +// Utility to normalize properties in an object +type NormalizeEdges = { + [K in keyof T]: NormalizeProp +} + +// Utility to convert a Union to an Intersection +type UnionToIntersection = (U extends any ? (k: U) => void : never) extends ( + k: infer I, +) => void + ? I + : never + +// Helper to find Props in other types that reference TName with a specific 'prop' field +type GetBackRefs = UnionToIntersection< + { + [K in keyof Types]: ( + Types[K] extends { props: infer P } ? P : Types[K] + ) extends infer Props + ? { + [P in keyof Props as Props[P] extends { + ref: TName + prop: infer BackProp extends string + } + ? BackProp + : Props[P] extends { + items: { ref: TName; prop: infer BackProp extends string } + } + ? BackProp + : never]: { + type: 'references' + items: { + type: 'reference' + ref: K & string + prop: P & string + } & NormalizeEdges< + Props[P] extends { items: infer I } + ? Omit + : Omit + > + } + } + : never + }[keyof Types] +> + +// ResolvedProps combines explicit props with inferred back-reference props +export type ResolvedProps< + Types, + TName extends keyof Types, + Props = NormalizeType extends { props: infer P } ? P : {}, + BackRefs = GetBackRefs, +> = string extends keyof Types + ? any + : { + [K in keyof (Props & + ([BackRefs] extends [never] + ? {} + : Omit)) as Extract]: (Props & + ([BackRefs] extends [never] ? {} : Omit))[K] + } + +type NormalizeType = T extends { props: infer P } + ? Omit & { props: { [K in keyof P]: NormalizeProp } } + : { props: { [K in keyof T]: NormalizeProp } } + +// Helper to extract props from a type definition (explicit or shorthand) +type GetProps = T extends { props: infer P } ? P : T + +// Helper to find "Incoming Claims" - properties on TargetRef that explicitly point to MyType.MyProp +type GetIncomingClaims = { + [K in keyof GetProps]: GetProps< + Types[TargetRef] + >[K] extends infer TargetProp + ? TargetProp extends { ref: MyType; prop: MyProp } + ? K + : TargetProp extends { items: { ref: MyType; prop: MyProp } } + ? K + : never + : never +}[keyof GetProps] + +type ValidateProp = Prop extends { + ref: infer Ref extends string + prop: infer BackProp extends string +} + ? Ref extends keyof Types + ? GetIncomingClaims extends infer Claims + ? [Claims] extends [never] + ? Prop + : BackProp extends Claims + ? Prop + : { ref: Ref; prop: Claims } & Omit + : never + : Prop + : Prop extends { + items: { + ref: infer Ref extends string + prop: infer BackProp extends string + } + } + ? Ref extends keyof Types + ? GetIncomingClaims extends infer Claims + ? [Claims] extends [never] + ? Prop + : BackProp extends Claims + ? Prop + : Prop extends { items: infer I } + ? { + items: { ref: Ref; prop: Claims } & Omit + } & Omit + : Prop + : never + : Prop + : Prop + +type ValidateProps = { + [K in keyof Props]: ValidateProp +} + +type ValidateSchema = Omit & { + types: { + [K in keyof S['types']]: S['types'][K] extends { props: infer P } + ? { + props: ValidateProps + hooks?: SchemaHooks + } & Omit + : { + [P in keyof S['types'][K]]: ValidateProp< + S['types'][K][P], + S['types'], + K & string, + P & string + > + } + } +} + +export type StrictSchema = S & ValidateSchema + +type Prettify = { + [K in keyof T]: T[K] +} & {} + +export type ResolveSchema = Prettify< + Omit & { + types: { + [K in keyof S['types']]: Prettify< + Omit, 'props'> & { + props: ResolvedProps + } + > + } + locales: S extends { locales: infer L } + ? L extends readonly (infer K extends LangName)[] + ? Partial>> + : L extends Record + ? Partial>> + : SchemaLocales + : SchemaLocales + } +> + const isMigrations = (v: unknown): v is SchemaMigrations => isRecord(v) && Object.values(v).every( @@ -86,7 +275,12 @@ const track =

>(input: P): P => { return _track(input, 0, input) } -export const parseSchema = (input: SchemaIn): SchemaOut => { +/* + This returns a "public" parsed schema, suitable for external users +*/ +export const parseSchema = ( + input: StrictSchema, +): ResolveSchema => { const v: unknown = track(input) assert(isRecord(v), 'Schema should be record') try { @@ -119,6 +313,7 @@ export const parseSchema = (input: SchemaIn): SchemaOut => { defaultTimezone: v.defaultTimezone, migrations: v.migrations, types, + hash: v.hash, }) as SchemaOut assertExpectedProps(result, v) @@ -130,9 +325,10 @@ export const parseSchema = (input: SchemaIn): SchemaOut => { } } + // TODO we can remove hash from here after we finish new schema defs (internal schema) result.hash = hash(result) - return result + return result as unknown as ResolveSchema } catch (e) { if (tracking) { e = Error(`${path.join('.')}: ${inspect(value)} - ${e}`, { cause: e }) diff --git a/src/schema/schema/timestamp.ts b/src/schema/schema/timestamp.ts index da2f7ac857..72d090204e 100644 --- a/src/schema/schema/timestamp.ts +++ b/src/schema/schema/timestamp.ts @@ -1,4 +1,4 @@ -import { assert, isNumber, isRecord, isString } from './shared.js' +import { assert, isBoolean, isNumber, isRecord, isString } from './shared.js' import { parseBase, type Base } from './base.js' import { convertToTimestamp } from '../../utils/index.js' @@ -11,6 +11,7 @@ export type SchemaTimestamp = Base & { max?: strict extends true ? number : Timestamp default?: strict extends true ? number : Timestamp step?: strict extends true ? number : number | string + expire?: boolean } export const isTimestamp = (v: unknown): v is Timestamp => @@ -37,9 +38,15 @@ export const parseTimestamp = ( 'Invalid default timestamp', ) + assert( + def.expire === undefined || isBoolean(def.expire), + 'Invalid expire timestamp', + ) + return parseBase>(def, { type: 'timestamp', on: def.on, + expire: def.expire, min: convertToTsIfDefined(def.min), max: convertToTsIfDefined(def.max), step: convertToTsIfDefined(def.step), diff --git a/src/schema/schema/type.ts b/src/schema/schema/type.ts index 4f763ffe84..a193026253 100644 --- a/src/schema/schema/type.ts +++ b/src/schema/schema/type.ts @@ -58,10 +58,15 @@ export const parseType = ( type.partial === undefined || isBoolean(type.partial), 'Should be boolean', ) + assert( + type.insertOnly === undefined || isBoolean(type.insertOnly), + 'Should be boolean', + ) assert(isRecord(type.props), 'Should be record') const result = { hooks: type.hooks, blockCapacity: type.blockCapacity, + insertOnly: type.insertOnly, capped: type.capped, partial: type.partial, props: parseProps(type.props, locales), diff --git a/src/schema/schema/vector.ts b/src/schema/schema/vector.ts index 735090d904..4a6fecce97 100644 --- a/src/schema/schema/vector.ts +++ b/src/schema/schema/vector.ts @@ -1,20 +1,61 @@ import { assert, isNatural, isString } from './shared.js' import { parseBase, type Base } from './base.js' -import { numberTypes } from './number.js' +import { VectorBaseType } from '../../zigTsExports.js' -const vectorBaseTypes = [...numberTypes, 'float32', 'float64'] as const +const vectorBaseTypes = Object.keys( + VectorBaseType, +) as (keyof typeof VectorBaseType)[] -export type SchemaVector = Base & { - type: 'vector' | 'colvec' - /** - * Number of elements in the vector. - */ - size: number - /** - * Base type of the vector. - * float64 == number - */ - baseType?: (typeof vectorBaseTypes)[number] +export type VectorBaseTypeStr = keyof typeof VectorBaseType +export const vectorBaseType2TypedArray = { + int8: Int8Array, + uint8: Uint8Array, + int16: Int16Array, + uint16: Uint16Array, + int32: Int32Array, + uint32: Uint32Array, + float32: Float32Array, + float64: Float64Array, +} +export type VectorBaseType2TypedArray = typeof vectorBaseType2TypedArray + +export type SchemaVector = + Base & { + type: 'vector' | 'colvec' + /** + * Number of elements in the vector. + */ + size: number + /** + * Base type of the vector. + * float64 == number + */ + baseType: T + /** + * Default vector. + */ + default?: InstanceType + } + +export function isVector( + value: unknown, +): value is + | Int8Array + | Uint8Array + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array { + for (const k in vectorBaseType2TypedArray) { + if ( + value instanceof + vectorBaseType2TypedArray[k as keyof typeof vectorBaseType2TypedArray] + ) + return true + } + return false } export const parseVector = (def: Record): SchemaVector => { @@ -27,10 +68,16 @@ export const parseVector = (def: Record): SchemaVector => { isString(def.baseType) && vectorBaseTypes.includes(def.baseType as any), 'Invalid baseType', ) + assert(def.default === undefined || isVector(def.default), 'Invalid default') - return parseBase(def, { - type: def.type, - size: def.size, - baseType: def.baseType as SchemaVector['baseType'], - }) + return parseBase( + def, + { + type: def.type, + size: def.size, + baseType: def.baseType as SchemaVector['baseType'], + default: def.default, + }, + true, + ) } diff --git a/src/schema/serialize.ts b/src/schema/serialize.ts index f6a5e62ac7..01d3ed84da 100644 --- a/src/schema/serialize.ts +++ b/src/schema/serialize.ts @@ -468,10 +468,7 @@ export const deSerializeInner = ( if (isSchemaProp) { const type = buf[i] - const parsedType = PropTypeInverse[type] - if (type !== PropType.enum) { - obj.type = parsedType - } + obj.type = PropTypeInverse[type] i += 1 } diff --git a/src/schema/utils.ts b/src/schema/utils.ts new file mode 100644 index 0000000000..bc033ee978 --- /dev/null +++ b/src/schema/utils.ts @@ -0,0 +1,100 @@ +// writer.$role +// snurf +// writer.* +// writer.** +// writer.flap[0] +// writer.flap[-1] +// writer. + +// writer + opts +// * +// ** + +// getPropsFromPaths(['writer.description.nl'], schema, type, langCode) +// { +// def: PropDef +// resultPath: ['writer', 'description', 'nl'] +// lang: langcode +// select: false +// }[] + +// getPropsFromPaths(['writer.flap[0..5]'], schema, type, langCode) +// { +// def: PropDef +// resultPath: ['writer', 'flap'] +// lang: langcode +// opts: { start: 10, end: 14 } +// }[] + +/* + + + .query('user') + .filter('nr', '>', 8) + .and('nr' '<', 10) + .and(filter => filter('nr', >, 5).or('nr', <, 10) ) + .and('flap', '>', 10) + .and('writer.$role', '=', 'admin') + .or(() => { + filter('nr', '<', 1).or('nr', '=', 5) + }) + +{ + type: 'user', + // target: 21 // [21,21,23] + + and: [ + + props: { + nr: { ops: [ + { val: 8, op: '>' }, + { val: 10, op: '<' } + ]}, + writer: { + props: { + $role: { + ops: [ + {val: 'admin', op: ''} +]} + }, + ops: [{ val: 23, op: '='}] + }, +} + { + props: { nr: { ops: [{val: 1, op: '<}] } }, + or: [{ nr: { val: 5, op: '='}}] + }], + or: [{ + props: { nr: [{ val: 1, op: '<'}] } + or: [{ + nr: [{ val: 5, op: '='}] + }], + }] + } + locale: 'en', + sort: { prop: 'nr', order: 'asc' }, + include: { + props: { + writer: { + props: { + '*': { opts: {} }, + $rating: { opts: {} }, + + } + }, + likes: { + select: { start: 2, end: 10 } + }, + flap: { + props: {}, + filter: ... + } + } + } + // agg? +} + +// validateQuery() + + +*/ diff --git a/src/server/functionApi/client/query.ts b/src/server/functionApi/client/query.ts index b4beff133d..8009352bc9 100644 --- a/src/server/functionApi/client/query.ts +++ b/src/server/functionApi/client/query.ts @@ -19,7 +19,7 @@ export class BasedQuery extends BasedQueryAbstract { public payload: any public name: string public ctx: Context - public attachedCtx: AttachedCtx + public attachedCtx!: AttachedCtx public id: number public route: BasedRoute<'query'> diff --git a/src/server/functions/index.ts b/src/server/functions/index.ts index 78849901a2..1e9f7ce60e 100644 --- a/src/server/functions/index.ts +++ b/src/server/functions/index.ts @@ -24,9 +24,9 @@ export class BasedFunctions { reqId: number = 0 - config: FunctionConfig + config!: FunctionConfig - unregisterTimeout: NodeJS.Timeout + unregisterTimeout!: NodeJS.Timeout installsInProgress: Record> = {} diff --git a/src/server/incoming/index.ts b/src/server/incoming/index.ts index 082e46bd54..9c0db494e1 100644 --- a/src/server/incoming/index.ts +++ b/src/server/incoming/index.ts @@ -142,6 +142,8 @@ export default ( app.options('/*', (res, req) => httpHandler(server, req, res)) app.head('/*', (res, req) => httpHandler(server, req, res)) app.trace('/*', (res, req) => httpHandler(server, req, res)) + + // app.ad } server.uwsApp = app diff --git a/src/server/server.ts b/src/server/server.ts index 16a734fd85..cbfb8f2b55 100644 --- a/src/server/server.ts +++ b/src/server/server.ts @@ -109,6 +109,7 @@ server.functions.addRoutes({ await server.start() ``` */ + export class BasedServer { public console: Console = console @@ -120,15 +121,15 @@ export class BasedServer { public auth: BasedAuth - public port: number + public port!: number - public uwsApp: uws.TemplatedApp + public uwsApp!: uws.TemplatedApp - public silent: boolean + public silent!: boolean - public queryEvents: QueryEvents + public queryEvents!: QueryEvents - public channelEvents: ChannelEvents + public channelEvents!: ChannelEvents public rateLimit: RateLimit = { ws: 2e3, @@ -140,7 +141,7 @@ export class BasedServer { public forceReloadLastSeqId: number = -1 - public encodedForceReload: Uint8Array + public encodedForceReload!: Uint8Array public sendInitialForceReload: boolean = false @@ -209,7 +210,7 @@ export class BasedServer { public requestsCounterInProgress: boolean = false - public requestsCounterTimeout: NodeJS.Timeout + public requestsCounterTimeout!: NodeJS.Timeout public obsCleanTimeout?: NodeJS.Timeout @@ -237,7 +238,7 @@ export class BasedServer { [E in Event]?: Listener[] } = {} - public workerRequest: (type: string, payload?: any) => void | Promise + public workerRequest!: (type: string, payload?: any) => void | Promise private http: ServerOptions['http'] = {}; diff --git a/src/utils/AutoSizedUint8Array.ts b/src/utils/AutoSizedUint8Array.ts new file mode 100644 index 0000000000..0c8011742a --- /dev/null +++ b/src/utils/AutoSizedUint8Array.ts @@ -0,0 +1,298 @@ +import native from '../native.js' +import { + writeDoubleLE, + writeFloatLE, + writeUint64, + writeInt64, + ENCODER, +} from './index.js' + +// Runtime method injection +const delegateMethods = [ + 'toString', + 'toLocaleString', + 'join', + 'indexOf', + 'lastIndexOf', + 'includes', + 'every', + 'some', + 'forEach', + 'map', + 'filter', + 'find', + 'findIndex', + 'reduce', + 'reduceRight', + 'entries', + 'keys', + 'values', +] as const + +export class AutoSizedUint8Array { + static readonly ERR_OVERFLOW = Error('ERR_OVERFLOW') + + data: Uint8Array + length: number + maxLength: number + + constructor( + initialCapacity: number = 256, + maxLength: number = 1024 * 1024 * 1024, + data: Uint8Array = new Uint8Array( + new ArrayBuffer(initialCapacity, { + maxByteLength: maxLength, + }), + ), + ) { + this.data = data + this.length = 0 + this.maxLength = maxLength + } + + ensure(requiredLength: number): void { + const currentLength = this.data.byteLength + if (currentLength >= requiredLength) return + if (requiredLength > this.maxLength) { + throw AutoSizedUint8Array.ERR_OVERFLOW + } + + // Manual Max for speed + const doubleCapacity = currentLength * 2 + const newCapacity = + requiredLength > doubleCapacity ? requiredLength : doubleCapacity + // Cap at maxLength + const finalCapacity = + newCapacity > this.maxLength ? this.maxLength : newCapacity + + ;(this.data.buffer as any).resize(finalCapacity) + } + + get view(): Uint8Array { + return this.data.subarray(0, this.length) + } + + set(array: ArrayLike, offset: number = 0): void { + const end = offset + array.length + if (end > this.data.length) { + this.ensure(end) + } + this.data.set(array, offset) + if (end > this.length) this.length = end + } + + fill(value: number, start: number = 0, end: number = this.length): this { + if (end > this.length) { + if (end > this.data.length) { + this.ensure(end) + } + this.length = end + } + this.data.fill(value, start, end) + return this + } + + get(index: number): number | undefined { + return index < this.length ? this.data[index] : undefined + } + + pushUint8(value: number): void { + const end = this.length + 1 + if (end > this.data.length) { + this.ensure(end) + } + this.data[this.length] = value + this.length = end + } + + writeUint8(value: number, offset: number): void { + const end = offset + 1 + if (end > this.data.length) { + this.ensure(end) + } + this.data[offset] = value + if (end > this.length) this.length = end + } + + pushUint16(value: number): void { + const end = this.length + 2 + if (end > this.data.length) { + this.ensure(end) + } + this.data[this.length] = value + this.data[this.length + 1] = value >> 8 + this.length = end + } + + writeUint16(value: number, offset: number): void { + const end = offset + 2 + if (end > this.data.length) { + this.ensure(end) + } + this.data[offset] = value + this.data[offset + 1] = value >> 8 + if (end > this.length) this.length = end + } + + pushUint32(value: number): void { + const end = this.length + 4 + if (end > this.data.length) { + this.ensure(end) + } + this.data[this.length] = value + this.data[this.length + 1] = value >> 8 + this.data[this.length + 2] = value >> 16 + this.data[this.length + 3] = value >> 24 + this.length = end + } + + pushDoubleLE(value: number): void { + const end = this.length + 8 + if (end > this.data.length) { + this.ensure(end) + } + writeDoubleLE(this.data, value, this.length) + this.length = end + } + + pushFloatLE(value: number): void { + const end = this.length + 4 + if (end > this.data.length) { + this.ensure(end) + } + writeFloatLE(this.data, value, this.length) + this.length = end + } + + pushUint64(value: number): void { + const end = this.length + 8 + if (end > this.data.length) { + this.ensure(end) + } + writeUint64(this.data, value, this.length) + this.length = end + } + + pushInt64(value: number): void { + const end = this.length + 8 + if (end > this.data.length) { + this.ensure(end) + } + writeInt64(this.data, value, this.length) + this.length = end + } + + pushString(value: string): number { + const maxBytes = native.stringByteLength(value) + const end = this.length + maxBytes + if (end > this.data.length) { + this.ensure(end) + } + const { written } = ENCODER.encodeInto( + value, + this.data.subarray(this.length), + ) + this.length += written + return written + } + + writeUint32(value: number, offset: number): void { + const end = offset + 4 + if (end > this.data.length) { + this.ensure(end) + } + this.data[offset] = value + this.data[offset + 1] = value >> 8 + this.data[offset + 2] = value >> 16 + this.data[offset + 3] = value >> 24 + if (end > this.length) this.length = end + } + + writeUint64(value: number, offset: number): void { + const requiredEnd = offset + 8 + if (requiredEnd > this.data.length) { + this.ensure(requiredEnd) + } + writeUint64(this.data, value, offset) + if (requiredEnd > this.length) this.length = requiredEnd + } + + reserveUint32(): number { + const index = this.length + const end = index + 4 + if (end > this.data.length) { + this.ensure(end) + } + this.length = end + return index + } + + reserveUint64(): number { + const index = this.length + const end = index + 8 + if (end > this.data.length) { + this.ensure(end) + } + this.length = end + return index + } + + reserve(amount: number): number { + const index = this.length + const end = index + amount + if (end > this.data.length) { + this.ensure(end) + } + this.length = end + return index + } + + // Core array methods restored for type safety and performance + push(byte: number): void { + return this.pushUint8(byte) + } + + subarray(begin: number = 0, end: number = this.length): Uint8Array { + return this.view.subarray(begin, end) + } + + slice(start?: number, end?: number): Uint8Array { + return this.view.slice(start, end) + } +} + +// Methods delegated to the underlying Uint8Array view +type DelegatedProps = (typeof delegateMethods)[number] + +export interface AutoSizedUint8Array extends Pick { + [Symbol.iterator](): IterableIterator + reverse(): this + sort(compareFn?: (a: number, b: number) => number): this +} + +// Runtime method injection +for (const method of delegateMethods) { + ;(AutoSizedUint8Array.prototype as any)[method] = function ( + this: AutoSizedUint8Array, + ...args: any[] + ) { + return (this.view as any)[method](...args) + } +} + +const mutatingDelegateMethods = ['reverse', 'sort'] +for (const method of mutatingDelegateMethods) { + ;(AutoSizedUint8Array.prototype as any)[method] = function ( + this: AutoSizedUint8Array, + ...args: any[] + ) { + ;(this.view as any)[method](...args) + return this + } +} + +;(AutoSizedUint8Array.prototype as any)[Symbol.iterator] = function ( + this: AutoSizedUint8Array, +) { + return this.view[Symbol.iterator]() +} diff --git a/src/zigTsExports.ts b/src/zigTsExports.ts index 267c02d780..d9ce35f1a9 100644 --- a/src/zigTsExports.ts +++ b/src/zigTsExports.ts @@ -8,9 +8,14 @@ import { readUint64, readInt64, readFloatLE, readDoubleLE } from './utils/index.js' +import { AutoSizedUint8Array } from './utils/AutoSizedUint8Array.js' export type TypeId = number +export type SelvaFieldType = number + +export type SelvaField = number + export const BridgeResponse = { query: 1, modify: 2, @@ -54,9 +59,8 @@ export const OpType = { loadBlock: 128, unloadBlock: 129, loadCommon: 130, - createType: 131, - setSchemaIds: 132, emptyMod: 133, + expire: 134, noOp: 255, } as const @@ -81,9 +85,8 @@ export const OpTypeInverse = { 128: 'loadBlock', 129: 'unloadBlock', 130: 'loadCommon', - 131: 'createType', - 132: 'setSchemaIds', 133: 'emptyMod', + 134: 'expire', 255: 'noOp', } as const @@ -108,9 +111,8 @@ export const OpTypeInverse = { loadBlock, unloadBlock, loadCommon, - createType, - setSchemaIds, emptyMod, + expire, noOp */ export type OpTypeEnum = (typeof OpType)[keyof typeof OpType] @@ -136,6 +138,7 @@ export const ModOp = { deleteTextField: 16, upsert: 17, insert: 18, + end: 254, padding: 255, } as const @@ -160,6 +163,7 @@ export const ModOpInverse = { 16: 'deleteTextField', 17: 'upsert', 18: 'insert', + 254: 'end', 255: 'padding', } as const @@ -184,258 +188,1312 @@ export const ModOpInverse = { deleteTextField, upsert, insert, + end, padding */ export type ModOpEnum = (typeof ModOp)[keyof typeof ModOp] -export const PropType = { - null: 0, - timestamp: 1, - number: 4, - cardinality: 5, - uint8: 6, - uint32: 7, - boolean: 9, - enum: 10, - string: 11, - stringFixed: 12, - text: 13, - reference: 15, - references: 16, - microBuffer: 17, - alias: 18, - aliases: 19, - int8: 20, - int16: 21, - uint16: 22, - int32: 23, - binary: 25, - binaryFixed: 26, - vector: 27, - json: 28, - jsonFixed: 29, - object: 30, - colVec: 31, - id: 255, +export const Modify = { + create: 0, + createRing: 1, + update: 2, + delete: 3, + upsert: 4, + insert: 5, } as const -export const PropTypeInverse = { - 0: 'null', - 1: 'timestamp', - 4: 'number', - 5: 'cardinality', - 6: 'uint8', - 7: 'uint32', - 9: 'boolean', - 10: 'enum', - 11: 'string', - 12: 'stringFixed', - 13: 'text', - 15: 'reference', - 16: 'references', - 17: 'microBuffer', - 18: 'alias', - 19: 'aliases', - 20: 'int8', - 21: 'int16', - 22: 'uint16', - 23: 'int32', - 25: 'binary', - 26: 'binaryFixed', - 27: 'vector', - 28: 'json', - 29: 'jsonFixed', - 30: 'object', - 31: 'colVec', - 255: 'id', +export const ModifyInverse = { + 0: 'create', + 1: 'createRing', + 2: 'update', + 3: 'delete', + 4: 'upsert', + 5: 'insert', } as const /** - null, - timestamp, - number, - cardinality, - uint8, - uint32, - boolean, - enum, - string, - stringFixed, - text, - reference, - references, - microBuffer, - alias, - aliases, - int8, - int16, - uint16, - int32, - binary, - binaryFixed, - vector, - json, - jsonFixed, - object, - colVec, - id + create, + createRing, + update, + delete, + upsert, + insert */ -export type PropTypeEnum = (typeof PropType)[keyof typeof PropType] +export type ModifyEnum = (typeof Modify)[keyof typeof Modify] -export const RefOp = { - clear: 0, - del: 1, - end: 2, - set: 3, - setIndex: 4, - setTmp: 5, - setEdge: 6, - setIndexTmp: 7, - setEdgeIndex: 8, - setEdgeIndexTmp: 9, - setEdgeTmp: 10, -} as const +export type ModifyHeader = { + opId: number + opType: OpTypeEnum + schema: number + count: number +} -export const RefOpInverse = { - 0: 'clear', - 1: 'del', - 2: 'end', - 3: 'set', - 4: 'setIndex', - 5: 'setTmp', - 6: 'setEdge', - 7: 'setIndexTmp', - 8: 'setEdgeIndex', - 9: 'setEdgeIndexTmp', - 10: 'setEdgeTmp', -} as const +export const ModifyHeaderByteSize = 17 -/** - clear, - del, - end, - set, - setIndex, - setTmp, - setEdge, - setIndexTmp, - setEdgeIndex, - setEdgeIndexTmp, - setEdgeTmp - */ -export type RefOpEnum = (typeof RefOp)[keyof typeof RefOp] +export const ModifyHeaderAlignOf = 16 -export const ReadOp = { - none: 0, - id: 255, - edge: 252, - references: 253, - reference: 254, - aggregation: 250, - meta: 249, -} as const +export const writeModifyHeader = ( + buf: Uint8Array, + header: ModifyHeader, + offset: number, +): number => { + writeUint32(buf, Number(header.opId), offset) + offset += 4 + buf[offset] = Number(header.opType) + offset += 1 + writeUint64(buf, header.schema, offset) + offset += 8 + writeUint32(buf, Number(header.count), offset) + offset += 4 + return offset +} -export const ReadOpInverse = { - 0: 'none', - 255: 'id', - 252: 'edge', - 253: 'references', - 254: 'reference', - 250: 'aggregation', - 249: 'meta', -} as const +export const writeModifyHeaderProps = { + opId: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset) + }, + opType: (buf: Uint8Array, value: OpTypeEnum, offset: number) => { + buf[offset + 4] = Number(value) + }, + schema: (buf: Uint8Array, value: number, offset: number) => { + writeUint64(buf, value, offset + 5) + }, + count: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 13) + }, +} -/** - none, - id, - edge, - references, - reference, - aggregation, - meta - */ -export type ReadOpEnum = (typeof ReadOp)[keyof typeof ReadOp] +export const readModifyHeader = ( + buf: Uint8Array, + offset: number, +): ModifyHeader => { + const value: ModifyHeader = { + opId: readUint32(buf, offset), + opType: (buf[offset + 4]) as OpTypeEnum, + schema: readUint64(buf, offset + 5), + count: readUint32(buf, offset + 13), + } + return value +} -export const ReferencesSelect = { - index: 1, - any: 2, - all: 3, -} as const +export const readModifyHeaderProps = { + opId: (buf: Uint8Array, offset: number) => readUint32(buf, offset), + opType: (buf: Uint8Array, offset: number) => (buf[offset + 4]) as OpTypeEnum, + schema: (buf: Uint8Array, offset: number) => readUint64(buf, offset + 5), + count: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 13), +} -export const ReferencesSelectInverse = { - 1: 'index', - 2: 'any', - 3: 'all', -} as const +export const createModifyHeader = (header: ModifyHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyHeaderByteSize) + writeModifyHeader(buffer, header, 0) + return buffer +} -/** - index, - any, - all - */ -export type ReferencesSelectEnum = (typeof ReferencesSelect)[keyof typeof ReferencesSelect] +export const pushModifyHeader = ( + buf: AutoSizedUint8Array, + header: ModifyHeader, +): number => { + const index = buf.length + buf.pushUint32(Number(header.opId)) + buf.pushUint8(Number(header.opType)) + buf.pushUint64(header.schema) + buf.pushUint32(Number(header.count)) + return index +} + +export type ModifyUpdateHeader = { + op: ModifyEnum + type: TypeId + isTmp: boolean + id: number + size: number +} -export const RefEdgeOp = { - noEdgeNoIndexRealId: 0, - edgeNoIndexRealId: 1, - edgeIndexRealId: 2, - noEdgeIndexRealId: 3, - noEdgeNoIndexTmpId: 4, - edgeNoIndexTmpId: 5, - edgeIndexTmpId: 6, - noEdgeIndexTmpId: 7, -} as const +export const ModifyUpdateHeaderByteSize = 12 -export const RefEdgeOpInverse = { - 0: 'noEdgeNoIndexRealId', - 1: 'edgeNoIndexRealId', - 2: 'edgeIndexRealId', - 3: 'noEdgeIndexRealId', - 4: 'noEdgeNoIndexTmpId', - 5: 'edgeNoIndexTmpId', - 6: 'edgeIndexTmpId', - 7: 'noEdgeIndexTmpId', -} as const +export const ModifyUpdateHeaderAlignOf = 16 -/** - noEdgeNoIndexRealId, - edgeNoIndexRealId, - edgeIndexRealId, - noEdgeIndexRealId, - noEdgeNoIndexTmpId, - edgeNoIndexTmpId, - edgeIndexTmpId, - noEdgeIndexTmpId - */ -// this needs number because it has a any (_) condition -export type RefEdgeOpEnum = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | (number & {}) +export const writeModifyUpdateHeader = ( + buf: Uint8Array, + header: ModifyUpdateHeader, + offset: number, +): number => { + buf[offset] = Number(header.op) + offset += 1 + writeUint16(buf, Number(header.type), offset) + offset += 2 + buf[offset] = 0 + buf[offset] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= ((0 >>> 0) & 127) << 1 + offset += 1 + writeUint32(buf, Number(header.id), offset) + offset += 4 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset +} -export const LangCode = { - none: 0, - aa: 1, - ab: 2, - af: 3, - ak: 4, - sq: 5, - am: 6, - ar: 7, - an: 8, - hy: 9, - as: 10, - av: 11, - ae: 12, - ay: 13, - az: 14, - eu: 15, - be: 16, - bn: 17, - bi: 18, - bs: 19, - br: 20, - bg: 21, - my: 22, - ca: 23, - km: 24, +export const writeModifyUpdateHeaderProps = { + op: (buf: Uint8Array, value: ModifyEnum, offset: number) => { + buf[offset] = Number(value) + }, + type: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 1) + }, + isTmp: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 3] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + id: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 4) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 8) + }, +} + +export const readModifyUpdateHeader = ( + buf: Uint8Array, + offset: number, +): ModifyUpdateHeader => { + const value: ModifyUpdateHeader = { + op: (buf[offset]) as ModifyEnum, + type: (readUint16(buf, offset + 1)) as TypeId, + isTmp: (((buf[offset + 3] >>> 0) & 1)) === 1, + id: readUint32(buf, offset + 4), + size: readUint32(buf, offset + 8), + } + return value +} + +export const readModifyUpdateHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as ModifyEnum, + type: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, + isTmp: (buf: Uint8Array, offset: number) => (((buf[offset + 3] >>> 0) & 1)) === 1, + id: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 4), + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 8), +} + +export const createModifyUpdateHeader = (header: ModifyUpdateHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyUpdateHeaderByteSize) + writeModifyUpdateHeader(buffer, header, 0) + return buffer +} + +export const pushModifyUpdateHeader = ( + buf: AutoSizedUint8Array, + header: ModifyUpdateHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint16(Number(header.type)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= ((0 >>> 0) & 127) << 1 + buf.pushUint32(Number(header.id)) + buf.pushUint32(Number(header.size)) + return index +} + +export type ModifyDeleteHeader = { + op: ModifyEnum + type: TypeId + isTmp: boolean + id: number +} + +export const ModifyDeleteHeaderByteSize = 8 + +export const ModifyDeleteHeaderAlignOf = 8 + +export const writeModifyDeleteHeader = ( + buf: Uint8Array, + header: ModifyDeleteHeader, + offset: number, +): number => { + buf[offset] = Number(header.op) + offset += 1 + writeUint16(buf, Number(header.type), offset) + offset += 2 + buf[offset] = 0 + buf[offset] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= ((0 >>> 0) & 127) << 1 + offset += 1 + writeUint32(buf, Number(header.id), offset) + offset += 4 + return offset +} + +export const writeModifyDeleteHeaderProps = { + op: (buf: Uint8Array, value: ModifyEnum, offset: number) => { + buf[offset] = Number(value) + }, + type: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 1) + }, + isTmp: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 3] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + id: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 4) + }, +} + +export const readModifyDeleteHeader = ( + buf: Uint8Array, + offset: number, +): ModifyDeleteHeader => { + const value: ModifyDeleteHeader = { + op: (buf[offset]) as ModifyEnum, + type: (readUint16(buf, offset + 1)) as TypeId, + isTmp: (((buf[offset + 3] >>> 0) & 1)) === 1, + id: readUint32(buf, offset + 4), + } + return value +} + +export const readModifyDeleteHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as ModifyEnum, + type: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, + isTmp: (buf: Uint8Array, offset: number) => (((buf[offset + 3] >>> 0) & 1)) === 1, + id: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 4), +} + +export const createModifyDeleteHeader = (header: ModifyDeleteHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyDeleteHeaderByteSize) + writeModifyDeleteHeader(buffer, header, 0) + return buffer +} + +export const pushModifyDeleteHeader = ( + buf: AutoSizedUint8Array, + header: ModifyDeleteHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint16(Number(header.type)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= ((0 >>> 0) & 127) << 1 + buf.pushUint32(Number(header.id)) + return index +} + +export type ModifyCreateHeader = { + op: ModifyEnum + type: TypeId + size: number +} + +export const ModifyCreateHeaderByteSize = 7 + +export const ModifyCreateHeaderAlignOf = 8 + +export const writeModifyCreateHeader = ( + buf: Uint8Array, + header: ModifyCreateHeader, + offset: number, +): number => { + buf[offset] = Number(header.op) + offset += 1 + writeUint16(buf, Number(header.type), offset) + offset += 2 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset +} + +export const writeModifyCreateHeaderProps = { + op: (buf: Uint8Array, value: ModifyEnum, offset: number) => { + buf[offset] = Number(value) + }, + type: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 1) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 3) + }, +} + +export const readModifyCreateHeader = ( + buf: Uint8Array, + offset: number, +): ModifyCreateHeader => { + const value: ModifyCreateHeader = { + op: (buf[offset]) as ModifyEnum, + type: (readUint16(buf, offset + 1)) as TypeId, + size: readUint32(buf, offset + 3), + } + return value +} + +export const readModifyCreateHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as ModifyEnum, + type: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 3), +} + +export const createModifyCreateHeader = (header: ModifyCreateHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyCreateHeaderByteSize) + writeModifyCreateHeader(buffer, header, 0) + return buffer +} + +export const pushModifyCreateHeader = ( + buf: AutoSizedUint8Array, + header: ModifyCreateHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint16(Number(header.type)) + buf.pushUint32(Number(header.size)) + return index +} + +export type ModifyCreateRingHeader = { + op: ModifyEnum + type: TypeId + maxNodeId: number + size: number +} + +export const ModifyCreateRingHeaderByteSize = 11 + +export const ModifyCreateRingHeaderAlignOf = 16 + +export const writeModifyCreateRingHeader = ( + buf: Uint8Array, + header: ModifyCreateRingHeader, + offset: number, +): number => { + buf[offset] = Number(header.op) + offset += 1 + writeUint16(buf, Number(header.type), offset) + offset += 2 + writeUint32(buf, Number(header.maxNodeId), offset) + offset += 4 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset +} + +export const writeModifyCreateRingHeaderProps = { + op: (buf: Uint8Array, value: ModifyEnum, offset: number) => { + buf[offset] = Number(value) + }, + type: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 1) + }, + maxNodeId: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 3) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 7) + }, +} + +export const readModifyCreateRingHeader = ( + buf: Uint8Array, + offset: number, +): ModifyCreateRingHeader => { + const value: ModifyCreateRingHeader = { + op: (buf[offset]) as ModifyEnum, + type: (readUint16(buf, offset + 1)) as TypeId, + maxNodeId: readUint32(buf, offset + 3), + size: readUint32(buf, offset + 7), + } + return value +} + +export const readModifyCreateRingHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as ModifyEnum, + type: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, + maxNodeId: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 3), + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 7), +} + +export const createModifyCreateRingHeader = (header: ModifyCreateRingHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyCreateRingHeaderByteSize) + writeModifyCreateRingHeader(buffer, header, 0) + return buffer +} + +export const pushModifyCreateRingHeader = ( + buf: AutoSizedUint8Array, + header: ModifyCreateRingHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint16(Number(header.type)) + buf.pushUint32(Number(header.maxNodeId)) + buf.pushUint32(Number(header.size)) + return index +} + +export type ModifyMainHeader = { + id: number + type: PropTypeEnum + increment: boolean + incrementPositive: boolean + expire: boolean + size: number + start: number +} + +export const ModifyMainHeaderByteSize = 6 + +export const ModifyMainHeaderAlignOf = 8 + +export const writeModifyMainHeader = ( + buf: Uint8Array, + header: ModifyMainHeader, + offset: number, +): number => { + buf[offset] = Number(header.id) + offset += 1 + buf[offset] = Number(header.type) + offset += 1 + buf[offset] = 0 + buf[offset] |= (((header.increment ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= (((header.incrementPositive ? 1 : 0) >>> 0) & 1) << 1 + buf[offset] |= (((header.expire ? 1 : 0) >>> 0) & 1) << 2 + buf[offset] |= ((0 >>> 0) & 31) << 3 + offset += 1 + buf[offset] = Number(header.size) + offset += 1 + writeUint16(buf, Number(header.start), offset) + offset += 2 + return offset +} + +export const writeModifyMainHeaderProps = { + id: (buf: Uint8Array, value: number, offset: number) => { + buf[offset] = Number(value) + }, + type: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + buf[offset + 1] = Number(value) + }, + increment: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 2] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + incrementPositive: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 2] |= (((value ? 1 : 0) >>> 0) & 1) << 1 + }, + expire: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 2] |= (((value ? 1 : 0) >>> 0) & 1) << 2 + }, + size: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 3] = Number(value) + }, + start: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 4) + }, +} + +export const readModifyMainHeader = ( + buf: Uint8Array, + offset: number, +): ModifyMainHeader => { + const value: ModifyMainHeader = { + id: buf[offset], + type: (buf[offset + 1]) as PropTypeEnum, + increment: (((buf[offset + 2] >>> 0) & 1)) === 1, + incrementPositive: (((buf[offset + 2] >>> 1) & 1)) === 1, + expire: (((buf[offset + 2] >>> 2) & 1)) === 1, + size: buf[offset + 3], + start: readUint16(buf, offset + 4), + } + return value +} + +export const readModifyMainHeaderProps = { + id: (buf: Uint8Array, offset: number) => buf[offset], + type: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as PropTypeEnum, + increment: (buf: Uint8Array, offset: number) => (((buf[offset + 2] >>> 0) & 1)) === 1, + incrementPositive: (buf: Uint8Array, offset: number) => (((buf[offset + 2] >>> 1) & 1)) === 1, + expire: (buf: Uint8Array, offset: number) => (((buf[offset + 2] >>> 2) & 1)) === 1, + size: (buf: Uint8Array, offset: number) => buf[offset + 3], + start: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 4), +} + +export const createModifyMainHeader = (header: ModifyMainHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyMainHeaderByteSize) + writeModifyMainHeader(buffer, header, 0) + return buffer +} + +export const pushModifyMainHeader = ( + buf: AutoSizedUint8Array, + header: ModifyMainHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.id)) + buf.pushUint8(Number(header.type)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.increment ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= (((header.incrementPositive ? 1 : 0) >>> 0) & 1) << 1 + buf.view[buf.length - 1] |= (((header.expire ? 1 : 0) >>> 0) & 1) << 2 + buf.view[buf.length - 1] |= ((0 >>> 0) & 31) << 3 + buf.pushUint8(Number(header.size)) + buf.pushUint16(Number(header.start)) + return index +} + +export type ModifyPropHeader = { + id: number + type: PropTypeEnum + size: number +} + +export const ModifyPropHeaderByteSize = 6 + +export const ModifyPropHeaderAlignOf = 8 + +export const writeModifyPropHeader = ( + buf: Uint8Array, + header: ModifyPropHeader, + offset: number, +): number => { + buf[offset] = Number(header.id) + offset += 1 + buf[offset] = Number(header.type) + offset += 1 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset +} + +export const writeModifyPropHeaderProps = { + id: (buf: Uint8Array, value: number, offset: number) => { + buf[offset] = Number(value) + }, + type: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + buf[offset + 1] = Number(value) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 2) + }, +} + +export const readModifyPropHeader = ( + buf: Uint8Array, + offset: number, +): ModifyPropHeader => { + const value: ModifyPropHeader = { + id: buf[offset], + type: (buf[offset + 1]) as PropTypeEnum, + size: readUint32(buf, offset + 2), + } + return value +} + +export const readModifyPropHeaderProps = { + id: (buf: Uint8Array, offset: number) => buf[offset], + type: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as PropTypeEnum, + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 2), +} + +export const createModifyPropHeader = (header: ModifyPropHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyPropHeaderByteSize) + writeModifyPropHeader(buffer, header, 0) + return buffer +} + +export const pushModifyPropHeader = ( + buf: AutoSizedUint8Array, + header: ModifyPropHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.id)) + buf.pushUint8(Number(header.type)) + buf.pushUint32(Number(header.size)) + return index +} + +export const ModifyReferences = { + clear: 0, + ids: 1, + idsWithMeta: 2, + tmpIds: 3, + delIds: 4, + delTmpIds: 5, +} as const + +export const ModifyReferencesInverse = { + 0: 'clear', + 1: 'ids', + 2: 'idsWithMeta', + 3: 'tmpIds', + 4: 'delIds', + 5: 'delTmpIds', +} as const + +/** + clear, + ids, + idsWithMeta, + tmpIds, + delIds, + delTmpIds + */ +export type ModifyReferencesEnum = (typeof ModifyReferences)[keyof typeof ModifyReferences] + +export type ModifyReferencesHeader = { + op: ModifyReferencesEnum + size: number +} + +export const ModifyReferencesHeaderByteSize = 5 + +export const ModifyReferencesHeaderAlignOf = 8 + +export const writeModifyReferencesHeader = ( + buf: Uint8Array, + header: ModifyReferencesHeader, + offset: number, +): number => { + buf[offset] = Number(header.op) + offset += 1 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset +} + +export const writeModifyReferencesHeaderProps = { + op: (buf: Uint8Array, value: ModifyReferencesEnum, offset: number) => { + buf[offset] = Number(value) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 1) + }, +} + +export const readModifyReferencesHeader = ( + buf: Uint8Array, + offset: number, +): ModifyReferencesHeader => { + const value: ModifyReferencesHeader = { + op: (buf[offset]) as ModifyReferencesEnum, + size: readUint32(buf, offset + 1), + } + return value +} + +export const readModifyReferencesHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as ModifyReferencesEnum, + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 1), +} + +export const createModifyReferencesHeader = (header: ModifyReferencesHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyReferencesHeaderByteSize) + writeModifyReferencesHeader(buffer, header, 0) + return buffer +} + +export const pushModifyReferencesHeader = ( + buf: AutoSizedUint8Array, + header: ModifyReferencesHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint32(Number(header.size)) + return index +} + +export type ModifyReferencesMetaHeader = { + id: number + isTmp: boolean + withIndex: boolean + index: number + size: number +} + +export const ModifyReferencesMetaHeaderByteSize = 13 + +export const ModifyReferencesMetaHeaderAlignOf = 16 + +export const writeModifyReferencesMetaHeader = ( + buf: Uint8Array, + header: ModifyReferencesMetaHeader, + offset: number, +): number => { + writeUint32(buf, Number(header.id), offset) + offset += 4 + buf[offset] = 0 + buf[offset] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= (((header.withIndex ? 1 : 0) >>> 0) & 1) << 1 + buf[offset] |= ((0 >>> 0) & 63) << 2 + offset += 1 + writeUint32(buf, Number(header.index), offset) + offset += 4 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset +} + +export const writeModifyReferencesMetaHeaderProps = { + id: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset) + }, + isTmp: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 4] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + withIndex: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 4] |= (((value ? 1 : 0) >>> 0) & 1) << 1 + }, + index: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 5) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 9) + }, +} + +export const readModifyReferencesMetaHeader = ( + buf: Uint8Array, + offset: number, +): ModifyReferencesMetaHeader => { + const value: ModifyReferencesMetaHeader = { + id: readUint32(buf, offset), + isTmp: (((buf[offset + 4] >>> 0) & 1)) === 1, + withIndex: (((buf[offset + 4] >>> 1) & 1)) === 1, + index: readUint32(buf, offset + 5), + size: readUint32(buf, offset + 9), + } + return value +} + +export const readModifyReferencesMetaHeaderProps = { + id: (buf: Uint8Array, offset: number) => readUint32(buf, offset), + isTmp: (buf: Uint8Array, offset: number) => (((buf[offset + 4] >>> 0) & 1)) === 1, + withIndex: (buf: Uint8Array, offset: number) => (((buf[offset + 4] >>> 1) & 1)) === 1, + index: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 5), + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 9), +} + +export const createModifyReferencesMetaHeader = (header: ModifyReferencesMetaHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyReferencesMetaHeaderByteSize) + writeModifyReferencesMetaHeader(buffer, header, 0) + return buffer +} + +export const pushModifyReferencesMetaHeader = ( + buf: AutoSizedUint8Array, + header: ModifyReferencesMetaHeader, +): number => { + const index = buf.length + buf.pushUint32(Number(header.id)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= (((header.withIndex ? 1 : 0) >>> 0) & 1) << 1 + buf.view[buf.length - 1] |= ((0 >>> 0) & 63) << 2 + buf.pushUint32(Number(header.index)) + buf.pushUint32(Number(header.size)) + return index +} + +export type ModifyReferenceMetaHeader = { + id: number + isTmp: boolean + size: number +} + +export const ModifyReferenceMetaHeaderByteSize = 9 + +export const ModifyReferenceMetaHeaderAlignOf = 16 + +export const writeModifyReferenceMetaHeader = ( + buf: Uint8Array, + header: ModifyReferenceMetaHeader, + offset: number, +): number => { + writeUint32(buf, Number(header.id), offset) + offset += 4 + buf[offset] = 0 + buf[offset] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= ((0 >>> 0) & 127) << 1 + offset += 1 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset +} + +export const writeModifyReferenceMetaHeaderProps = { + id: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset) + }, + isTmp: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 4] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 5) + }, +} + +export const readModifyReferenceMetaHeader = ( + buf: Uint8Array, + offset: number, +): ModifyReferenceMetaHeader => { + const value: ModifyReferenceMetaHeader = { + id: readUint32(buf, offset), + isTmp: (((buf[offset + 4] >>> 0) & 1)) === 1, + size: readUint32(buf, offset + 5), + } + return value +} + +export const readModifyReferenceMetaHeaderProps = { + id: (buf: Uint8Array, offset: number) => readUint32(buf, offset), + isTmp: (buf: Uint8Array, offset: number) => (((buf[offset + 4] >>> 0) & 1)) === 1, + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 5), +} + +export const createModifyReferenceMetaHeader = (header: ModifyReferenceMetaHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyReferenceMetaHeaderByteSize) + writeModifyReferenceMetaHeader(buffer, header, 0) + return buffer +} + +export const pushModifyReferenceMetaHeader = ( + buf: AutoSizedUint8Array, + header: ModifyReferenceMetaHeader, +): number => { + const index = buf.length + buf.pushUint32(Number(header.id)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.isTmp ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= ((0 >>> 0) & 127) << 1 + buf.pushUint32(Number(header.size)) + return index +} + +export type ModifyCardinalityHeader = { + sparse: boolean + precision: number +} + +export const ModifyCardinalityHeaderByteSize = 2 + +export const ModifyCardinalityHeaderAlignOf = 2 + +export const packModifyCardinalityHeader = (obj: ModifyCardinalityHeader): number => { + let val = 0 + val |= ((obj.sparse ? 1 : 0) & 1) << 0 + val |= (Number(obj.precision) & 255) << 8 + return val +} + +export const unpackModifyCardinalityHeader = (val: number): ModifyCardinalityHeader => { + return { + sparse: ((val >>> 0) & 1) === 1, + precision: Number((val >>> 8) & 255), + } +} + +export const writeModifyCardinalityHeader = ( + buf: Uint8Array, + header: ModifyCardinalityHeader, + offset: number, +): number => { + buf[offset] = 0 + buf[offset] |= (((header.sparse ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= ((0 >>> 0) & 127) << 1 + offset += 1 + buf[offset] = Number(header.precision) + offset += 1 + return offset +} + +export const writeModifyCardinalityHeaderProps = { + sparse: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + precision: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 1] = Number(value) + }, +} + +export const readModifyCardinalityHeader = ( + buf: Uint8Array, + offset: number, +): ModifyCardinalityHeader => { + const value: ModifyCardinalityHeader = { + sparse: (((buf[offset] >>> 0) & 1)) === 1, + precision: buf[offset + 1], + } + return value +} + +export const readModifyCardinalityHeaderProps = { + sparse: (buf: Uint8Array, offset: number) => (((buf[offset] >>> 0) & 1)) === 1, + precision: (buf: Uint8Array, offset: number) => buf[offset + 1], +} + +export const createModifyCardinalityHeader = (header: ModifyCardinalityHeader): Uint8Array => { + const buffer = new Uint8Array(ModifyCardinalityHeaderByteSize) + writeModifyCardinalityHeader(buffer, header, 0) + return buffer +} + +export const pushModifyCardinalityHeader = ( + buf: AutoSizedUint8Array, + header: ModifyCardinalityHeader, +): number => { + const index = buf.length + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.sparse ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= ((0 >>> 0) & 127) << 1 + buf.pushUint8(Number(header.precision)) + return index +} + +export type ModifyResultItem = { + id: number + err: ModifyErrorEnum +} + +export const ModifyResultItemByteSize = 5 + +export const ModifyResultItemAlignOf = 8 + +export const writeModifyResultItem = ( + buf: Uint8Array, + header: ModifyResultItem, + offset: number, +): number => { + writeUint32(buf, Number(header.id), offset) + offset += 4 + buf[offset] = Number(header.err) + offset += 1 + return offset +} + +export const writeModifyResultItemProps = { + id: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset) + }, + err: (buf: Uint8Array, value: ModifyErrorEnum, offset: number) => { + buf[offset + 4] = Number(value) + }, +} + +export const readModifyResultItem = ( + buf: Uint8Array, + offset: number, +): ModifyResultItem => { + const value: ModifyResultItem = { + id: readUint32(buf, offset), + err: (buf[offset + 4]) as ModifyErrorEnum, + } + return value +} + +export const readModifyResultItemProps = { + id: (buf: Uint8Array, offset: number) => readUint32(buf, offset), + err: (buf: Uint8Array, offset: number) => (buf[offset + 4]) as ModifyErrorEnum, +} + +export const createModifyResultItem = (header: ModifyResultItem): Uint8Array => { + const buffer = new Uint8Array(ModifyResultItemByteSize) + writeModifyResultItem(buffer, header, 0) + return buffer +} + +export const pushModifyResultItem = ( + buf: AutoSizedUint8Array, + header: ModifyResultItem, +): number => { + const index = buf.length + buf.pushUint32(Number(header.id)) + buf.pushUint8(Number(header.err)) + return index +} + +export const ModifyError = { + null: 0, + nx: 1, + unknown: 2, +} as const + +export const ModifyErrorInverse = { + 0: 'null', + 1: 'nx', + 2: 'unknown', +} as const + +/** + null, + nx, + unknown + */ +export type ModifyErrorEnum = (typeof ModifyError)[keyof typeof ModifyError] + +export const PropType = { + null: 0, + timestamp: 1, + number: 4, + cardinality: 5, + uint8: 6, + uint32: 7, + boolean: 9, + enum: 10, + string: 11, + stringFixed: 12, + text: 13, + reference: 15, + references: 16, + microBuffer: 17, + alias: 18, + aliases: 19, + int8: 20, + int16: 21, + uint16: 22, + int32: 23, + binary: 25, + binaryFixed: 26, + vector: 27, + json: 28, + jsonFixed: 29, + object: 30, + colVec: 31, + id: 255, +} as const + +export const PropTypeInverse = { + 0: 'null', + 1: 'timestamp', + 4: 'number', + 5: 'cardinality', + 6: 'uint8', + 7: 'uint32', + 9: 'boolean', + 10: 'enum', + 11: 'string', + 12: 'stringFixed', + 13: 'text', + 15: 'reference', + 16: 'references', + 17: 'microBuffer', + 18: 'alias', + 19: 'aliases', + 20: 'int8', + 21: 'int16', + 22: 'uint16', + 23: 'int32', + 25: 'binary', + 26: 'binaryFixed', + 27: 'vector', + 28: 'json', + 29: 'jsonFixed', + 30: 'object', + 31: 'colVec', + 255: 'id', +} as const + +/** + null, + timestamp, + number, + cardinality, + uint8, + uint32, + boolean, + enum, + string, + stringFixed, + text, + reference, + references, + microBuffer, + alias, + aliases, + int8, + int16, + uint16, + int32, + binary, + binaryFixed, + vector, + json, + jsonFixed, + object, + colVec, + id + */ +export type PropTypeEnum = (typeof PropType)[keyof typeof PropType] + +export const PropTypeSelva = { + null: 0, + microBuffer: 1, + string: 2, + text: 3, + reference: 4, + references: 5, + alias: 8, + aliases: 9, + colVec: 10, +} as const + +export const PropTypeSelvaInverse = { + 0: 'null', + 1: 'microBuffer', + 2: 'string', + 3: 'text', + 4: 'reference', + 5: 'references', + 8: 'alias', + 9: 'aliases', + 10: 'colVec', +} as const + +/** + null, + microBuffer, + string, + text, + reference, + references, + alias, + aliases, + colVec + */ +export type PropTypeSelvaEnum = (typeof PropTypeSelva)[keyof typeof PropTypeSelva] + +export const RefOp = { + clear: 0, + del: 1, + end: ModOp.end, + set: 3, + setEdge: 4, +} as const + +export const RefOpInverse = { + 0: 'clear', + 1: 'del', + [ModOp.end]: 'end', + 3: 'set', + 4: 'setEdge', +} as const + +/** + clear, + del, + end, + set, + setEdge + */ +export type RefOpEnum = (typeof RefOp)[keyof typeof RefOp] + +export const ReadOp = { + none: 0, + id: 255, + edge: 252, + references: 253, + reference: 254, + aggregation: 250, + meta: 249, +} as const + +export const ReadOpInverse = { + 0: 'none', + 255: 'id', + 252: 'edge', + 253: 'references', + 254: 'reference', + 250: 'aggregation', + 249: 'meta', +} as const + +/** + none, + id, + edge, + references, + reference, + aggregation, + meta + */ +export type ReadOpEnum = (typeof ReadOp)[keyof typeof ReadOp] + +export const ReferencesSelect = { + index: 1, + any: 2, + all: 3, +} as const + +export const ReferencesSelectInverse = { + 1: 'index', + 2: 'any', + 3: 'all', +} as const + +/** + index, + any, + all + */ +export type ReferencesSelectEnum = (typeof ReferencesSelect)[keyof typeof ReferencesSelect] + +export const RefEdgeOp = { + noEdgeNoIndexRealId: 0, + edgeNoIndexRealId: 1, + edgeIndexRealId: 2, + noEdgeIndexRealId: 3, + noEdgeNoIndexTmpId: 4, + edgeNoIndexTmpId: 5, + edgeIndexTmpId: 6, + noEdgeIndexTmpId: 7, +} as const + +export const RefEdgeOpInverse = { + 0: 'noEdgeNoIndexRealId', + 1: 'edgeNoIndexRealId', + 2: 'edgeIndexRealId', + 3: 'noEdgeIndexRealId', + 4: 'noEdgeNoIndexTmpId', + 5: 'edgeNoIndexTmpId', + 6: 'edgeIndexTmpId', + 7: 'noEdgeIndexTmpId', +} as const + +/** + noEdgeNoIndexRealId, + edgeNoIndexRealId, + edgeIndexRealId, + noEdgeIndexRealId, + noEdgeNoIndexTmpId, + edgeNoIndexTmpId, + edgeIndexTmpId, + noEdgeIndexTmpId + */ +// this needs number because it has a any (_) condition +export type RefEdgeOpEnum = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | (number & {}) + +export const LangCode = { + none: 0, + aa: 1, + ab: 2, + af: 3, + ak: 4, + sq: 5, + am: 6, + ar: 7, + an: 8, + hy: 9, + as: 10, + av: 11, + ae: 12, + ay: 13, + az: 14, + eu: 15, + be: 16, + bn: 17, + bi: 18, + bs: 19, + br: 20, + bg: 21, + my: 22, + ca: 23, + km: 24, ce: 25, zh: 26, cv: 27, @@ -859,2553 +1917,3035 @@ export const LangCodeInverse = { ka, cnr */ -export type LangCodeEnum = (typeof LangCode)[keyof typeof LangCode] +export type LangCodeEnum = (typeof LangCode)[keyof typeof LangCode] + +export const MAIN_PROP = 0 +export const ID_PROP = 255 + +export const ResultType = { + default: 0, + references: 1, + reference: 2, + edge: 3, + referencesEdge: 4, + referenceEdge: 5, + aggregate: 6, + meta: 7, + metaEdge: 8, + fixed: 9, + edgeFixed: 10, +} as const + +export const ResultTypeInverse = { + 0: 'default', + 1: 'references', + 2: 'reference', + 3: 'edge', + 4: 'referencesEdge', + 5: 'referenceEdge', + 6: 'aggregate', + 7: 'meta', + 8: 'metaEdge', + 9: 'fixed', + 10: 'edgeFixed', +} as const + +/** + default, + references, + reference, + edge, + referencesEdge, + referenceEdge, + aggregate, + meta, + metaEdge, + fixed, + edgeFixed + */ +export type ResultTypeEnum = (typeof ResultType)[keyof typeof ResultType] + +export const AggFunction = { + none: 0, + avg: 1, + cardinality: 2, + concat: 3, + count: 4, + max: 5, + min: 6, + mode: 7, + percentile: 8, + rank: 9, + stddev: 10, + sum: 11, + variance: 12, + hmean: 13, +} as const + +export const AggFunctionInverse = { + 0: 'none', + 1: 'avg', + 2: 'cardinality', + 3: 'concat', + 4: 'count', + 5: 'max', + 6: 'min', + 7: 'mode', + 8: 'percentile', + 9: 'rank', + 10: 'stddev', + 11: 'sum', + 12: 'variance', + 13: 'hmean', +} as const + +/** + none, + avg, + cardinality, + concat, + count, + max, + min, + mode, + percentile, + rank, + stddev, + sum, + variance, + hmean + */ +export type AggFunctionEnum = (typeof AggFunction)[keyof typeof AggFunction] + +export const Compression = { + none: 0, + compressed: 1, +} as const + +export const CompressionInverse = { + 0: 'none', + 1: 'compressed', +} as const + +/** + none, + compressed + */ +export type CompressionEnum = (typeof Compression)[keyof typeof Compression] + +export const Interval = { + none: 0, + epoch: 1, + hour: 2, + minute: 3, + second: 4, + microseconds: 5, + day: 6, + doy: 7, + dow: 8, + isoDOW: 9, + week: 10, + month: 11, + isoMonth: 12, + quarter: 13, + year: 14, +} as const + +export const IntervalInverse = { + 0: 'none', + 1: 'epoch', + 2: 'hour', + 3: 'minute', + 4: 'second', + 5: 'microseconds', + 6: 'day', + 7: 'doy', + 8: 'dow', + 9: 'isoDOW', + 10: 'week', + 11: 'month', + 12: 'isoMonth', + 13: 'quarter', + 14: 'year', +} as const + +/** + none, + epoch, + hour, + minute, + second, + microseconds, + day, + doy, + dow, + isoDOW, + week, + month, + isoMonth, + quarter, + year + */ +export type IntervalEnum = (typeof Interval)[keyof typeof Interval] -export const MAIN_PROP = 0 -export const ID_PROP = 255 +export const Order = { + asc: 0, + desc: 1, +} as const -export const ResultType = { +export const OrderInverse = { + 0: 'asc', + 1: 'desc', +} as const + +/** + asc, + desc + */ +export type OrderEnum = (typeof Order)[keyof typeof Order] + +export type SortHeader = { + order: OrderEnum + prop: number + propType: PropTypeEnum + start: number + len: number + lang: LangCodeEnum + edgeType: number +} + +export const SortHeaderByteSize = 10 + +export const SortHeaderAlignOf = 16 + +export const writeSortHeader = ( + buf: Uint8Array, + header: SortHeader, + offset: number, +): number => { + buf[offset] = Number(header.order) + offset += 1 + buf[offset] = Number(header.prop) + offset += 1 + buf[offset] = Number(header.propType) + offset += 1 + writeUint16(buf, Number(header.start), offset) + offset += 2 + writeUint16(buf, Number(header.len), offset) + offset += 2 + buf[offset] = Number(header.lang) + offset += 1 + writeUint16(buf, Number(header.edgeType), offset) + offset += 2 + return offset +} + +export const writeSortHeaderProps = { + order: (buf: Uint8Array, value: OrderEnum, offset: number) => { + buf[offset] = Number(value) + }, + prop: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 1] = Number(value) + }, + propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + buf[offset + 2] = Number(value) + }, + start: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 3) + }, + len: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 5) + }, + lang: (buf: Uint8Array, value: LangCodeEnum, offset: number) => { + buf[offset + 7] = Number(value) + }, + edgeType: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 8) + }, +} + +export const readSortHeader = ( + buf: Uint8Array, + offset: number, +): SortHeader => { + const value: SortHeader = { + order: (buf[offset]) as OrderEnum, + prop: buf[offset + 1], + propType: (buf[offset + 2]) as PropTypeEnum, + start: readUint16(buf, offset + 3), + len: readUint16(buf, offset + 5), + lang: (buf[offset + 7]) as LangCodeEnum, + edgeType: readUint16(buf, offset + 8), + } + return value +} + +export const readSortHeaderProps = { + order: (buf: Uint8Array, offset: number) => (buf[offset]) as OrderEnum, + prop: (buf: Uint8Array, offset: number) => buf[offset + 1], + propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, + start: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 3), + len: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 5), + lang: (buf: Uint8Array, offset: number) => (buf[offset + 7]) as LangCodeEnum, + edgeType: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), +} + +export const createSortHeader = (header: SortHeader): Uint8Array => { + const buffer = new Uint8Array(SortHeaderByteSize) + writeSortHeader(buffer, header, 0) + return buffer +} + +export const pushSortHeader = ( + buf: AutoSizedUint8Array, + header: SortHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.order)) + buf.pushUint8(Number(header.prop)) + buf.pushUint8(Number(header.propType)) + buf.pushUint16(Number(header.start)) + buf.pushUint16(Number(header.len)) + buf.pushUint8(Number(header.lang)) + buf.pushUint16(Number(header.edgeType)) + return index +} + +export const QUERY_ITERATOR_DEFAULT = 0 +export const QUERY_ITERATOR_EDGE = 20 +export const QUERY_ITERATOR_EDGE_INCLUDE = 30 +export const QUERY_ITERATOR_SEARCH = 120 +export const QUERY_ITERATOR_SEARCH_VEC = 130 +export const QUERY_ITERATOR_AGGREGATES = 140 +export const QueryIteratorType = { default: 0, - references: 1, - reference: 2, - edge: 3, - referencesEdge: 4, - referenceEdge: 5, - aggregate: 6, - meta: 7, - metaEdge: 8, - fixed: 9, - edgeFixed: 10, + sort: 1, + filter: 2, + filterSort: 3, + desc: 4, + descSort: 5, + descFilter: 6, + descFilterSort: 7, + edge: 20, + edgeSort: 21, + edgeFilter: 22, + edgeFilterSort: 23, + edgeDesc: 24, + edgeDescSort: 25, + edgeDescFilter: 26, + edgeDescFilterSort: 27, + edgeInclude: 30, + edgeIncludeSort: 31, + edgeIncludeFilter: 32, + edgeIncludeFilterSort: 33, + edgeIncludeDesc: 34, + edgeIncludeDescSort: 35, + edgeIncludeDescFilter: 36, + edgeIncludeDescFilterSort: 37, + edgeIncludeFilterOnEdge: 40, + edgeIncludeFilterOnEdgeDesc: 41, + edgeIncludeFilterOnEdgeSort: 42, + edgeIncludeFilterOnEdgeSortDesc: 43, + edgeFilterOnEdge: 60, + edgeFilterOnEdgeDesc: 61, + edgeFilterOnEdgeSort: 62, + edgeFilterOnEdgeSortDesc: 63, + edgeIncludeFilterAndFilterOnEdge: 70, + edgeIncludeFilterAndFilterOnEdgeDesc: 71, + edgeIncludeFilterAndFilterOnEdgeSort: 72, + edgeIncludeFilterAndFilterOnEdgeSortDesc: 73, + edgeFilterAndFilterOnEdge: 80, + edgeFilterAndFilterOnEdgeDesc: 81, + edgeFilterAndFilterOnEdgeSort: 82, + edgeFilterAndFilterOnEdgeSortDesc: 83, + search: 120, + searchFilter: 121, + vec: 130, + vecFilter: 131, + aggregate: 140, + aggregateFilter: 141, + groupBy: 142, + groupByFilter: 143, +} as const + +export const QueryIteratorTypeInverse = { + 0: 'default', + 1: 'sort', + 2: 'filter', + 3: 'filterSort', + 4: 'desc', + 5: 'descSort', + 6: 'descFilter', + 7: 'descFilterSort', + 20: 'edge', + 21: 'edgeSort', + 22: 'edgeFilter', + 23: 'edgeFilterSort', + 24: 'edgeDesc', + 25: 'edgeDescSort', + 26: 'edgeDescFilter', + 27: 'edgeDescFilterSort', + 30: 'edgeInclude', + 31: 'edgeIncludeSort', + 32: 'edgeIncludeFilter', + 33: 'edgeIncludeFilterSort', + 34: 'edgeIncludeDesc', + 35: 'edgeIncludeDescSort', + 36: 'edgeIncludeDescFilter', + 37: 'edgeIncludeDescFilterSort', + 40: 'edgeIncludeFilterOnEdge', + 41: 'edgeIncludeFilterOnEdgeDesc', + 42: 'edgeIncludeFilterOnEdgeSort', + 43: 'edgeIncludeFilterOnEdgeSortDesc', + 60: 'edgeFilterOnEdge', + 61: 'edgeFilterOnEdgeDesc', + 62: 'edgeFilterOnEdgeSort', + 63: 'edgeFilterOnEdgeSortDesc', + 70: 'edgeIncludeFilterAndFilterOnEdge', + 71: 'edgeIncludeFilterAndFilterOnEdgeDesc', + 72: 'edgeIncludeFilterAndFilterOnEdgeSort', + 73: 'edgeIncludeFilterAndFilterOnEdgeSortDesc', + 80: 'edgeFilterAndFilterOnEdge', + 81: 'edgeFilterAndFilterOnEdgeDesc', + 82: 'edgeFilterAndFilterOnEdgeSort', + 83: 'edgeFilterAndFilterOnEdgeSortDesc', + 120: 'search', + 121: 'searchFilter', + 130: 'vec', + 131: 'vecFilter', + 140: 'aggregate', + 141: 'aggregateFilter', + 142: 'groupBy', + 143: 'groupByFilter', +} as const + +/** + default, + sort, + filter, + filterSort, + desc, + descSort, + descFilter, + descFilterSort, + edge, + edgeSort, + edgeFilter, + edgeFilterSort, + edgeDesc, + edgeDescSort, + edgeDescFilter, + edgeDescFilterSort, + edgeInclude, + edgeIncludeSort, + edgeIncludeFilter, + edgeIncludeFilterSort, + edgeIncludeDesc, + edgeIncludeDescSort, + edgeIncludeDescFilter, + edgeIncludeDescFilterSort, + edgeIncludeFilterOnEdge, + edgeIncludeFilterOnEdgeDesc, + edgeIncludeFilterOnEdgeSort, + edgeIncludeFilterOnEdgeSortDesc, + edgeFilterOnEdge, + edgeFilterOnEdgeDesc, + edgeFilterOnEdgeSort, + edgeFilterOnEdgeSortDesc, + edgeIncludeFilterAndFilterOnEdge, + edgeIncludeFilterAndFilterOnEdgeDesc, + edgeIncludeFilterAndFilterOnEdgeSort, + edgeIncludeFilterAndFilterOnEdgeSortDesc, + edgeFilterAndFilterOnEdge, + edgeFilterAndFilterOnEdgeDesc, + edgeFilterAndFilterOnEdgeSort, + edgeFilterAndFilterOnEdgeSortDesc, + search, + searchFilter, + vec, + vecFilter, + aggregate, + aggregateFilter, + groupBy, + groupByFilter + */ +export type QueryIteratorTypeEnum = (typeof QueryIteratorType)[keyof typeof QueryIteratorType] + +export const QueryType = { + id: 0, + ids: 1, + default: 2, + alias: 3, + aggregates: 4, + aggregatesCount: 5, + references: 6, + reference: 7, + aliasFilter: 8, + idFilter: 9, + referenceEdge: 10, } as const -export const ResultTypeInverse = { - 0: 'default', - 1: 'references', - 2: 'reference', - 3: 'edge', - 4: 'referencesEdge', - 5: 'referenceEdge', - 6: 'aggregate', - 7: 'meta', - 8: 'metaEdge', - 9: 'fixed', - 10: 'edgeFixed', +export const QueryTypeInverse = { + 0: 'id', + 1: 'ids', + 2: 'default', + 3: 'alias', + 4: 'aggregates', + 5: 'aggregatesCount', + 6: 'references', + 7: 'reference', + 8: 'aliasFilter', + 9: 'idFilter', + 10: 'referenceEdge', } as const /** + id, + ids, default, + alias, + aggregates, + aggregatesCount, references, reference, - edge, - referencesEdge, - referenceEdge, - aggregate, - meta, - metaEdge, - fixed, - edgeFixed + aliasFilter, + idFilter, + referenceEdge */ -export type ResultTypeEnum = (typeof ResultType)[keyof typeof ResultType] +export type QueryTypeEnum = (typeof QueryType)[keyof typeof QueryType] -export const AggFunction = { - none: 0, - avg: 1, - cardinality: 2, - concat: 3, - count: 4, - max: 5, - min: 6, - mode: 7, - percentile: 8, - rank: 9, - stddev: 10, - sum: 11, - variance: 12, - hmean: 13, +export const IncludeOp = { + aggregates: 4, + aggregatesCount: 5, + references: 6, + reference: 7, + referenceEdge: 10, + default: 127, + referencesAggregation: 128, + meta: 129, + partial: 130, + defaultWithOpts: 131, + metaWithOpts: 132, } as const -export const AggFunctionInverse = { - 0: 'none', - 1: 'avg', - 2: 'cardinality', - 3: 'concat', - 4: 'count', - 5: 'max', - 6: 'min', - 7: 'mode', - 8: 'percentile', - 9: 'rank', - 10: 'stddev', - 11: 'sum', - 12: 'variance', - 13: 'hmean', +export const IncludeOpInverse = { + 4: 'aggregates', + 5: 'aggregatesCount', + 6: 'references', + 7: 'reference', + 10: 'referenceEdge', + 127: 'default', + 128: 'referencesAggregation', + 129: 'meta', + 130: 'partial', + 131: 'defaultWithOpts', + 132: 'metaWithOpts', } as const /** - none, - avg, - cardinality, - concat, - count, - max, - min, - mode, - percentile, - rank, - stddev, - sum, - variance, - hmean + aggregates, + aggregatesCount, + references, + reference, + referenceEdge, + default, + referencesAggregation, + meta, + partial, + defaultWithOpts, + metaWithOpts */ -export type AggFunctionEnum = (typeof AggFunction)[keyof typeof AggFunction] +export type IncludeOpEnum = (typeof IncludeOp)[keyof typeof IncludeOp] -export const Compression = { - none: 0, - compressed: 1, -} as const +export type IncludeHeader = { + op: IncludeOpEnum + prop: number + propType: PropTypeEnum +} -export const CompressionInverse = { - 0: 'none', - 1: 'compressed', -} as const +export const IncludeHeaderByteSize = 3 -/** - none, - compressed - */ -export type CompressionEnum = (typeof Compression)[keyof typeof Compression] +export const IncludeHeaderAlignOf = 4 -export const Interval = { - none: 0, - epoch: 1, - hour: 2, - minute: 3, - second: 4, - microseconds: 5, - day: 6, - doy: 7, - dow: 8, - isoDOW: 9, - week: 10, - month: 11, - isoMonth: 12, - quarter: 13, - year: 14, -} as const +export const packIncludeHeader = (obj: IncludeHeader): number => { + let val = 0 + val |= (Number(obj.op) & 255) << 0 + val |= (Number(obj.prop) & 255) << 8 + val |= (Number(obj.propType) & 255) << 16 + return val +} -export const IntervalInverse = { - 0: 'none', - 1: 'epoch', - 2: 'hour', - 3: 'minute', - 4: 'second', - 5: 'microseconds', - 6: 'day', - 7: 'doy', - 8: 'dow', - 9: 'isoDOW', - 10: 'week', - 11: 'month', - 12: 'isoMonth', - 13: 'quarter', - 14: 'year', -} as const +export const unpackIncludeHeader = (val: number): IncludeHeader => { + return { + op: ((val >>> 0) & 255) as IncludeOpEnum, + prop: Number((val >>> 8) & 255), + propType: ((val >>> 16) & 255) as PropTypeEnum, + } +} -/** - none, - epoch, - hour, - minute, - second, - microseconds, - day, - doy, - dow, - isoDOW, - week, - month, - isoMonth, - quarter, - year - */ -export type IntervalEnum = (typeof Interval)[keyof typeof Interval] +export const writeIncludeHeader = ( + buf: Uint8Array, + header: IncludeHeader, + offset: number, +): number => { + buf[offset] = Number(header.op) + offset += 1 + buf[offset] = Number(header.prop) + offset += 1 + buf[offset] = Number(header.propType) + offset += 1 + return offset +} + +export const writeIncludeHeaderProps = { + op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { + buf[offset] = Number(value) + }, + prop: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 1] = Number(value) + }, + propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + buf[offset + 2] = Number(value) + }, +} + +export const readIncludeHeader = ( + buf: Uint8Array, + offset: number, +): IncludeHeader => { + const value: IncludeHeader = { + op: (buf[offset]) as IncludeOpEnum, + prop: buf[offset + 1], + propType: (buf[offset + 2]) as PropTypeEnum, + } + return value +} -export const Order = { - asc: 0, - desc: 1, -} as const +export const readIncludeHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, + prop: (buf: Uint8Array, offset: number) => buf[offset + 1], + propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, +} -export const OrderInverse = { - 0: 'asc', - 1: 'desc', -} as const +export const createIncludeHeader = (header: IncludeHeader): Uint8Array => { + const buffer = new Uint8Array(IncludeHeaderByteSize) + writeIncludeHeader(buffer, header, 0) + return buffer +} -/** - asc, - desc - */ -export type OrderEnum = (typeof Order)[keyof typeof Order] +export const pushIncludeHeader = ( + buf: AutoSizedUint8Array, + header: IncludeHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint8(Number(header.prop)) + buf.pushUint8(Number(header.propType)) + return index +} -export type SortHeader = { - order: OrderEnum +export type IncludeMetaHeader = { + op: IncludeOpEnum prop: number propType: PropTypeEnum - start: number - len: number - lang: LangCodeEnum - edgeType: number } -export const SortHeaderByteSize = 10 +export const IncludeMetaHeaderByteSize = 3 -export const SortHeaderAlignOf = 16 +export const IncludeMetaHeaderAlignOf = 4 -export const packSortHeader = (obj: SortHeader): bigint => { - let val = 0n - val |= (BigInt(obj.order) & 255n) << 0n - val |= (BigInt(obj.prop) & 255n) << 8n - val |= (BigInt(obj.propType) & 255n) << 16n - val |= (BigInt(obj.start) & 65535n) << 24n - val |= (BigInt(obj.len) & 65535n) << 40n - val |= (BigInt(obj.lang) & 255n) << 56n - val |= (BigInt(obj.edgeType) & 65535n) << 64n +export const packIncludeMetaHeader = (obj: IncludeMetaHeader): number => { + let val = 0 + val |= (Number(obj.op) & 255) << 0 + val |= (Number(obj.prop) & 255) << 8 + val |= (Number(obj.propType) & 255) << 16 return val } -export const unpackSortHeader = (val: bigint): SortHeader => { +export const unpackIncludeMetaHeader = (val: number): IncludeMetaHeader => { return { - order: (Number((val >> 0n) & 255n)) as OrderEnum, - prop: Number((val >> 8n) & 255n), - propType: (Number((val >> 16n) & 255n)) as PropTypeEnum, - start: Number((val >> 24n) & 65535n), - len: Number((val >> 40n) & 65535n), - lang: (Number((val >> 56n) & 255n)) as LangCodeEnum, - edgeType: Number((val >> 64n) & 65535n), + op: ((val >>> 0) & 255) as IncludeOpEnum, + prop: Number((val >>> 8) & 255), + propType: ((val >>> 16) & 255) as PropTypeEnum, } } -export const writeSortHeader = ( +export const writeIncludeMetaHeader = ( buf: Uint8Array, - header: SortHeader, + header: IncludeMetaHeader, offset: number, ): number => { - buf[offset] = Number(header.order) + buf[offset] = Number(header.op) offset += 1 buf[offset] = Number(header.prop) offset += 1 buf[offset] = Number(header.propType) offset += 1 - writeUint16(buf, Number(header.start), offset) - offset += 2 - writeUint16(buf, Number(header.len), offset) - offset += 2 - buf[offset] = Number(header.lang) + return offset +} + +export const writeIncludeMetaHeaderProps = { + op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { + buf[offset] = Number(value) + }, + prop: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 1] = Number(value) + }, + propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + buf[offset + 2] = Number(value) + }, +} + +export const readIncludeMetaHeader = ( + buf: Uint8Array, + offset: number, +): IncludeMetaHeader => { + const value: IncludeMetaHeader = { + op: (buf[offset]) as IncludeOpEnum, + prop: buf[offset + 1], + propType: (buf[offset + 2]) as PropTypeEnum, + } + return value +} + +export const readIncludeMetaHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, + prop: (buf: Uint8Array, offset: number) => buf[offset + 1], + propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, +} + +export const createIncludeMetaHeader = (header: IncludeMetaHeader): Uint8Array => { + const buffer = new Uint8Array(IncludeMetaHeaderByteSize) + writeIncludeMetaHeader(buffer, header, 0) + return buffer +} + +export const pushIncludeMetaHeader = ( + buf: AutoSizedUint8Array, + header: IncludeMetaHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint8(Number(header.prop)) + buf.pushUint8(Number(header.propType)) + return index +} + +export type IncludePartialHeader = { + op: IncludeOpEnum + prop: number + propType: PropTypeEnum + amount: number +} + +export const IncludePartialHeaderByteSize = 5 + +export const IncludePartialHeaderAlignOf = 8 + +export const writeIncludePartialHeader = ( + buf: Uint8Array, + header: IncludePartialHeader, + offset: number, +): number => { + buf[offset] = Number(header.op) offset += 1 - writeUint16(buf, Number(header.edgeType), offset) + buf[offset] = Number(header.prop) + offset += 1 + buf[offset] = Number(header.propType) + offset += 1 + writeUint16(buf, Number(header.amount), offset) offset += 2 return offset } -export const writeSortHeaderProps = { - order: (buf: Uint8Array, value: OrderEnum, offset: number) => { +export const writeIncludePartialHeaderProps = { + op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { buf[offset] = Number(value) }, prop: (buf: Uint8Array, value: number, offset: number) => { buf[offset + 1] = Number(value) }, - propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { - buf[offset + 2] = Number(value) + propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + buf[offset + 2] = Number(value) + }, + amount: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 3) + }, +} + +export const readIncludePartialHeader = ( + buf: Uint8Array, + offset: number, +): IncludePartialHeader => { + const value: IncludePartialHeader = { + op: (buf[offset]) as IncludeOpEnum, + prop: buf[offset + 1], + propType: (buf[offset + 2]) as PropTypeEnum, + amount: readUint16(buf, offset + 3), + } + return value +} + +export const readIncludePartialHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, + prop: (buf: Uint8Array, offset: number) => buf[offset + 1], + propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, + amount: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 3), +} + +export const createIncludePartialHeader = (header: IncludePartialHeader): Uint8Array => { + const buffer = new Uint8Array(IncludePartialHeaderByteSize) + writeIncludePartialHeader(buffer, header, 0) + return buffer +} + +export const pushIncludePartialHeader = ( + buf: AutoSizedUint8Array, + header: IncludePartialHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint8(Number(header.prop)) + buf.pushUint8(Number(header.propType)) + buf.pushUint16(Number(header.amount)) + return index +} + +export type IncludePartialProp = { + start: number + size: number +} + +export const IncludePartialPropByteSize = 4 + +export const IncludePartialPropAlignOf = 4 + +export const packIncludePartialProp = (obj: IncludePartialProp): number => { + let val = 0 + val |= (Number(obj.start) & 65535) << 0 + val |= (Number(obj.size) & 65535) << 16 + return val +} + +export const unpackIncludePartialProp = (val: number): IncludePartialProp => { + return { + start: Number((val >>> 0) & 65535), + size: Number((val >>> 16) & 65535), + } +} + +export const writeIncludePartialProp = ( + buf: Uint8Array, + header: IncludePartialProp, + offset: number, +): number => { + writeUint16(buf, Number(header.start), offset) + offset += 2 + writeUint16(buf, Number(header.size), offset) + offset += 2 + return offset +} + +export const writeIncludePartialPropProps = { + start: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 2) + }, +} + +export const readIncludePartialProp = ( + buf: Uint8Array, + offset: number, +): IncludePartialProp => { + const value: IncludePartialProp = { + start: readUint16(buf, offset), + size: readUint16(buf, offset + 2), + } + return value +} + +export const readIncludePartialPropProps = { + start: (buf: Uint8Array, offset: number) => readUint16(buf, offset), + size: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 2), +} + +export const createIncludePartialProp = (header: IncludePartialProp): Uint8Array => { + const buffer = new Uint8Array(IncludePartialPropByteSize) + writeIncludePartialProp(buffer, header, 0) + return buffer +} + +export const pushIncludePartialProp = ( + buf: AutoSizedUint8Array, + header: IncludePartialProp, +): number => { + const index = buf.length + buf.pushUint16(Number(header.start)) + buf.pushUint16(Number(header.size)) + return index +} + +export type IncludeOpts = { + end: number + isChars: boolean + hasOpts: boolean + langFallbackSize: number + lang: LangCodeEnum +} + +export const IncludeOptsByteSize = 7 + +export const IncludeOptsAlignOf = 8 + +export const writeIncludeOpts = ( + buf: Uint8Array, + header: IncludeOpts, + offset: number, +): number => { + writeUint32(buf, Number(header.end), offset) + offset += 4 + buf[offset] = 0 + buf[offset] |= (((header.isChars ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= (((header.hasOpts ? 1 : 0) >>> 0) & 1) << 1 + buf[offset] |= ((0 >>> 0) & 63) << 2 + offset += 1 + buf[offset] = Number(header.langFallbackSize) + offset += 1 + buf[offset] = Number(header.lang) + offset += 1 + return offset +} + +export const writeIncludeOptsProps = { + end: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset) + }, + isChars: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 4] |= (((value ? 1 : 0) >>> 0) & 1) << 0 }, - start: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 3) + hasOpts: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 4] |= (((value ? 1 : 0) >>> 0) & 1) << 1 }, - len: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 5) + langFallbackSize: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 5] = Number(value) }, lang: (buf: Uint8Array, value: LangCodeEnum, offset: number) => { - buf[offset + 7] = Number(value) - }, - edgeType: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 8) + buf[offset + 6] = Number(value) }, } -export const readSortHeader = ( +export const readIncludeOpts = ( buf: Uint8Array, offset: number, -): SortHeader => { - const value: SortHeader = { - order: (buf[offset]) as OrderEnum, - prop: buf[offset + 1], - propType: (buf[offset + 2]) as PropTypeEnum, - start: readUint16(buf, offset + 3), - len: readUint16(buf, offset + 5), - lang: (buf[offset + 7]) as LangCodeEnum, - edgeType: readUint16(buf, offset + 8), +): IncludeOpts => { + const value: IncludeOpts = { + end: readUint32(buf, offset), + isChars: (((buf[offset + 4] >>> 0) & 1)) === 1, + hasOpts: (((buf[offset + 4] >>> 1) & 1)) === 1, + langFallbackSize: buf[offset + 5], + lang: (buf[offset + 6]) as LangCodeEnum, } return value } -export const readSortHeaderProps = { - order: (buf: Uint8Array, offset: number) => (buf[offset]) as OrderEnum, - prop: (buf: Uint8Array, offset: number) => buf[offset + 1], - propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, - start: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 3), - len: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 5), - lang: (buf: Uint8Array, offset: number) => (buf[offset + 7]) as LangCodeEnum, - edgeType: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), +export const readIncludeOptsProps = { + end: (buf: Uint8Array, offset: number) => readUint32(buf, offset), + isChars: (buf: Uint8Array, offset: number) => (((buf[offset + 4] >>> 0) & 1)) === 1, + hasOpts: (buf: Uint8Array, offset: number) => (((buf[offset + 4] >>> 1) & 1)) === 1, + langFallbackSize: (buf: Uint8Array, offset: number) => buf[offset + 5], + lang: (buf: Uint8Array, offset: number) => (buf[offset + 6]) as LangCodeEnum, } -export const createSortHeader = (header: SortHeader): Uint8Array => { - const buffer = new Uint8Array(SortHeaderByteSize) - writeSortHeader(buffer, header, 0) +export const createIncludeOpts = (header: IncludeOpts): Uint8Array => { + const buffer = new Uint8Array(IncludeOptsByteSize) + writeIncludeOpts(buffer, header, 0) return buffer } -export const QUERY_ITERATOR_DEFAULT = 0 -export const QUERY_ITERATOR_EDGE = 20 -export const QUERY_ITERATOR_EDGE_INCLUDE = 30 -export const QUERY_ITERATOR_SEARCH = 120 -export const QUERY_ITERATOR_SEARCH_VEC = 130 -export const QUERY_ITERATOR_AGGREGATES = 140 -export const QueryIteratorType = { - default: 0, - sort: 1, - filter: 2, - filterSort: 3, - desc: 4, - descSort: 5, - descFilter: 6, - descFilterSort: 7, - edge: 20, - edgeSort: 21, - edgeFilter: 22, - edgeFilterSort: 23, - edgeDesc: 24, - edgeDescSort: 25, - edgeDescFilter: 26, - edgeDescFilterSort: 27, - edgeInclude: 30, - edgeIncludeSort: 31, - edgeIncludeFilter: 32, - edgeIncludeFilterSort: 33, - edgeIncludeDesc: 34, - edgeIncludeDescSort: 35, - edgeIncludeDescFilter: 36, - edgeIncludeDescFilterSort: 37, - search: 120, - searchFilter: 121, - vec: 130, - vecFilter: 131, - aggregate: 140, - aggregateFilter: 141, - groupBy: 142, - groupByFilter: 143, -} as const - -export const QueryIteratorTypeInverse = { - 0: 'default', - 1: 'sort', - 2: 'filter', - 3: 'filterSort', - 4: 'desc', - 5: 'descSort', - 6: 'descFilter', - 7: 'descFilterSort', - 20: 'edge', - 21: 'edgeSort', - 22: 'edgeFilter', - 23: 'edgeFilterSort', - 24: 'edgeDesc', - 25: 'edgeDescSort', - 26: 'edgeDescFilter', - 27: 'edgeDescFilterSort', - 30: 'edgeInclude', - 31: 'edgeIncludeSort', - 32: 'edgeIncludeFilter', - 33: 'edgeIncludeFilterSort', - 34: 'edgeIncludeDesc', - 35: 'edgeIncludeDescSort', - 36: 'edgeIncludeDescFilter', - 37: 'edgeIncludeDescFilterSort', - 120: 'search', - 121: 'searchFilter', - 130: 'vec', - 131: 'vecFilter', - 140: 'aggregate', - 141: 'aggregateFilter', - 142: 'groupBy', - 143: 'groupByFilter', -} as const - -/** - default, - sort, - filter, - filterSort, - desc, - descSort, - descFilter, - descFilterSort, - edge, - edgeSort, - edgeFilter, - edgeFilterSort, - edgeDesc, - edgeDescSort, - edgeDescFilter, - edgeDescFilterSort, - edgeInclude, - edgeIncludeSort, - edgeIncludeFilter, - edgeIncludeFilterSort, - edgeIncludeDesc, - edgeIncludeDescSort, - edgeIncludeDescFilter, - edgeIncludeDescFilterSort, - search, - searchFilter, - vec, - vecFilter, - aggregate, - aggregateFilter, - groupBy, - groupByFilter - */ -export type QueryIteratorTypeEnum = (typeof QueryIteratorType)[keyof typeof QueryIteratorType] - -export const QueryType = { - id: 0, - ids: 1, - default: 2, - alias: 3, - aggregates: 4, - aggregatesCount: 5, - references: 6, - reference: 7, - aliasFilter: 8, - idFilter: 9, - referenceEdge: 10, -} as const - -export const QueryTypeInverse = { - 0: 'id', - 1: 'ids', - 2: 'default', - 3: 'alias', - 4: 'aggregates', - 5: 'aggregatesCount', - 6: 'references', - 7: 'reference', - 8: 'aliasFilter', - 9: 'idFilter', - 10: 'referenceEdge', -} as const - -/** - id, - ids, - default, - alias, - aggregates, - aggregatesCount, - references, - reference, - aliasFilter, - idFilter, - referenceEdge - */ -export type QueryTypeEnum = (typeof QueryType)[keyof typeof QueryType] - -export const IncludeOp = { - aggregates: 4, - aggregatesCount: 5, - references: 6, - reference: 7, - referenceEdge: 10, - default: 127, - referencesAggregation: 128, - meta: 129, - partial: 130, - defaultWithOpts: 131, - metaWithOpts: 132, -} as const - -export const IncludeOpInverse = { - 4: 'aggregates', - 5: 'aggregatesCount', - 6: 'references', - 7: 'reference', - 10: 'referenceEdge', - 127: 'default', - 128: 'referencesAggregation', - 129: 'meta', - 130: 'partial', - 131: 'defaultWithOpts', - 132: 'metaWithOpts', -} as const - -/** - aggregates, - aggregatesCount, - references, - reference, - referenceEdge, - default, - referencesAggregation, - meta, - partial, - defaultWithOpts, - metaWithOpts - */ -export type IncludeOpEnum = (typeof IncludeOp)[keyof typeof IncludeOp] - -export type IncludeHeader = { - op: IncludeOpEnum +export const pushIncludeOpts = ( + buf: AutoSizedUint8Array, + header: IncludeOpts, +): number => { + const index = buf.length + buf.pushUint32(Number(header.end)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.isChars ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= (((header.hasOpts ? 1 : 0) >>> 0) & 1) << 1 + buf.view[buf.length - 1] |= ((0 >>> 0) & 63) << 2 + buf.pushUint8(Number(header.langFallbackSize)) + buf.pushUint8(Number(header.lang)) + return index +} + +export type IncludeResponse = { prop: number - propType: PropTypeEnum + size: number } -export const IncludeHeaderByteSize = 3 +export const IncludeResponseByteSize = 5 -export const IncludeHeaderAlignOf = 4 +export const IncludeResponseAlignOf = 8 -export const packIncludeHeader = (obj: IncludeHeader): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.prop) & 255n) << 8n - val |= (BigInt(obj.propType) & 255n) << 16n - return val +export const writeIncludeResponse = ( + buf: Uint8Array, + header: IncludeResponse, + offset: number, +): number => { + buf[offset] = Number(header.prop) + offset += 1 + writeUint32(buf, Number(header.size), offset) + offset += 4 + return offset } -export const unpackIncludeHeader = (val: bigint): IncludeHeader => { - return { - op: (Number((val >> 0n) & 255n)) as IncludeOpEnum, - prop: Number((val >> 8n) & 255n), - propType: (Number((val >> 16n) & 255n)) as PropTypeEnum, +export const writeIncludeResponseProps = { + prop: (buf: Uint8Array, value: number, offset: number) => { + buf[offset] = Number(value) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 1) + }, +} + +export const readIncludeResponse = ( + buf: Uint8Array, + offset: number, +): IncludeResponse => { + const value: IncludeResponse = { + prop: buf[offset], + size: readUint32(buf, offset + 1), } + return value } -export const writeIncludeHeader = ( +export const readIncludeResponseProps = { + prop: (buf: Uint8Array, offset: number) => buf[offset], + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 1), +} + +export const createIncludeResponse = (header: IncludeResponse): Uint8Array => { + const buffer = new Uint8Array(IncludeResponseByteSize) + writeIncludeResponse(buffer, header, 0) + return buffer +} + +export const pushIncludeResponse = ( + buf: AutoSizedUint8Array, + header: IncludeResponse, +): number => { + const index = buf.length + buf.pushUint8(Number(header.prop)) + buf.pushUint32(Number(header.size)) + return index +} + +export type IncludeResponseMeta = { + op: ReadOpEnum + prop: number + lang: LangCodeEnum + compressed: boolean + crc32: number + size: number +} + +export const IncludeResponseMetaByteSize = 12 + +export const IncludeResponseMetaAlignOf = 16 + +export const writeIncludeResponseMeta = ( buf: Uint8Array, - header: IncludeHeader, + header: IncludeResponseMeta, offset: number, ): number => { buf[offset] = Number(header.op) offset += 1 buf[offset] = Number(header.prop) offset += 1 - buf[offset] = Number(header.propType) + buf[offset] = Number(header.lang) + offset += 1 + buf[offset] = 0 + buf[offset] |= (((header.compressed ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= ((0 >>> 0) & 127) << 1 offset += 1 + writeUint32(buf, Number(header.crc32), offset) + offset += 4 + writeUint32(buf, Number(header.size), offset) + offset += 4 return offset } -export const writeIncludeHeaderProps = { - op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { +export const writeIncludeResponseMetaProps = { + op: (buf: Uint8Array, value: ReadOpEnum, offset: number) => { buf[offset] = Number(value) }, prop: (buf: Uint8Array, value: number, offset: number) => { buf[offset + 1] = Number(value) }, - propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + lang: (buf: Uint8Array, value: LangCodeEnum, offset: number) => { buf[offset + 2] = Number(value) }, + compressed: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 3] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + crc32: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 4) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 8) + }, } -export const readIncludeHeader = ( +export const readIncludeResponseMeta = ( buf: Uint8Array, offset: number, -): IncludeHeader => { - const value: IncludeHeader = { - op: (buf[offset]) as IncludeOpEnum, +): IncludeResponseMeta => { + const value: IncludeResponseMeta = { + op: (buf[offset]) as ReadOpEnum, prop: buf[offset + 1], - propType: (buf[offset + 2]) as PropTypeEnum, + lang: (buf[offset + 2]) as LangCodeEnum, + compressed: (((buf[offset + 3] >>> 0) & 1)) === 1, + crc32: readUint32(buf, offset + 4), + size: readUint32(buf, offset + 8), } return value } -export const readIncludeHeaderProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, +export const readIncludeResponseMetaProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as ReadOpEnum, prop: (buf: Uint8Array, offset: number) => buf[offset + 1], - propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, + lang: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as LangCodeEnum, + compressed: (buf: Uint8Array, offset: number) => (((buf[offset + 3] >>> 0) & 1)) === 1, + crc32: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 4), + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 8), } -export const createIncludeHeader = (header: IncludeHeader): Uint8Array => { - const buffer = new Uint8Array(IncludeHeaderByteSize) - writeIncludeHeader(buffer, header, 0) +export const createIncludeResponseMeta = (header: IncludeResponseMeta): Uint8Array => { + const buffer = new Uint8Array(IncludeResponseMetaByteSize) + writeIncludeResponseMeta(buffer, header, 0) return buffer } -export type IncludeMetaHeader = { - op: IncludeOpEnum - prop: number - propType: PropTypeEnum +export const pushIncludeResponseMeta = ( + buf: AutoSizedUint8Array, + header: IncludeResponseMeta, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint8(Number(header.prop)) + buf.pushUint8(Number(header.lang)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.compressed ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= ((0 >>> 0) & 127) << 1 + buf.pushUint32(Number(header.crc32)) + buf.pushUint32(Number(header.size)) + return index } -export const IncludeMetaHeaderByteSize = 3 - -export const IncludeMetaHeaderAlignOf = 4 - -export const packIncludeMetaHeader = (obj: IncludeMetaHeader): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.prop) & 255n) << 8n - val |= (BigInt(obj.propType) & 255n) << 16n - return val +export type SubscriptionHeader = { + op: OpTypeEnum + typeId: TypeId + fieldsLen: number + partialLen: number } -export const unpackIncludeMetaHeader = (val: bigint): IncludeMetaHeader => { - return { - op: (Number((val >> 0n) & 255n)) as IncludeOpEnum, - prop: Number((val >> 8n) & 255n), - propType: (Number((val >> 16n) & 255n)) as PropTypeEnum, - } -} +export const SubscriptionHeaderByteSize = 5 -export const writeIncludeMetaHeader = ( +export const SubscriptionHeaderAlignOf = 8 + +export const writeSubscriptionHeader = ( buf: Uint8Array, - header: IncludeMetaHeader, + header: SubscriptionHeader, offset: number, ): number => { buf[offset] = Number(header.op) offset += 1 - buf[offset] = Number(header.prop) + writeUint16(buf, Number(header.typeId), offset) + offset += 2 + buf[offset] = Number(header.fieldsLen) offset += 1 - buf[offset] = Number(header.propType) + buf[offset] = Number(header.partialLen) offset += 1 return offset } -export const writeIncludeMetaHeaderProps = { - op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { +export const writeSubscriptionHeaderProps = { + op: (buf: Uint8Array, value: OpTypeEnum, offset: number) => { buf[offset] = Number(value) }, - prop: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 1] = Number(value) + typeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 1) }, - propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { - buf[offset + 2] = Number(value) + fieldsLen: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 3] = Number(value) + }, + partialLen: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 4] = Number(value) }, } -export const readIncludeMetaHeader = ( +export const readSubscriptionHeader = ( buf: Uint8Array, offset: number, -): IncludeMetaHeader => { - const value: IncludeMetaHeader = { - op: (buf[offset]) as IncludeOpEnum, - prop: buf[offset + 1], - propType: (buf[offset + 2]) as PropTypeEnum, +): SubscriptionHeader => { + const value: SubscriptionHeader = { + op: (buf[offset]) as OpTypeEnum, + typeId: (readUint16(buf, offset + 1)) as TypeId, + fieldsLen: buf[offset + 3], + partialLen: buf[offset + 4], } return value } -export const readIncludeMetaHeaderProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, - prop: (buf: Uint8Array, offset: number) => buf[offset + 1], - propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, +export const readSubscriptionHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as OpTypeEnum, + typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, + fieldsLen: (buf: Uint8Array, offset: number) => buf[offset + 3], + partialLen: (buf: Uint8Array, offset: number) => buf[offset + 4], } -export const createIncludeMetaHeader = (header: IncludeMetaHeader): Uint8Array => { - const buffer = new Uint8Array(IncludeMetaHeaderByteSize) - writeIncludeMetaHeader(buffer, header, 0) +export const createSubscriptionHeader = (header: SubscriptionHeader): Uint8Array => { + const buffer = new Uint8Array(SubscriptionHeaderByteSize) + writeSubscriptionHeader(buffer, header, 0) return buffer } -export type IncludePartialHeader = { - op: IncludeOpEnum - prop: number - propType: PropTypeEnum - amount: number +export const pushSubscriptionHeader = ( + buf: AutoSizedUint8Array, + header: SubscriptionHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint16(Number(header.typeId)) + buf.pushUint8(Number(header.fieldsLen)) + buf.pushUint8(Number(header.partialLen)) + return index } -export const IncludePartialHeaderByteSize = 5 - -export const IncludePartialHeaderAlignOf = 8 - -export const packIncludePartialHeader = (obj: IncludePartialHeader): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.prop) & 255n) << 8n - val |= (BigInt(obj.propType) & 255n) << 16n - val |= (BigInt(obj.amount) & 65535n) << 24n - return val +export type QueryHeader = { + op: QueryTypeEnum + prop: number + typeId: TypeId + edgeTypeId: TypeId + offset: number + limit: number + filterSize: number + searchSize: number + edgeSize: number + edgeFilterSize: number + includeSize: number + iteratorType: QueryIteratorTypeEnum + size: number + sort: boolean } -export const unpackIncludePartialHeader = (val: bigint): IncludePartialHeader => { - return { - op: (Number((val >> 0n) & 255n)) as IncludeOpEnum, - prop: Number((val >> 8n) & 255n), - propType: (Number((val >> 16n) & 255n)) as PropTypeEnum, - amount: Number((val >> 24n) & 65535n), - } -} +export const QueryHeaderByteSize = 28 -export const writeIncludePartialHeader = ( +export const QueryHeaderAlignOf = 16 + +export const writeQueryHeader = ( buf: Uint8Array, - header: IncludePartialHeader, + header: QueryHeader, offset: number, ): number => { buf[offset] = Number(header.op) offset += 1 buf[offset] = Number(header.prop) offset += 1 - buf[offset] = Number(header.propType) + writeUint16(buf, Number(header.typeId), offset) + offset += 2 + writeUint16(buf, Number(header.edgeTypeId), offset) + offset += 2 + writeUint32(buf, Number(header.offset), offset) + offset += 4 + writeUint32(buf, Number(header.limit), offset) + offset += 4 + writeUint16(buf, Number(header.filterSize), offset) + offset += 2 + writeUint16(buf, Number(header.searchSize), offset) + offset += 2 + writeUint16(buf, Number(header.edgeSize), offset) + offset += 2 + writeUint16(buf, Number(header.edgeFilterSize), offset) + offset += 2 + writeUint16(buf, Number(header.includeSize), offset) + offset += 2 + buf[offset] = Number(header.iteratorType) offset += 1 - writeUint16(buf, Number(header.amount), offset) + writeUint16(buf, Number(header.size), offset) offset += 2 + buf[offset] = 0 + buf[offset] |= (((header.sort ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= ((0 >>> 0) & 127) << 1 + offset += 1 return offset } -export const writeIncludePartialHeaderProps = { - op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { +export const writeQueryHeaderProps = { + op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { buf[offset] = Number(value) }, prop: (buf: Uint8Array, value: number, offset: number) => { buf[offset + 1] = Number(value) }, - propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { - buf[offset + 2] = Number(value) + typeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 2) }, - amount: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 3) + edgeTypeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 4) + }, + offset: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 6) + }, + limit: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 10) + }, + filterSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 14) + }, + searchSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 16) + }, + edgeSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 18) + }, + edgeFilterSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 20) + }, + includeSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 22) + }, + iteratorType: (buf: Uint8Array, value: QueryIteratorTypeEnum, offset: number) => { + buf[offset + 24] = Number(value) + }, + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 25) + }, + sort: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 27] |= (((value ? 1 : 0) >>> 0) & 1) << 0 }, } -export const readIncludePartialHeader = ( +export const readQueryHeader = ( buf: Uint8Array, offset: number, -): IncludePartialHeader => { - const value: IncludePartialHeader = { - op: (buf[offset]) as IncludeOpEnum, +): QueryHeader => { + const value: QueryHeader = { + op: (buf[offset]) as QueryTypeEnum, prop: buf[offset + 1], - propType: (buf[offset + 2]) as PropTypeEnum, - amount: readUint16(buf, offset + 3), + typeId: (readUint16(buf, offset + 2)) as TypeId, + edgeTypeId: (readUint16(buf, offset + 4)) as TypeId, + offset: readUint32(buf, offset + 6), + limit: readUint32(buf, offset + 10), + filterSize: readUint16(buf, offset + 14), + searchSize: readUint16(buf, offset + 16), + edgeSize: readUint16(buf, offset + 18), + edgeFilterSize: readUint16(buf, offset + 20), + includeSize: readUint16(buf, offset + 22), + iteratorType: (buf[offset + 24]) as QueryIteratorTypeEnum, + size: readUint16(buf, offset + 25), + sort: (((buf[offset + 27] >>> 0) & 1)) === 1, } return value } -export const readIncludePartialHeaderProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, +export const readQueryHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, prop: (buf: Uint8Array, offset: number) => buf[offset + 1], - propType: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as PropTypeEnum, - amount: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 3), + typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 2)) as TypeId, + edgeTypeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 4)) as TypeId, + offset: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 6), + limit: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 10), + filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 14), + searchSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 16), + edgeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 18), + edgeFilterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 20), + includeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 22), + iteratorType: (buf: Uint8Array, offset: number) => (buf[offset + 24]) as QueryIteratorTypeEnum, + size: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 25), + sort: (buf: Uint8Array, offset: number) => (((buf[offset + 27] >>> 0) & 1)) === 1, } -export const createIncludePartialHeader = (header: IncludePartialHeader): Uint8Array => { - const buffer = new Uint8Array(IncludePartialHeaderByteSize) - writeIncludePartialHeader(buffer, header, 0) +export const createQueryHeader = (header: QueryHeader): Uint8Array => { + const buffer = new Uint8Array(QueryHeaderByteSize) + writeQueryHeader(buffer, header, 0) return buffer } -export type IncludePartialProp = { - start: number - size: number +export const pushQueryHeader = ( + buf: AutoSizedUint8Array, + header: QueryHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint8(Number(header.prop)) + buf.pushUint16(Number(header.typeId)) + buf.pushUint16(Number(header.edgeTypeId)) + buf.pushUint32(Number(header.offset)) + buf.pushUint32(Number(header.limit)) + buf.pushUint16(Number(header.filterSize)) + buf.pushUint16(Number(header.searchSize)) + buf.pushUint16(Number(header.edgeSize)) + buf.pushUint16(Number(header.edgeFilterSize)) + buf.pushUint16(Number(header.includeSize)) + buf.pushUint8(Number(header.iteratorType)) + buf.pushUint16(Number(header.size)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.sort ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= ((0 >>> 0) & 127) << 1 + return index } -export const IncludePartialPropByteSize = 4 - -export const IncludePartialPropAlignOf = 4 - -export const packIncludePartialProp = (obj: IncludePartialProp): bigint => { - let val = 0n - val |= (BigInt(obj.start) & 65535n) << 0n - val |= (BigInt(obj.size) & 65535n) << 16n - return val +export type QueryHeaderSingle = { + op: QueryTypeEnum + typeId: TypeId + prop: number + id: number + filterSize: number + includeSize: number + aliasSize: number } -export const unpackIncludePartialProp = (val: bigint): IncludePartialProp => { - return { - start: Number((val >> 0n) & 65535n), - size: Number((val >> 16n) & 65535n), - } -} +export const QueryHeaderSingleByteSize = 14 -export const writeIncludePartialProp = ( +export const QueryHeaderSingleAlignOf = 16 + +export const writeQueryHeaderSingle = ( buf: Uint8Array, - header: IncludePartialProp, + header: QueryHeaderSingle, offset: number, ): number => { - writeUint16(buf, Number(header.start), offset) + buf[offset] = Number(header.op) + offset += 1 + writeUint16(buf, Number(header.typeId), offset) offset += 2 - writeUint16(buf, Number(header.size), offset) + buf[offset] = Number(header.prop) + offset += 1 + writeUint32(buf, Number(header.id), offset) + offset += 4 + writeUint16(buf, Number(header.filterSize), offset) + offset += 2 + writeUint16(buf, Number(header.includeSize), offset) + offset += 2 + writeUint16(buf, Number(header.aliasSize), offset) offset += 2 return offset } -export const writeIncludePartialPropProps = { - start: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset) +export const writeQueryHeaderSingleProps = { + op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { + buf[offset] = Number(value) }, - size: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 2) + typeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 1) + }, + prop: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 3] = Number(value) + }, + id: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 4) + }, + filterSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 8) + }, + includeSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 10) + }, + aliasSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 12) }, } -export const readIncludePartialProp = ( +export const readQueryHeaderSingle = ( buf: Uint8Array, offset: number, -): IncludePartialProp => { - const value: IncludePartialProp = { - start: readUint16(buf, offset), - size: readUint16(buf, offset + 2), +): QueryHeaderSingle => { + const value: QueryHeaderSingle = { + op: (buf[offset]) as QueryTypeEnum, + typeId: (readUint16(buf, offset + 1)) as TypeId, + prop: buf[offset + 3], + id: readUint32(buf, offset + 4), + filterSize: readUint16(buf, offset + 8), + includeSize: readUint16(buf, offset + 10), + aliasSize: readUint16(buf, offset + 12), } return value } -export const readIncludePartialPropProps = { - start: (buf: Uint8Array, offset: number) => readUint16(buf, offset), - size: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 2), +export const readQueryHeaderSingleProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, + typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, + prop: (buf: Uint8Array, offset: number) => buf[offset + 3], + id: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 4), + filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), + includeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 10), + aliasSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 12), } -export const createIncludePartialProp = (header: IncludePartialProp): Uint8Array => { - const buffer = new Uint8Array(IncludePartialPropByteSize) - writeIncludePartialProp(buffer, header, 0) +export const createQueryHeaderSingle = (header: QueryHeaderSingle): Uint8Array => { + const buffer = new Uint8Array(QueryHeaderSingleByteSize) + writeQueryHeaderSingle(buffer, header, 0) return buffer } -export type IncludeOpts = { - end: number - isChars: boolean - hasOpts: boolean - langFallbackSize: number - lang: LangCodeEnum +export const pushQueryHeaderSingle = ( + buf: AutoSizedUint8Array, + header: QueryHeaderSingle, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint16(Number(header.typeId)) + buf.pushUint8(Number(header.prop)) + buf.pushUint32(Number(header.id)) + buf.pushUint16(Number(header.filterSize)) + buf.pushUint16(Number(header.includeSize)) + buf.pushUint16(Number(header.aliasSize)) + return index } -export const IncludeOptsByteSize = 7 - -export const IncludeOptsAlignOf = 8 - -export const packIncludeOpts = (obj: IncludeOpts): bigint => { - let val = 0n - val |= (BigInt(obj.end) & 4294967295n) << 0n - val |= ((obj.isChars ? 1n : 0n) & 1n) << 32n - val |= ((obj.hasOpts ? 1n : 0n) & 1n) << 33n - val |= (BigInt(obj.langFallbackSize) & 255n) << 40n - val |= (BigInt(obj.lang) & 255n) << 48n - return val +export type QueryHeaderSingleReference = { + op: QueryTypeEnum + prop: number + typeId: TypeId + edgeTypeId: TypeId + edgeSize: number + includeSize: number } -export const unpackIncludeOpts = (val: bigint): IncludeOpts => { - return { - end: Number((val >> 0n) & 4294967295n), - isChars: ((val >> 32n) & 1n) === 1n, - hasOpts: ((val >> 33n) & 1n) === 1n, - langFallbackSize: Number((val >> 40n) & 255n), - lang: (Number((val >> 48n) & 255n)) as LangCodeEnum, - } -} +export const QueryHeaderSingleReferenceByteSize = 10 -export const writeIncludeOpts = ( +export const QueryHeaderSingleReferenceAlignOf = 16 + +export const writeQueryHeaderSingleReference = ( buf: Uint8Array, - header: IncludeOpts, + header: QueryHeaderSingleReference, offset: number, ): number => { - writeUint32(buf, Number(header.end), offset) - offset += 4 - buf[offset] = 0 - buf[offset] |= (((header.isChars ? 1 : 0) >>> 0) & 1) << 0 - buf[offset] |= (((header.hasOpts ? 1 : 0) >>> 0) & 1) << 1 - buf[offset] |= ((0 >>> 0) & 63) << 2 - offset += 1 - buf[offset] = Number(header.langFallbackSize) + buf[offset] = Number(header.op) offset += 1 - buf[offset] = Number(header.lang) + buf[offset] = Number(header.prop) offset += 1 + writeUint16(buf, Number(header.typeId), offset) + offset += 2 + writeUint16(buf, Number(header.edgeTypeId), offset) + offset += 2 + writeUint16(buf, Number(header.edgeSize), offset) + offset += 2 + writeUint16(buf, Number(header.includeSize), offset) + offset += 2 return offset } -export const writeIncludeOptsProps = { - end: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset) +export const writeQueryHeaderSingleReferenceProps = { + op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { + buf[offset] = Number(value) }, - isChars: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 4] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + prop: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 1] = Number(value) + }, + typeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 2) }, - hasOpts: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 4] |= (((value ? 1 : 0) >>> 0) & 1) << 1 + edgeTypeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 4) }, - langFallbackSize: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 5] = Number(value) + edgeSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 6) }, - lang: (buf: Uint8Array, value: LangCodeEnum, offset: number) => { - buf[offset + 6] = Number(value) + includeSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 8) }, } -export const readIncludeOpts = ( +export const readQueryHeaderSingleReference = ( buf: Uint8Array, offset: number, -): IncludeOpts => { - const value: IncludeOpts = { - end: readUint32(buf, offset), - isChars: (((buf[offset + 4] >>> 0) & 1)) === 1, - hasOpts: (((buf[offset + 4] >>> 1) & 1)) === 1, - langFallbackSize: buf[offset + 5], - lang: (buf[offset + 6]) as LangCodeEnum, +): QueryHeaderSingleReference => { + const value: QueryHeaderSingleReference = { + op: (buf[offset]) as QueryTypeEnum, + prop: buf[offset + 1], + typeId: (readUint16(buf, offset + 2)) as TypeId, + edgeTypeId: (readUint16(buf, offset + 4)) as TypeId, + edgeSize: readUint16(buf, offset + 6), + includeSize: readUint16(buf, offset + 8), } return value } -export const readIncludeOptsProps = { - end: (buf: Uint8Array, offset: number) => readUint32(buf, offset), - isChars: (buf: Uint8Array, offset: number) => (((buf[offset + 4] >>> 0) & 1)) === 1, - hasOpts: (buf: Uint8Array, offset: number) => (((buf[offset + 4] >>> 1) & 1)) === 1, - langFallbackSize: (buf: Uint8Array, offset: number) => buf[offset + 5], - lang: (buf: Uint8Array, offset: number) => (buf[offset + 6]) as LangCodeEnum, +export const readQueryHeaderSingleReferenceProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, + prop: (buf: Uint8Array, offset: number) => buf[offset + 1], + typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 2)) as TypeId, + edgeTypeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 4)) as TypeId, + edgeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 6), + includeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), } -export const createIncludeOpts = (header: IncludeOpts): Uint8Array => { - const buffer = new Uint8Array(IncludeOptsByteSize) - writeIncludeOpts(buffer, header, 0) +export const createQueryHeaderSingleReference = (header: QueryHeaderSingleReference): Uint8Array => { + const buffer = new Uint8Array(QueryHeaderSingleReferenceByteSize) + writeQueryHeaderSingleReference(buffer, header, 0) return buffer } -export type IncludeResponse = { - prop: number - size: number +export const pushQueryHeaderSingleReference = ( + buf: AutoSizedUint8Array, + header: QueryHeaderSingleReference, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint8(Number(header.prop)) + buf.pushUint16(Number(header.typeId)) + buf.pushUint16(Number(header.edgeTypeId)) + buf.pushUint16(Number(header.edgeSize)) + buf.pushUint16(Number(header.includeSize)) + return index } -export const IncludeResponseByteSize = 5 +export const VectorBaseType = { + int8: 1, + uint8: 2, + int16: 3, + uint16: 4, + int32: 5, + uint32: 6, + float32: 7, + float64: 8, +} as const -export const IncludeResponseAlignOf = 8 +export const VectorBaseTypeInverse = { + 1: 'int8', + 2: 'uint8', + 3: 'int16', + 4: 'uint16', + 5: 'int32', + 6: 'uint32', + 7: 'float32', + 8: 'float64', +} as const -export const packIncludeResponse = (obj: IncludeResponse): bigint => { - let val = 0n - val |= (BigInt(obj.prop) & 255n) << 0n - val |= (BigInt(obj.size) & 4294967295n) << 8n - return val -} +/** + int8, + uint8, + int16, + uint16, + int32, + uint32, + float32, + float64 + */ +export type VectorBaseTypeEnum = (typeof VectorBaseType)[keyof typeof VectorBaseType] -export const unpackIncludeResponse = (val: bigint): IncludeResponse => { - return { - prop: Number((val >> 0n) & 255n), - size: Number((val >> 8n) & 4294967295n), - } +export type AggHeader = { + op: QueryTypeEnum + typeId: TypeId + offset: number + limit: number + filterSize: number + iteratorType: QueryIteratorTypeEnum + resultsSize: number + accumulatorSize: number + hasGroupBy: boolean + isSamplingSet: boolean } -export const writeIncludeResponse = ( +export const AggHeaderByteSize = 19 + +export const AggHeaderAlignOf = 16 + +export const writeAggHeader = ( buf: Uint8Array, - header: IncludeResponse, + header: AggHeader, offset: number, ): number => { - buf[offset] = Number(header.prop) + buf[offset] = Number(header.op) offset += 1 - writeUint32(buf, Number(header.size), offset) + writeUint16(buf, Number(header.typeId), offset) + offset += 2 + writeUint32(buf, Number(header.offset), offset) + offset += 4 + writeUint32(buf, Number(header.limit), offset) offset += 4 + writeUint16(buf, Number(header.filterSize), offset) + offset += 2 + buf[offset] = Number(header.iteratorType) + offset += 1 + writeUint16(buf, Number(header.resultsSize), offset) + offset += 2 + writeUint16(buf, Number(header.accumulatorSize), offset) + offset += 2 + buf[offset] = 0 + buf[offset] |= (((header.hasGroupBy ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= (((header.isSamplingSet ? 1 : 0) >>> 0) & 1) << 1 + buf[offset] |= ((0 >>> 0) & 63) << 2 + offset += 1 return offset } -export const writeIncludeResponseProps = { - prop: (buf: Uint8Array, value: number, offset: number) => { +export const writeAggHeaderProps = { + op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { buf[offset] = Number(value) }, - size: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 1) + typeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 1) + }, + offset: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 3) + }, + limit: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 7) + }, + filterSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 11) + }, + iteratorType: (buf: Uint8Array, value: QueryIteratorTypeEnum, offset: number) => { + buf[offset + 13] = Number(value) + }, + resultsSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 14) + }, + accumulatorSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 16) + }, + hasGroupBy: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 18] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + isSamplingSet: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 18] |= (((value ? 1 : 0) >>> 0) & 1) << 1 }, } -export const readIncludeResponse = ( +export const readAggHeader = ( buf: Uint8Array, offset: number, -): IncludeResponse => { - const value: IncludeResponse = { - prop: buf[offset], - size: readUint32(buf, offset + 1), +): AggHeader => { + const value: AggHeader = { + op: (buf[offset]) as QueryTypeEnum, + typeId: (readUint16(buf, offset + 1)) as TypeId, + offset: readUint32(buf, offset + 3), + limit: readUint32(buf, offset + 7), + filterSize: readUint16(buf, offset + 11), + iteratorType: (buf[offset + 13]) as QueryIteratorTypeEnum, + resultsSize: readUint16(buf, offset + 14), + accumulatorSize: readUint16(buf, offset + 16), + hasGroupBy: (((buf[offset + 18] >>> 0) & 1)) === 1, + isSamplingSet: (((buf[offset + 18] >>> 1) & 1)) === 1, } return value } -export const readIncludeResponseProps = { - prop: (buf: Uint8Array, offset: number) => buf[offset], - size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 1), +export const readAggHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, + typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, + offset: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 3), + limit: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 7), + filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 11), + iteratorType: (buf: Uint8Array, offset: number) => (buf[offset + 13]) as QueryIteratorTypeEnum, + resultsSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 14), + accumulatorSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 16), + hasGroupBy: (buf: Uint8Array, offset: number) => (((buf[offset + 18] >>> 0) & 1)) === 1, + isSamplingSet: (buf: Uint8Array, offset: number) => (((buf[offset + 18] >>> 1) & 1)) === 1, } -export const createIncludeResponse = (header: IncludeResponse): Uint8Array => { - const buffer = new Uint8Array(IncludeResponseByteSize) - writeIncludeResponse(buffer, header, 0) +export const createAggHeader = (header: AggHeader): Uint8Array => { + const buffer = new Uint8Array(AggHeaderByteSize) + writeAggHeader(buffer, header, 0) return buffer } -export type IncludeResponseMeta = { - op: ReadOpEnum - prop: number - lang: LangCodeEnum - compressed: boolean - crc32: number - size: number +export const pushAggHeader = ( + buf: AutoSizedUint8Array, + header: AggHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint16(Number(header.typeId)) + buf.pushUint32(Number(header.offset)) + buf.pushUint32(Number(header.limit)) + buf.pushUint16(Number(header.filterSize)) + buf.pushUint8(Number(header.iteratorType)) + buf.pushUint16(Number(header.resultsSize)) + buf.pushUint16(Number(header.accumulatorSize)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.hasGroupBy ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= (((header.isSamplingSet ? 1 : 0) >>> 0) & 1) << 1 + buf.view[buf.length - 1] |= ((0 >>> 0) & 63) << 2 + return index } -export const IncludeResponseMetaByteSize = 12 - -export const IncludeResponseMetaAlignOf = 16 - -export const packIncludeResponseMeta = (obj: IncludeResponseMeta): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.prop) & 255n) << 8n - val |= (BigInt(obj.lang) & 255n) << 16n - val |= ((obj.compressed ? 1n : 0n) & 1n) << 24n - val |= (BigInt(obj.crc32) & 4294967295n) << 32n - val |= (BigInt(obj.size) & 4294967295n) << 64n - return val +export type AggRefsHeader = { + op: IncludeOpEnum + targetProp: number + offset: number + filterSize: number + resultsSize: number + accumulatorSize: number + hasGroupBy: boolean + isSamplingSet: boolean } -export const unpackIncludeResponseMeta = (val: bigint): IncludeResponseMeta => { - return { - op: (Number((val >> 0n) & 255n)) as ReadOpEnum, - prop: Number((val >> 8n) & 255n), - lang: (Number((val >> 16n) & 255n)) as LangCodeEnum, - compressed: ((val >> 24n) & 1n) === 1n, - crc32: Number((val >> 32n) & 4294967295n), - size: Number((val >> 64n) & 4294967295n), - } -} +export const AggRefsHeaderByteSize = 13 -export const writeIncludeResponseMeta = ( +export const AggRefsHeaderAlignOf = 16 + +export const writeAggRefsHeader = ( buf: Uint8Array, - header: IncludeResponseMeta, + header: AggRefsHeader, offset: number, ): number => { buf[offset] = Number(header.op) offset += 1 - buf[offset] = Number(header.prop) - offset += 1 - buf[offset] = Number(header.lang) - offset += 1 - buf[offset] = 0 - buf[offset] |= (((header.compressed ? 1 : 0) >>> 0) & 1) << 0 - buf[offset] |= ((0 >>> 0) & 127) << 1 + buf[offset] = Number(header.targetProp) offset += 1 - writeUint32(buf, Number(header.crc32), offset) - offset += 4 - writeUint32(buf, Number(header.size), offset) + writeUint32(buf, Number(header.offset), offset) offset += 4 + writeUint16(buf, Number(header.filterSize), offset) + offset += 2 + writeUint16(buf, Number(header.resultsSize), offset) + offset += 2 + writeUint16(buf, Number(header.accumulatorSize), offset) + offset += 2 + buf[offset] = 0 + buf[offset] |= (((header.hasGroupBy ? 1 : 0) >>> 0) & 1) << 0 + buf[offset] |= (((header.isSamplingSet ? 1 : 0) >>> 0) & 1) << 1 + buf[offset] |= ((0 >>> 0) & 63) << 2 + offset += 1 return offset } -export const writeIncludeResponseMetaProps = { - op: (buf: Uint8Array, value: ReadOpEnum, offset: number) => { +export const writeAggRefsHeaderProps = { + op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { buf[offset] = Number(value) }, - prop: (buf: Uint8Array, value: number, offset: number) => { + targetProp: (buf: Uint8Array, value: number, offset: number) => { buf[offset + 1] = Number(value) }, - lang: (buf: Uint8Array, value: LangCodeEnum, offset: number) => { - buf[offset + 2] = Number(value) + offset: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 2) }, - compressed: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 3] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + filterSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 6) }, - crc32: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 4) + resultsSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 8) }, - size: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 8) + accumulatorSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 10) + }, + hasGroupBy: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 12] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + }, + isSamplingSet: (buf: Uint8Array, value: boolean, offset: number) => { + buf[offset + 12] |= (((value ? 1 : 0) >>> 0) & 1) << 1 }, } -export const readIncludeResponseMeta = ( +export const readAggRefsHeader = ( buf: Uint8Array, offset: number, -): IncludeResponseMeta => { - const value: IncludeResponseMeta = { - op: (buf[offset]) as ReadOpEnum, - prop: buf[offset + 1], - lang: (buf[offset + 2]) as LangCodeEnum, - compressed: (((buf[offset + 3] >>> 0) & 1)) === 1, - crc32: readUint32(buf, offset + 4), - size: readUint32(buf, offset + 8), +): AggRefsHeader => { + const value: AggRefsHeader = { + op: (buf[offset]) as IncludeOpEnum, + targetProp: buf[offset + 1], + offset: readUint32(buf, offset + 2), + filterSize: readUint16(buf, offset + 6), + resultsSize: readUint16(buf, offset + 8), + accumulatorSize: readUint16(buf, offset + 10), + hasGroupBy: (((buf[offset + 12] >>> 0) & 1)) === 1, + isSamplingSet: (((buf[offset + 12] >>> 1) & 1)) === 1, } return value } -export const readIncludeResponseMetaProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as ReadOpEnum, - prop: (buf: Uint8Array, offset: number) => buf[offset + 1], - lang: (buf: Uint8Array, offset: number) => (buf[offset + 2]) as LangCodeEnum, - compressed: (buf: Uint8Array, offset: number) => (((buf[offset + 3] >>> 0) & 1)) === 1, - crc32: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 4), - size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 8), +export const readAggRefsHeaderProps = { + op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, + targetProp: (buf: Uint8Array, offset: number) => buf[offset + 1], + offset: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 2), + filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 6), + resultsSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), + accumulatorSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 10), + hasGroupBy: (buf: Uint8Array, offset: number) => (((buf[offset + 12] >>> 0) & 1)) === 1, + isSamplingSet: (buf: Uint8Array, offset: number) => (((buf[offset + 12] >>> 1) & 1)) === 1, } -export const createIncludeResponseMeta = (header: IncludeResponseMeta): Uint8Array => { - const buffer = new Uint8Array(IncludeResponseMetaByteSize) - writeIncludeResponseMeta(buffer, header, 0) +export const createAggRefsHeader = (header: AggRefsHeader): Uint8Array => { + const buffer = new Uint8Array(AggRefsHeaderByteSize) + writeAggRefsHeader(buffer, header, 0) return buffer } -export type SubscriptionHeader = { - op: OpTypeEnum - typeId: TypeId - fieldsLen: number - partialLen: number +export const pushAggRefsHeader = ( + buf: AutoSizedUint8Array, + header: AggRefsHeader, +): number => { + const index = buf.length + buf.pushUint8(Number(header.op)) + buf.pushUint8(Number(header.targetProp)) + buf.pushUint32(Number(header.offset)) + buf.pushUint16(Number(header.filterSize)) + buf.pushUint16(Number(header.resultsSize)) + buf.pushUint16(Number(header.accumulatorSize)) + buf.pushUint8(0) + buf.view[buf.length - 1] |= (((header.hasGroupBy ? 1 : 0) >>> 0) & 1) << 0 + buf.view[buf.length - 1] |= (((header.isSamplingSet ? 1 : 0) >>> 0) & 1) << 1 + buf.view[buf.length - 1] |= ((0 >>> 0) & 63) << 2 + return index } -export const SubscriptionHeaderByteSize = 5 +export type addMultiSubscriptionHeader = { + typeId: number +} -export const SubscriptionHeaderAlignOf = 8 +export const addMultiSubscriptionHeaderByteSize = 2 + +export const addMultiSubscriptionHeaderAlignOf = 2 -export const packSubscriptionHeader = (obj: SubscriptionHeader): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.typeId) & 65535n) << 8n - val |= (BigInt(obj.fieldsLen) & 255n) << 24n - val |= (BigInt(obj.partialLen) & 255n) << 32n +export const packaddMultiSubscriptionHeader = (obj: addMultiSubscriptionHeader): number => { + let val = 0 + val |= (Number(obj.typeId) & 65535) << 0 return val } -export const unpackSubscriptionHeader = (val: bigint): SubscriptionHeader => { +export const unpackaddMultiSubscriptionHeader = (val: number): addMultiSubscriptionHeader => { return { - op: (Number((val >> 0n) & 255n)) as OpTypeEnum, - typeId: (Number((val >> 8n) & 65535n)) as TypeId, - fieldsLen: Number((val >> 24n) & 255n), - partialLen: Number((val >> 32n) & 255n), + typeId: Number((val >>> 0) & 65535), } } -export const writeSubscriptionHeader = ( +export const writeaddMultiSubscriptionHeader = ( buf: Uint8Array, - header: SubscriptionHeader, + header: addMultiSubscriptionHeader, offset: number, ): number => { - buf[offset] = Number(header.op) - offset += 1 writeUint16(buf, Number(header.typeId), offset) offset += 2 - buf[offset] = Number(header.fieldsLen) - offset += 1 - buf[offset] = Number(header.partialLen) - offset += 1 return offset } -export const writeSubscriptionHeaderProps = { - op: (buf: Uint8Array, value: OpTypeEnum, offset: number) => { - buf[offset] = Number(value) - }, - typeId: (buf: Uint8Array, value: TypeId, offset: number) => { - writeUint16(buf, Number(value), offset + 1) - }, - fieldsLen: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 3] = Number(value) - }, - partialLen: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 4] = Number(value) +export const writeaddMultiSubscriptionHeaderProps = { + typeId: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset) }, } -export const readSubscriptionHeader = ( +export const readaddMultiSubscriptionHeader = ( buf: Uint8Array, offset: number, -): SubscriptionHeader => { - const value: SubscriptionHeader = { - op: (buf[offset]) as OpTypeEnum, - typeId: (readUint16(buf, offset + 1)) as TypeId, - fieldsLen: buf[offset + 3], - partialLen: buf[offset + 4], +): addMultiSubscriptionHeader => { + const value: addMultiSubscriptionHeader = { + typeId: readUint16(buf, offset), } return value } -export const readSubscriptionHeaderProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as OpTypeEnum, - typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, - fieldsLen: (buf: Uint8Array, offset: number) => buf[offset + 3], - partialLen: (buf: Uint8Array, offset: number) => buf[offset + 4], +export const readaddMultiSubscriptionHeaderProps = { + typeId: (buf: Uint8Array, offset: number) => readUint16(buf, offset), } -export const createSubscriptionHeader = (header: SubscriptionHeader): Uint8Array => { - const buffer = new Uint8Array(SubscriptionHeaderByteSize) - writeSubscriptionHeader(buffer, header, 0) +export const createaddMultiSubscriptionHeader = (header: addMultiSubscriptionHeader): Uint8Array => { + const buffer = new Uint8Array(addMultiSubscriptionHeaderByteSize) + writeaddMultiSubscriptionHeader(buffer, header, 0) return buffer } -export type QueryHeader = { - op: QueryTypeEnum - prop: number - typeId: TypeId - edgeTypeId: TypeId - offset: number - limit: number - filterSize: number - searchSize: number - edgeSize: number - edgeFilterSize: number - includeSize: number - iteratorType: QueryIteratorTypeEnum - size: number - sort: boolean +export const pushaddMultiSubscriptionHeader = ( + buf: AutoSizedUint8Array, + header: addMultiSubscriptionHeader, +): number => { + const index = buf.length + buf.pushUint16(Number(header.typeId)) + return index } -export const QueryHeaderByteSize = 28 +export type removeMultiSubscriptionHeader = { + typeId: number +} -export const QueryHeaderAlignOf = 16 +export const removeMultiSubscriptionHeaderByteSize = 2 + +export const removeMultiSubscriptionHeaderAlignOf = 2 -export const packQueryHeader = (obj: QueryHeader): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.prop) & 255n) << 8n - val |= (BigInt(obj.typeId) & 65535n) << 16n - val |= (BigInt(obj.edgeTypeId) & 65535n) << 32n - val |= (BigInt(obj.offset) & 4294967295n) << 48n - val |= (BigInt(obj.limit) & 4294967295n) << 80n - val |= (BigInt(obj.filterSize) & 65535n) << 112n - val |= (BigInt(obj.searchSize) & 65535n) << 128n - val |= (BigInt(obj.edgeSize) & 65535n) << 144n - val |= (BigInt(obj.edgeFilterSize) & 65535n) << 160n - val |= (BigInt(obj.includeSize) & 65535n) << 176n - val |= (BigInt(obj.iteratorType) & 255n) << 192n - val |= (BigInt(obj.size) & 65535n) << 200n - val |= ((obj.sort ? 1n : 0n) & 1n) << 216n +export const packremoveMultiSubscriptionHeader = (obj: removeMultiSubscriptionHeader): number => { + let val = 0 + val |= (Number(obj.typeId) & 65535) << 0 return val } -export const unpackQueryHeader = (val: bigint): QueryHeader => { +export const unpackremoveMultiSubscriptionHeader = (val: number): removeMultiSubscriptionHeader => { return { - op: (Number((val >> 0n) & 255n)) as QueryTypeEnum, - prop: Number((val >> 8n) & 255n), - typeId: (Number((val >> 16n) & 65535n)) as TypeId, - edgeTypeId: (Number((val >> 32n) & 65535n)) as TypeId, - offset: Number((val >> 48n) & 4294967295n), - limit: Number((val >> 80n) & 4294967295n), - filterSize: Number((val >> 112n) & 65535n), - searchSize: Number((val >> 128n) & 65535n), - edgeSize: Number((val >> 144n) & 65535n), - edgeFilterSize: Number((val >> 160n) & 65535n), - includeSize: Number((val >> 176n) & 65535n), - iteratorType: (Number((val >> 192n) & 255n)) as QueryIteratorTypeEnum, - size: Number((val >> 200n) & 65535n), - sort: ((val >> 216n) & 1n) === 1n, + typeId: Number((val >>> 0) & 65535), } } -export const writeQueryHeader = ( +export const writeremoveMultiSubscriptionHeader = ( buf: Uint8Array, - header: QueryHeader, + header: removeMultiSubscriptionHeader, offset: number, ): number => { - buf[offset] = Number(header.op) - offset += 1 - buf[offset] = Number(header.prop) - offset += 1 writeUint16(buf, Number(header.typeId), offset) offset += 2 - writeUint16(buf, Number(header.edgeTypeId), offset) - offset += 2 - writeUint32(buf, Number(header.offset), offset) - offset += 4 - writeUint32(buf, Number(header.limit), offset) - offset += 4 - writeUint16(buf, Number(header.filterSize), offset) - offset += 2 - writeUint16(buf, Number(header.searchSize), offset) - offset += 2 - writeUint16(buf, Number(header.edgeSize), offset) - offset += 2 - writeUint16(buf, Number(header.edgeFilterSize), offset) - offset += 2 - writeUint16(buf, Number(header.includeSize), offset) - offset += 2 - buf[offset] = Number(header.iteratorType) - offset += 1 - writeUint16(buf, Number(header.size), offset) - offset += 2 - buf[offset] = 0 - buf[offset] |= (((header.sort ? 1 : 0) >>> 0) & 1) << 0 - buf[offset] |= ((0 >>> 0) & 127) << 1 - offset += 1 return offset } -export const writeQueryHeaderProps = { - op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { - buf[offset] = Number(value) - }, - prop: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 1] = Number(value) - }, - typeId: (buf: Uint8Array, value: TypeId, offset: number) => { - writeUint16(buf, Number(value), offset + 2) - }, - edgeTypeId: (buf: Uint8Array, value: TypeId, offset: number) => { - writeUint16(buf, Number(value), offset + 4) - }, - offset: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 6) - }, - limit: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 10) - }, - filterSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 14) - }, - searchSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 16) - }, - edgeSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 18) - }, - edgeFilterSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 20) - }, - includeSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 22) - }, - iteratorType: (buf: Uint8Array, value: QueryIteratorTypeEnum, offset: number) => { - buf[offset + 24] = Number(value) - }, - size: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 25) - }, - sort: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 27] |= (((value ? 1 : 0) >>> 0) & 1) << 0 +export const writeremoveMultiSubscriptionHeaderProps = { + typeId: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset) }, } -export const readQueryHeader = ( +export const readremoveMultiSubscriptionHeader = ( buf: Uint8Array, offset: number, -): QueryHeader => { - const value: QueryHeader = { - op: (buf[offset]) as QueryTypeEnum, - prop: buf[offset + 1], - typeId: (readUint16(buf, offset + 2)) as TypeId, - edgeTypeId: (readUint16(buf, offset + 4)) as TypeId, - offset: readUint32(buf, offset + 6), - limit: readUint32(buf, offset + 10), - filterSize: readUint16(buf, offset + 14), - searchSize: readUint16(buf, offset + 16), - edgeSize: readUint16(buf, offset + 18), - edgeFilterSize: readUint16(buf, offset + 20), - includeSize: readUint16(buf, offset + 22), - iteratorType: (buf[offset + 24]) as QueryIteratorTypeEnum, - size: readUint16(buf, offset + 25), - sort: (((buf[offset + 27] >>> 0) & 1)) === 1, +): removeMultiSubscriptionHeader => { + const value: removeMultiSubscriptionHeader = { + typeId: readUint16(buf, offset), } return value } -export const readQueryHeaderProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, - prop: (buf: Uint8Array, offset: number) => buf[offset + 1], - typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 2)) as TypeId, - edgeTypeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 4)) as TypeId, - offset: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 6), - limit: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 10), - filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 14), - searchSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 16), - edgeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 18), - edgeFilterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 20), - includeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 22), - iteratorType: (buf: Uint8Array, offset: number) => (buf[offset + 24]) as QueryIteratorTypeEnum, - size: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 25), - sort: (buf: Uint8Array, offset: number) => (((buf[offset + 27] >>> 0) & 1)) === 1, +export const readremoveMultiSubscriptionHeaderProps = { + typeId: (buf: Uint8Array, offset: number) => readUint16(buf, offset), } -export const createQueryHeader = (header: QueryHeader): Uint8Array => { - const buffer = new Uint8Array(QueryHeaderByteSize) - writeQueryHeader(buffer, header, 0) +export const createremoveMultiSubscriptionHeader = (header: removeMultiSubscriptionHeader): Uint8Array => { + const buffer = new Uint8Array(removeMultiSubscriptionHeaderByteSize) + writeremoveMultiSubscriptionHeader(buffer, header, 0) return buffer } -export type QueryHeaderSingle = { - op: QueryTypeEnum - typeId: TypeId - prop: number - id: number - filterSize: number - includeSize: number - aliasSize: number +export const pushremoveMultiSubscriptionHeader = ( + buf: AutoSizedUint8Array, + header: removeMultiSubscriptionHeader, +): number => { + const index = buf.length + buf.pushUint16(Number(header.typeId)) + return index } -export const QueryHeaderSingleByteSize = 14 - -export const QueryHeaderSingleAlignOf = 16 - -export const packQueryHeaderSingle = (obj: QueryHeaderSingle): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.typeId) & 65535n) << 8n - val |= (BigInt(obj.prop) & 255n) << 24n - val |= (BigInt(obj.id) & 4294967295n) << 32n - val |= (BigInt(obj.filterSize) & 65535n) << 64n - val |= (BigInt(obj.includeSize) & 65535n) << 80n - val |= (BigInt(obj.aliasSize) & 65535n) << 96n - return val +export type AggProp = { + propId: number + propType: PropTypeEnum + propDefStart: number + aggFunction: AggFunctionEnum + resultPos: number + accumulatorPos: number } -export const unpackQueryHeaderSingle = (val: bigint): QueryHeaderSingle => { - return { - op: (Number((val >> 0n) & 255n)) as QueryTypeEnum, - typeId: (Number((val >> 8n) & 65535n)) as TypeId, - prop: Number((val >> 24n) & 255n), - id: Number((val >> 32n) & 4294967295n), - filterSize: Number((val >> 64n) & 65535n), - includeSize: Number((val >> 80n) & 65535n), - aliasSize: Number((val >> 96n) & 65535n), - } -} +export const AggPropByteSize = 9 -export const writeQueryHeaderSingle = ( +export const AggPropAlignOf = 16 + +export const writeAggProp = ( buf: Uint8Array, - header: QueryHeaderSingle, + header: AggProp, offset: number, ): number => { - buf[offset] = Number(header.op) + buf[offset] = Number(header.propId) offset += 1 - writeUint16(buf, Number(header.typeId), offset) - offset += 2 - buf[offset] = Number(header.prop) + buf[offset] = Number(header.propType) offset += 1 - writeUint32(buf, Number(header.id), offset) - offset += 4 - writeUint16(buf, Number(header.filterSize), offset) + writeUint16(buf, Number(header.propDefStart), offset) offset += 2 - writeUint16(buf, Number(header.includeSize), offset) + buf[offset] = Number(header.aggFunction) + offset += 1 + writeUint16(buf, Number(header.resultPos), offset) offset += 2 - writeUint16(buf, Number(header.aliasSize), offset) + writeUint16(buf, Number(header.accumulatorPos), offset) offset += 2 return offset } -export const writeQueryHeaderSingleProps = { - op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { +export const writeAggPropProps = { + propId: (buf: Uint8Array, value: number, offset: number) => { buf[offset] = Number(value) }, - typeId: (buf: Uint8Array, value: TypeId, offset: number) => { - writeUint16(buf, Number(value), offset + 1) - }, - prop: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 3] = Number(value) + propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + buf[offset + 1] = Number(value) }, - id: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 4) + propDefStart: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 2) }, - filterSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 8) + aggFunction: (buf: Uint8Array, value: AggFunctionEnum, offset: number) => { + buf[offset + 4] = Number(value) }, - includeSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 10) + resultPos: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 5) }, - aliasSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 12) + accumulatorPos: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 7) }, } -export const readQueryHeaderSingle = ( +export const readAggProp = ( buf: Uint8Array, offset: number, -): QueryHeaderSingle => { - const value: QueryHeaderSingle = { - op: (buf[offset]) as QueryTypeEnum, - typeId: (readUint16(buf, offset + 1)) as TypeId, - prop: buf[offset + 3], - id: readUint32(buf, offset + 4), - filterSize: readUint16(buf, offset + 8), - includeSize: readUint16(buf, offset + 10), - aliasSize: readUint16(buf, offset + 12), +): AggProp => { + const value: AggProp = { + propId: buf[offset], + propType: (buf[offset + 1]) as PropTypeEnum, + propDefStart: readUint16(buf, offset + 2), + aggFunction: (buf[offset + 4]) as AggFunctionEnum, + resultPos: readUint16(buf, offset + 5), + accumulatorPos: readUint16(buf, offset + 7), } return value } -export const readQueryHeaderSingleProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, - typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, - prop: (buf: Uint8Array, offset: number) => buf[offset + 3], - id: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 4), - filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), - includeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 10), - aliasSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 12), +export const readAggPropProps = { + propId: (buf: Uint8Array, offset: number) => buf[offset], + propType: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as PropTypeEnum, + propDefStart: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 2), + aggFunction: (buf: Uint8Array, offset: number) => (buf[offset + 4]) as AggFunctionEnum, + resultPos: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 5), + accumulatorPos: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 7), } -export const createQueryHeaderSingle = (header: QueryHeaderSingle): Uint8Array => { - const buffer = new Uint8Array(QueryHeaderSingleByteSize) - writeQueryHeaderSingle(buffer, header, 0) +export const createAggProp = (header: AggProp): Uint8Array => { + const buffer = new Uint8Array(AggPropByteSize) + writeAggProp(buffer, header, 0) return buffer } -export type QueryHeaderSingleReference = { - op: QueryTypeEnum - prop: number - typeId: TypeId - edgeTypeId: TypeId - edgeSize: number - includeSize: number +export const pushAggProp = ( + buf: AutoSizedUint8Array, + header: AggProp, +): number => { + const index = buf.length + buf.pushUint8(Number(header.propId)) + buf.pushUint8(Number(header.propType)) + buf.pushUint16(Number(header.propDefStart)) + buf.pushUint8(Number(header.aggFunction)) + buf.pushUint16(Number(header.resultPos)) + buf.pushUint16(Number(header.accumulatorPos)) + return index } -export const QueryHeaderSingleReferenceByteSize = 10 - -export const QueryHeaderSingleReferenceAlignOf = 16 - -export const packQueryHeaderSingleReference = (obj: QueryHeaderSingleReference): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.prop) & 255n) << 8n - val |= (BigInt(obj.typeId) & 65535n) << 16n - val |= (BigInt(obj.edgeTypeId) & 65535n) << 32n - val |= (BigInt(obj.edgeSize) & 65535n) << 48n - val |= (BigInt(obj.includeSize) & 65535n) << 64n - return val +export type GroupByKeyProp = { + propId: number + propType: PropTypeEnum + propDefStart: number + stepType: number + stepRange: number + timezone: number } -export const unpackQueryHeaderSingleReference = (val: bigint): QueryHeaderSingleReference => { - return { - op: (Number((val >> 0n) & 255n)) as QueryTypeEnum, - prop: Number((val >> 8n) & 255n), - typeId: (Number((val >> 16n) & 65535n)) as TypeId, - edgeTypeId: (Number((val >> 32n) & 65535n)) as TypeId, - edgeSize: Number((val >> 48n) & 65535n), - includeSize: Number((val >> 64n) & 65535n), - } -} +export const GroupByKeyPropByteSize = 11 -export const writeQueryHeaderSingleReference = ( +export const GroupByKeyPropAlignOf = 16 + +export const writeGroupByKeyProp = ( buf: Uint8Array, - header: QueryHeaderSingleReference, + header: GroupByKeyProp, offset: number, ): number => { - buf[offset] = Number(header.op) + buf[offset] = Number(header.propId) offset += 1 - buf[offset] = Number(header.prop) + buf[offset] = Number(header.propType) offset += 1 - writeUint16(buf, Number(header.typeId), offset) - offset += 2 - writeUint16(buf, Number(header.edgeTypeId), offset) - offset += 2 - writeUint16(buf, Number(header.edgeSize), offset) + writeUint16(buf, Number(header.propDefStart), offset) offset += 2 - writeUint16(buf, Number(header.includeSize), offset) + buf[offset] = Number(header.stepType) + offset += 1 + writeUint32(buf, Number(header.stepRange), offset) + offset += 4 + writeUint16(buf, Number(header.timezone), offset) offset += 2 return offset } -export const writeQueryHeaderSingleReferenceProps = { - op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { +export const writeGroupByKeyPropProps = { + propId: (buf: Uint8Array, value: number, offset: number) => { buf[offset] = Number(value) }, - prop: (buf: Uint8Array, value: number, offset: number) => { + propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { buf[offset + 1] = Number(value) }, - typeId: (buf: Uint8Array, value: TypeId, offset: number) => { + propDefStart: (buf: Uint8Array, value: number, offset: number) => { writeUint16(buf, Number(value), offset + 2) }, - edgeTypeId: (buf: Uint8Array, value: TypeId, offset: number) => { - writeUint16(buf, Number(value), offset + 4) + stepType: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 4] = Number(value) }, - edgeSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 6) + stepRange: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 5) }, - includeSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 8) + timezone: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 9) }, } -export const readQueryHeaderSingleReference = ( +export const readGroupByKeyProp = ( buf: Uint8Array, offset: number, -): QueryHeaderSingleReference => { - const value: QueryHeaderSingleReference = { - op: (buf[offset]) as QueryTypeEnum, - prop: buf[offset + 1], - typeId: (readUint16(buf, offset + 2)) as TypeId, - edgeTypeId: (readUint16(buf, offset + 4)) as TypeId, - edgeSize: readUint16(buf, offset + 6), - includeSize: readUint16(buf, offset + 8), +): GroupByKeyProp => { + const value: GroupByKeyProp = { + propId: buf[offset], + propType: (buf[offset + 1]) as PropTypeEnum, + propDefStart: readUint16(buf, offset + 2), + stepType: buf[offset + 4], + stepRange: readUint32(buf, offset + 5), + timezone: readUint16(buf, offset + 9), } return value } -export const readQueryHeaderSingleReferenceProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, - prop: (buf: Uint8Array, offset: number) => buf[offset + 1], - typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 2)) as TypeId, - edgeTypeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 4)) as TypeId, - edgeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 6), - includeSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), +export const readGroupByKeyPropProps = { + propId: (buf: Uint8Array, offset: number) => buf[offset], + propType: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as PropTypeEnum, + propDefStart: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 2), + stepType: (buf: Uint8Array, offset: number) => buf[offset + 4], + stepRange: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 5), + timezone: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 9), } -export const createQueryHeaderSingleReference = (header: QueryHeaderSingleReference): Uint8Array => { - const buffer = new Uint8Array(QueryHeaderSingleReferenceByteSize) - writeQueryHeaderSingleReference(buffer, header, 0) +export const createGroupByKeyProp = (header: GroupByKeyProp): Uint8Array => { + const buffer = new Uint8Array(GroupByKeyPropByteSize) + writeGroupByKeyProp(buffer, header, 0) return buffer } -export const VectorBaseType = { - int8: 1, - uint8: 2, - int16: 3, - uint16: 4, - int32: 5, - uint32: 6, - float32: 7, - float64: 8, +export const pushGroupByKeyProp = ( + buf: AutoSizedUint8Array, + header: GroupByKeyProp, +): number => { + const index = buf.length + buf.pushUint8(Number(header.propId)) + buf.pushUint8(Number(header.propType)) + buf.pushUint16(Number(header.propDefStart)) + buf.pushUint8(Number(header.stepType)) + buf.pushUint32(Number(header.stepRange)) + buf.pushUint16(Number(header.timezone)) + return index +} + +export const FilterOpCompare = { + eq: 4, + neq: 5, + eqBatch: 6, + neqBatch: 7, + eqBatchSmall: 8, + neqBatchSmall: 9, + range: 10, + nrange: 11, + gt: 14, + lt: 15, + ge: 20, + le: 21, + inc: 22, + ninc: 23, + incBatch: 24, + nincBatch: 25, + selectLargeRefs: 203, + selectRef: 204, + selectSmallRefs: 205, + selectLargeRefEdge: 206, + selectLargeRefsEdge: 207, + nextOrIndex: 253, } as const -export const VectorBaseTypeInverse = { - 1: 'int8', - 2: 'uint8', - 3: 'int16', - 4: 'uint16', - 5: 'int32', - 6: 'uint32', - 7: 'float32', - 8: 'float64', +export const FilterOpCompareInverse = { + 4: 'eq', + 5: 'neq', + 6: 'eqBatch', + 7: 'neqBatch', + 8: 'eqBatchSmall', + 9: 'neqBatchSmall', + 10: 'range', + 11: 'nrange', + 14: 'gt', + 15: 'lt', + 20: 'ge', + 21: 'le', + 22: 'inc', + 23: 'ninc', + 24: 'incBatch', + 25: 'nincBatch', + 203: 'selectLargeRefs', + 204: 'selectRef', + 205: 'selectSmallRefs', + 206: 'selectLargeRefEdge', + 207: 'selectLargeRefsEdge', + 253: 'nextOrIndex', } as const /** - int8, - uint8, - int16, - uint16, - int32, - uint32, - float32, - float64 + eq, + neq, + eqBatch, + neqBatch, + eqBatchSmall, + neqBatchSmall, + range, + nrange, + gt, + lt, + ge, + le, + inc, + ninc, + incBatch, + nincBatch, + selectLargeRefs, + selectRef, + selectSmallRefs, + selectLargeRefEdge, + selectLargeRefsEdge, + nextOrIndex */ -export type VectorBaseTypeEnum = (typeof VectorBaseType)[keyof typeof VectorBaseType] +export type FilterOpCompareEnum = (typeof FilterOpCompare)[keyof typeof FilterOpCompare] -export type AggHeader = { - op: QueryTypeEnum - typeId: TypeId - offset: number - limit: number - filterSize: number - iteratorType: QueryIteratorTypeEnum - resultsSize: number - accumulatorSize: number - hasGroupBy: boolean - isSamplingSet: boolean +export type FilterOp = { + prop: PropTypeEnum + compare: FilterOpCompareEnum } -export const AggHeaderByteSize = 19 +export const FilterOpByteSize = 2 -export const AggHeaderAlignOf = 16 +export const FilterOpAlignOf = 2 -export const packAggHeader = (obj: AggHeader): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.typeId) & 65535n) << 8n - val |= (BigInt(obj.offset) & 4294967295n) << 24n - val |= (BigInt(obj.limit) & 4294967295n) << 56n - val |= (BigInt(obj.filterSize) & 65535n) << 88n - val |= (BigInt(obj.iteratorType) & 255n) << 104n - val |= (BigInt(obj.resultsSize) & 65535n) << 112n - val |= (BigInt(obj.accumulatorSize) & 65535n) << 128n - val |= ((obj.hasGroupBy ? 1n : 0n) & 1n) << 144n - val |= ((obj.isSamplingSet ? 1n : 0n) & 1n) << 145n +export const packFilterOp = (obj: FilterOp): number => { + let val = 0 + val |= (Number(obj.prop) & 255) << 0 + val |= (Number(obj.compare) & 255) << 8 return val } -export const unpackAggHeader = (val: bigint): AggHeader => { +export const unpackFilterOp = (val: number): FilterOp => { return { - op: (Number((val >> 0n) & 255n)) as QueryTypeEnum, - typeId: (Number((val >> 8n) & 65535n)) as TypeId, - offset: Number((val >> 24n) & 4294967295n), - limit: Number((val >> 56n) & 4294967295n), - filterSize: Number((val >> 88n) & 65535n), - iteratorType: (Number((val >> 104n) & 255n)) as QueryIteratorTypeEnum, - resultsSize: Number((val >> 112n) & 65535n), - accumulatorSize: Number((val >> 128n) & 65535n), - hasGroupBy: ((val >> 144n) & 1n) === 1n, - isSamplingSet: ((val >> 145n) & 1n) === 1n, + prop: ((val >>> 0) & 255) as PropTypeEnum, + compare: ((val >>> 8) & 255) as FilterOpCompareEnum, } } -export const writeAggHeader = ( +export const writeFilterOp = ( buf: Uint8Array, - header: AggHeader, + header: FilterOp, offset: number, ): number => { - buf[offset] = Number(header.op) - offset += 1 - writeUint16(buf, Number(header.typeId), offset) - offset += 2 - writeUint32(buf, Number(header.offset), offset) - offset += 4 - writeUint32(buf, Number(header.limit), offset) - offset += 4 - writeUint16(buf, Number(header.filterSize), offset) - offset += 2 - buf[offset] = Number(header.iteratorType) + buf[offset] = Number(header.prop) offset += 1 - writeUint16(buf, Number(header.resultsSize), offset) - offset += 2 - writeUint16(buf, Number(header.accumulatorSize), offset) - offset += 2 - buf[offset] = 0 - buf[offset] |= (((header.hasGroupBy ? 1 : 0) >>> 0) & 1) << 0 - buf[offset] |= (((header.isSamplingSet ? 1 : 0) >>> 0) & 1) << 1 - buf[offset] |= ((0 >>> 0) & 63) << 2 + buf[offset] = Number(header.compare) offset += 1 return offset } -export const writeAggHeaderProps = { - op: (buf: Uint8Array, value: QueryTypeEnum, offset: number) => { +export const writeFilterOpProps = { + prop: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { buf[offset] = Number(value) }, - typeId: (buf: Uint8Array, value: TypeId, offset: number) => { - writeUint16(buf, Number(value), offset + 1) - }, - offset: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 3) - }, - limit: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 7) - }, - filterSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 11) - }, - iteratorType: (buf: Uint8Array, value: QueryIteratorTypeEnum, offset: number) => { - buf[offset + 13] = Number(value) - }, - resultsSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 14) - }, - accumulatorSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 16) - }, - hasGroupBy: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 18] |= (((value ? 1 : 0) >>> 0) & 1) << 0 - }, - isSamplingSet: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 18] |= (((value ? 1 : 0) >>> 0) & 1) << 1 + compare: (buf: Uint8Array, value: FilterOpCompareEnum, offset: number) => { + buf[offset + 1] = Number(value) }, } -export const readAggHeader = ( +export const readFilterOp = ( buf: Uint8Array, offset: number, -): AggHeader => { - const value: AggHeader = { - op: (buf[offset]) as QueryTypeEnum, - typeId: (readUint16(buf, offset + 1)) as TypeId, - offset: readUint32(buf, offset + 3), - limit: readUint32(buf, offset + 7), - filterSize: readUint16(buf, offset + 11), - iteratorType: (buf[offset + 13]) as QueryIteratorTypeEnum, - resultsSize: readUint16(buf, offset + 14), - accumulatorSize: readUint16(buf, offset + 16), - hasGroupBy: (((buf[offset + 18] >>> 0) & 1)) === 1, - isSamplingSet: (((buf[offset + 18] >>> 1) & 1)) === 1, +): FilterOp => { + const value: FilterOp = { + prop: (buf[offset]) as PropTypeEnum, + compare: (buf[offset + 1]) as FilterOpCompareEnum, } return value } -export const readAggHeaderProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as QueryTypeEnum, - typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 1)) as TypeId, - offset: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 3), - limit: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 7), - filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 11), - iteratorType: (buf: Uint8Array, offset: number) => (buf[offset + 13]) as QueryIteratorTypeEnum, - resultsSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 14), - accumulatorSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 16), - hasGroupBy: (buf: Uint8Array, offset: number) => (((buf[offset + 18] >>> 0) & 1)) === 1, - isSamplingSet: (buf: Uint8Array, offset: number) => (((buf[offset + 18] >>> 1) & 1)) === 1, +export const readFilterOpProps = { + prop: (buf: Uint8Array, offset: number) => (buf[offset]) as PropTypeEnum, + compare: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as FilterOpCompareEnum, } -export const createAggHeader = (header: AggHeader): Uint8Array => { - const buffer = new Uint8Array(AggHeaderByteSize) - writeAggHeader(buffer, header, 0) +export const createFilterOp = (header: FilterOp): Uint8Array => { + const buffer = new Uint8Array(FilterOpByteSize) + writeFilterOp(buffer, header, 0) return buffer } -export type AggRefsHeader = { - op: IncludeOpEnum - targetProp: number - offset: number - filterSize: number - resultsSize: number - accumulatorSize: number - hasGroupBy: boolean - isSamplingSet: boolean +export const pushFilterOp = ( + buf: AutoSizedUint8Array, + header: FilterOp, +): number => { + const index = buf.length + buf.pushUint8(Number(header.prop)) + buf.pushUint8(Number(header.compare)) + return index } -export const AggRefsHeaderByteSize = 13 - -export const AggRefsHeaderAlignOf = 16 - -export const packAggRefsHeader = (obj: AggRefsHeader): bigint => { - let val = 0n - val |= (BigInt(obj.op) & 255n) << 0n - val |= (BigInt(obj.targetProp) & 255n) << 8n - val |= (BigInt(obj.offset) & 4294967295n) << 16n - val |= (BigInt(obj.filterSize) & 65535n) << 48n - val |= (BigInt(obj.resultsSize) & 65535n) << 64n - val |= (BigInt(obj.accumulatorSize) & 65535n) << 80n - val |= ((obj.hasGroupBy ? 1n : 0n) & 1n) << 96n - val |= ((obj.isSamplingSet ? 1n : 0n) & 1n) << 97n - return val +export type FilterCondition = { + op: FilterOp + size: number + prop: number + start: number + len: number + fieldSchema: number + offset: number } -export const unpackAggRefsHeader = (val: bigint): AggRefsHeader => { - return { - op: (Number((val >> 0n) & 255n)) as IncludeOpEnum, - targetProp: Number((val >> 8n) & 255n), - offset: Number((val >> 16n) & 4294967295n), - filterSize: Number((val >> 48n) & 65535n), - resultsSize: Number((val >> 64n) & 65535n), - accumulatorSize: Number((val >> 80n) & 65535n), - hasGroupBy: ((val >> 96n) & 1n) === 1n, - isSamplingSet: ((val >> 97n) & 1n) === 1n, - } -} +export const FilterConditionByteSize = 19 -export const writeAggRefsHeader = ( +export const FilterConditionAlignOf = 16 + +export const writeFilterCondition = ( buf: Uint8Array, - header: AggRefsHeader, + header: FilterCondition, offset: number, ): number => { - buf[offset] = Number(header.op) - offset += 1 - buf[offset] = Number(header.targetProp) - offset += 1 - writeUint32(buf, Number(header.offset), offset) - offset += 4 - writeUint16(buf, Number(header.filterSize), offset) - offset += 2 - writeUint16(buf, Number(header.resultsSize), offset) + writeUint16(buf, Number(packFilterOp(header.op)), offset) offset += 2 - writeUint16(buf, Number(header.accumulatorSize), offset) + writeUint32(buf, Number(header.size), offset) + offset += 4 + buf[offset] = Number(header.prop) + offset += 1 + writeUint16(buf, Number(header.start), offset) offset += 2 - buf[offset] = 0 - buf[offset] |= (((header.hasGroupBy ? 1 : 0) >>> 0) & 1) << 0 - buf[offset] |= (((header.isSamplingSet ? 1 : 0) >>> 0) & 1) << 1 - buf[offset] |= ((0 >>> 0) & 63) << 2 + buf[offset] = Number(header.len) + offset += 1 + writeUint64(buf, header.fieldSchema, offset) + offset += 8 + buf[offset] = Number(header.offset) offset += 1 return offset } -export const writeAggRefsHeaderProps = { - op: (buf: Uint8Array, value: IncludeOpEnum, offset: number) => { - buf[offset] = Number(value) - }, - targetProp: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 1] = Number(value) +export const writeFilterConditionProps = { + op: (buf: Uint8Array, value: FilterOp, offset: number) => { + writeUint16(buf, Number(packFilterOp(value)), offset) }, - offset: (buf: Uint8Array, value: number, offset: number) => { + size: (buf: Uint8Array, value: number, offset: number) => { writeUint32(buf, Number(value), offset + 2) }, - filterSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 6) + prop: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 6] = Number(value) }, - resultsSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 8) + start: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 7) }, - accumulatorSize: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 10) + len: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 9] = Number(value) }, - hasGroupBy: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 12] |= (((value ? 1 : 0) >>> 0) & 1) << 0 + fieldSchema: (buf: Uint8Array, value: number, offset: number) => { + writeUint64(buf, value, offset + 10) }, - isSamplingSet: (buf: Uint8Array, value: boolean, offset: number) => { - buf[offset + 12] |= (((value ? 1 : 0) >>> 0) & 1) << 1 + offset: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 18] = Number(value) }, } -export const readAggRefsHeader = ( +export const readFilterCondition = ( buf: Uint8Array, offset: number, -): AggRefsHeader => { - const value: AggRefsHeader = { - op: (buf[offset]) as IncludeOpEnum, - targetProp: buf[offset + 1], - offset: readUint32(buf, offset + 2), - filterSize: readUint16(buf, offset + 6), - resultsSize: readUint16(buf, offset + 8), - accumulatorSize: readUint16(buf, offset + 10), - hasGroupBy: (((buf[offset + 12] >>> 0) & 1)) === 1, - isSamplingSet: (((buf[offset + 12] >>> 1) & 1)) === 1, +): FilterCondition => { + const value: FilterCondition = { + op: unpackFilterOp(readUint16(buf, offset)), + size: readUint32(buf, offset + 2), + prop: buf[offset + 6], + start: readUint16(buf, offset + 7), + len: buf[offset + 9], + fieldSchema: readUint64(buf, offset + 10), + offset: buf[offset + 18], } return value } -export const readAggRefsHeaderProps = { - op: (buf: Uint8Array, offset: number) => (buf[offset]) as IncludeOpEnum, - targetProp: (buf: Uint8Array, offset: number) => buf[offset + 1], - offset: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 2), - filterSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 6), - resultsSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 8), - accumulatorSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 10), - hasGroupBy: (buf: Uint8Array, offset: number) => (((buf[offset + 12] >>> 0) & 1)) === 1, - isSamplingSet: (buf: Uint8Array, offset: number) => (((buf[offset + 12] >>> 1) & 1)) === 1, +export const readFilterConditionProps = { + op: (buf: Uint8Array, offset: number) => unpackFilterOp(readUint16(buf, offset)), + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 2), + prop: (buf: Uint8Array, offset: number) => buf[offset + 6], + start: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 7), + len: (buf: Uint8Array, offset: number) => buf[offset + 9], + fieldSchema: (buf: Uint8Array, offset: number) => readUint64(buf, offset + 10), + offset: (buf: Uint8Array, offset: number) => buf[offset + 18], } -export const createAggRefsHeader = (header: AggRefsHeader): Uint8Array => { - const buffer = new Uint8Array(AggRefsHeaderByteSize) - writeAggRefsHeader(buffer, header, 0) +export const createFilterCondition = (header: FilterCondition): Uint8Array => { + const buffer = new Uint8Array(FilterConditionByteSize) + writeFilterCondition(buffer, header, 0) return buffer } -export type addMultiSubscriptionHeader = { - typeId: number +export const pushFilterCondition = ( + buf: AutoSizedUint8Array, + header: FilterCondition, +): number => { + const index = buf.length + buf.pushUint16(Number(packFilterOp(header.op))) + buf.pushUint32(Number(header.size)) + buf.pushUint8(Number(header.prop)) + buf.pushUint16(Number(header.start)) + buf.pushUint8(Number(header.len)) + buf.pushUint64(header.fieldSchema) + buf.pushUint8(Number(header.offset)) + return index } -export const addMultiSubscriptionHeaderByteSize = 2 - -export const addMultiSubscriptionHeaderAlignOf = 2 - -export const packaddMultiSubscriptionHeader = (obj: addMultiSubscriptionHeader): bigint => { - let val = 0n - val |= (BigInt(obj.typeId) & 65535n) << 0n - return val +export type FilterSelect = { + size: number + typeEntry: number + typeId: TypeId } -export const unpackaddMultiSubscriptionHeader = (val: bigint): addMultiSubscriptionHeader => { - return { - typeId: Number((val >> 0n) & 65535n), - } -} +export const FilterSelectByteSize = 14 -export const writeaddMultiSubscriptionHeader = ( +export const FilterSelectAlignOf = 16 + +export const writeFilterSelect = ( buf: Uint8Array, - header: addMultiSubscriptionHeader, + header: FilterSelect, offset: number, ): number => { + writeUint32(buf, Number(header.size), offset) + offset += 4 + writeUint64(buf, header.typeEntry, offset) + offset += 8 writeUint16(buf, Number(header.typeId), offset) offset += 2 return offset } -export const writeaddMultiSubscriptionHeaderProps = { - typeId: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset) +export const writeFilterSelectProps = { + size: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset) + }, + typeEntry: (buf: Uint8Array, value: number, offset: number) => { + writeUint64(buf, value, offset + 4) + }, + typeId: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 12) }, } -export const readaddMultiSubscriptionHeader = ( +export const readFilterSelect = ( buf: Uint8Array, offset: number, -): addMultiSubscriptionHeader => { - const value: addMultiSubscriptionHeader = { - typeId: readUint16(buf, offset), +): FilterSelect => { + const value: FilterSelect = { + size: readUint32(buf, offset), + typeEntry: readUint64(buf, offset + 4), + typeId: (readUint16(buf, offset + 12)) as TypeId, } return value } -export const readaddMultiSubscriptionHeaderProps = { - typeId: (buf: Uint8Array, offset: number) => readUint16(buf, offset), +export const readFilterSelectProps = { + size: (buf: Uint8Array, offset: number) => readUint32(buf, offset), + typeEntry: (buf: Uint8Array, offset: number) => readUint64(buf, offset + 4), + typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 12)) as TypeId, } -export const createaddMultiSubscriptionHeader = (header: addMultiSubscriptionHeader): Uint8Array => { - const buffer = new Uint8Array(addMultiSubscriptionHeaderByteSize) - writeaddMultiSubscriptionHeader(buffer, header, 0) +export const createFilterSelect = (header: FilterSelect): Uint8Array => { + const buffer = new Uint8Array(FilterSelectByteSize) + writeFilterSelect(buffer, header, 0) return buffer } -export type removeMultiSubscriptionHeader = { - typeId: number +export const pushFilterSelect = ( + buf: AutoSizedUint8Array, + header: FilterSelect, +): number => { + const index = buf.length + buf.pushUint32(Number(header.size)) + buf.pushUint64(header.typeEntry) + buf.pushUint16(Number(header.typeId)) + return index } -export const removeMultiSubscriptionHeaderByteSize = 2 - -export const removeMultiSubscriptionHeaderAlignOf = 2 - -export const packremoveMultiSubscriptionHeader = (obj: removeMultiSubscriptionHeader): bigint => { - let val = 0n - val |= (BigInt(obj.typeId) & 65535n) << 0n - return val +export type SelvaSchemaHeader = { + blockCapacity: number + nrFields: number + nrFixedFields: number + nrVirtualFields: number + sdbVersion: number } -export const unpackremoveMultiSubscriptionHeader = (val: bigint): removeMultiSubscriptionHeader => { - return { - typeId: Number((val >> 0n) & 65535n), - } -} +export const SelvaSchemaHeaderByteSize = 8 -export const writeremoveMultiSubscriptionHeader = ( +export const SelvaSchemaHeaderAlignOf = 8 + +export const writeSelvaSchemaHeader = ( buf: Uint8Array, - header: removeMultiSubscriptionHeader, + header: SelvaSchemaHeader, offset: number, ): number => { - writeUint16(buf, Number(header.typeId), offset) - offset += 2 + writeUint32(buf, Number(header.blockCapacity), offset) + offset += 4 + buf[offset] = Number(header.nrFields) + offset += 1 + buf[offset] = Number(header.nrFixedFields) + offset += 1 + buf[offset] = Number(header.nrVirtualFields) + offset += 1 + buf[offset] = Number(header.sdbVersion) + offset += 1 return offset } -export const writeremoveMultiSubscriptionHeaderProps = { - typeId: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset) +export const writeSelvaSchemaHeaderProps = { + blockCapacity: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset) + }, + nrFields: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 4] = Number(value) + }, + nrFixedFields: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 5] = Number(value) + }, + nrVirtualFields: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 6] = Number(value) + }, + sdbVersion: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 7] = Number(value) }, } -export const readremoveMultiSubscriptionHeader = ( +export const readSelvaSchemaHeader = ( buf: Uint8Array, offset: number, -): removeMultiSubscriptionHeader => { - const value: removeMultiSubscriptionHeader = { - typeId: readUint16(buf, offset), +): SelvaSchemaHeader => { + const value: SelvaSchemaHeader = { + blockCapacity: readUint32(buf, offset), + nrFields: buf[offset + 4], + nrFixedFields: buf[offset + 5], + nrVirtualFields: buf[offset + 6], + sdbVersion: buf[offset + 7], } return value } -export const readremoveMultiSubscriptionHeaderProps = { - typeId: (buf: Uint8Array, offset: number) => readUint16(buf, offset), +export const readSelvaSchemaHeaderProps = { + blockCapacity: (buf: Uint8Array, offset: number) => readUint32(buf, offset), + nrFields: (buf: Uint8Array, offset: number) => buf[offset + 4], + nrFixedFields: (buf: Uint8Array, offset: number) => buf[offset + 5], + nrVirtualFields: (buf: Uint8Array, offset: number) => buf[offset + 6], + sdbVersion: (buf: Uint8Array, offset: number) => buf[offset + 7], } -export const createremoveMultiSubscriptionHeader = (header: removeMultiSubscriptionHeader): Uint8Array => { - const buffer = new Uint8Array(removeMultiSubscriptionHeaderByteSize) - writeremoveMultiSubscriptionHeader(buffer, header, 0) +export const createSelvaSchemaHeader = (header: SelvaSchemaHeader): Uint8Array => { + const buffer = new Uint8Array(SelvaSchemaHeaderByteSize) + writeSelvaSchemaHeader(buffer, header, 0) return buffer } -export type AggProp = { - propId: number - propType: PropTypeEnum - propDefStart: number - aggFunction: AggFunctionEnum - resultPos: number - accumulatorPos: number +export const pushSelvaSchemaHeader = ( + buf: AutoSizedUint8Array, + header: SelvaSchemaHeader, +): number => { + const index = buf.length + buf.pushUint32(Number(header.blockCapacity)) + buf.pushUint8(Number(header.nrFields)) + buf.pushUint8(Number(header.nrFixedFields)) + buf.pushUint8(Number(header.nrVirtualFields)) + buf.pushUint8(Number(header.sdbVersion)) + return index } -export const AggPropByteSize = 9 +export type SelvaSchemaMicroBuffer = { + type: SelvaFieldType + len: number + hasDefault: number +} -export const AggPropAlignOf = 16 +export const SelvaSchemaMicroBufferByteSize = 4 + +export const SelvaSchemaMicroBufferAlignOf = 4 -export const packAggProp = (obj: AggProp): bigint => { - let val = 0n - val |= (BigInt(obj.propId) & 255n) << 0n - val |= (BigInt(obj.propType) & 255n) << 8n - val |= (BigInt(obj.propDefStart) & 65535n) << 16n - val |= (BigInt(obj.aggFunction) & 255n) << 32n - val |= (BigInt(obj.resultPos) & 65535n) << 40n - val |= (BigInt(obj.accumulatorPos) & 65535n) << 56n +export const packSelvaSchemaMicroBuffer = (obj: SelvaSchemaMicroBuffer): number => { + let val = 0 + val |= (Number(obj.type) & 255) << 0 + val |= (Number(obj.len) & 65535) << 8 + val |= (Number(obj.hasDefault) & 255) << 24 return val } -export const unpackAggProp = (val: bigint): AggProp => { +export const unpackSelvaSchemaMicroBuffer = (val: number): SelvaSchemaMicroBuffer => { return { - propId: Number((val >> 0n) & 255n), - propType: (Number((val >> 8n) & 255n)) as PropTypeEnum, - propDefStart: Number((val >> 16n) & 65535n), - aggFunction: (Number((val >> 32n) & 255n)) as AggFunctionEnum, - resultPos: Number((val >> 40n) & 65535n), - accumulatorPos: Number((val >> 56n) & 65535n), + type: Number((val >>> 0) & 255), + len: Number((val >>> 8) & 65535), + hasDefault: Number((val >>> 24) & 255), } } -export const writeAggProp = ( +export const writeSelvaSchemaMicroBuffer = ( buf: Uint8Array, - header: AggProp, + header: SelvaSchemaMicroBuffer, offset: number, ): number => { - buf[offset] = Number(header.propId) - offset += 1 - buf[offset] = Number(header.propType) + buf[offset] = Number(header.type) offset += 1 - writeUint16(buf, Number(header.propDefStart), offset) + writeUint16(buf, Number(header.len), offset) offset += 2 - buf[offset] = Number(header.aggFunction) + buf[offset] = Number(header.hasDefault) offset += 1 - writeUint16(buf, Number(header.resultPos), offset) - offset += 2 - writeUint16(buf, Number(header.accumulatorPos), offset) - offset += 2 return offset } -export const writeAggPropProps = { - propId: (buf: Uint8Array, value: number, offset: number) => { +export const writeSelvaSchemaMicroBufferProps = { + type: (buf: Uint8Array, value: SelvaFieldType, offset: number) => { buf[offset] = Number(value) }, - propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { - buf[offset + 1] = Number(value) - }, - propDefStart: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 2) - }, - aggFunction: (buf: Uint8Array, value: AggFunctionEnum, offset: number) => { - buf[offset + 4] = Number(value) - }, - resultPos: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 5) + len: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 1) }, - accumulatorPos: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 7) + hasDefault: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 3] = Number(value) }, } -export const readAggProp = ( +export const readSelvaSchemaMicroBuffer = ( buf: Uint8Array, offset: number, -): AggProp => { - const value: AggProp = { - propId: buf[offset], - propType: (buf[offset + 1]) as PropTypeEnum, - propDefStart: readUint16(buf, offset + 2), - aggFunction: (buf[offset + 4]) as AggFunctionEnum, - resultPos: readUint16(buf, offset + 5), - accumulatorPos: readUint16(buf, offset + 7), +): SelvaSchemaMicroBuffer => { + const value: SelvaSchemaMicroBuffer = { + type: buf[offset], + len: readUint16(buf, offset + 1), + hasDefault: buf[offset + 3], } return value } -export const readAggPropProps = { - propId: (buf: Uint8Array, offset: number) => buf[offset], - propType: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as PropTypeEnum, - propDefStart: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 2), - aggFunction: (buf: Uint8Array, offset: number) => (buf[offset + 4]) as AggFunctionEnum, - resultPos: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 5), - accumulatorPos: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 7), +export const readSelvaSchemaMicroBufferProps = { + type: (buf: Uint8Array, offset: number) => buf[offset], + len: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 1), + hasDefault: (buf: Uint8Array, offset: number) => buf[offset + 3], } -export const createAggProp = (header: AggProp): Uint8Array => { - const buffer = new Uint8Array(AggPropByteSize) - writeAggProp(buffer, header, 0) +export const createSelvaSchemaMicroBuffer = (header: SelvaSchemaMicroBuffer): Uint8Array => { + const buffer = new Uint8Array(SelvaSchemaMicroBufferByteSize) + writeSelvaSchemaMicroBuffer(buffer, header, 0) return buffer } -export type GroupByKeyProp = { - propId: number - propType: PropTypeEnum - propDefStart: number - stepType: number - stepRange: number - timezone: number +export const pushSelvaSchemaMicroBuffer = ( + buf: AutoSizedUint8Array, + header: SelvaSchemaMicroBuffer, +): number => { + const index = buf.length + buf.pushUint8(Number(header.type)) + buf.pushUint16(Number(header.len)) + buf.pushUint8(Number(header.hasDefault)) + return index } -export const GroupByKeyPropByteSize = 11 - -export const GroupByKeyPropAlignOf = 16 - -export const packGroupByKeyProp = (obj: GroupByKeyProp): bigint => { - let val = 0n - val |= (BigInt(obj.propId) & 255n) << 0n - val |= (BigInt(obj.propType) & 255n) << 8n - val |= (BigInt(obj.propDefStart) & 65535n) << 16n - val |= (BigInt(obj.stepType) & 255n) << 32n - val |= (BigInt(obj.stepRange) & 4294967295n) << 40n - val |= (BigInt(obj.timezone) & 65535n) << 72n - return val +export type SelvaSchemaString = { + type: SelvaFieldType + fixedLenHint: number + defaultLen: number } -export const unpackGroupByKeyProp = (val: bigint): GroupByKeyProp => { - return { - propId: Number((val >> 0n) & 255n), - propType: (Number((val >> 8n) & 255n)) as PropTypeEnum, - propDefStart: Number((val >> 16n) & 65535n), - stepType: Number((val >> 32n) & 255n), - stepRange: Number((val >> 40n) & 4294967295n), - timezone: Number((val >> 72n) & 65535n), - } -} +export const SelvaSchemaStringByteSize = 6 -export const writeGroupByKeyProp = ( +export const SelvaSchemaStringAlignOf = 8 + +export const writeSelvaSchemaString = ( buf: Uint8Array, - header: GroupByKeyProp, + header: SelvaSchemaString, offset: number, -): number => { - buf[offset] = Number(header.propId) - offset += 1 - buf[offset] = Number(header.propType) +): number => { + buf[offset] = Number(header.type) offset += 1 - writeUint16(buf, Number(header.propDefStart), offset) - offset += 2 - buf[offset] = Number(header.stepType) + buf[offset] = Number(header.fixedLenHint) offset += 1 - writeUint32(buf, Number(header.stepRange), offset) + writeUint32(buf, Number(header.defaultLen), offset) offset += 4 - writeUint16(buf, Number(header.timezone), offset) - offset += 2 return offset } -export const writeGroupByKeyPropProps = { - propId: (buf: Uint8Array, value: number, offset: number) => { +export const writeSelvaSchemaStringProps = { + type: (buf: Uint8Array, value: SelvaFieldType, offset: number) => { buf[offset] = Number(value) }, - propType: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { + fixedLenHint: (buf: Uint8Array, value: number, offset: number) => { buf[offset + 1] = Number(value) }, - propDefStart: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 2) - }, - stepType: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 4] = Number(value) - }, - stepRange: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 5) - }, - timezone: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 9) + defaultLen: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 2) }, } -export const readGroupByKeyProp = ( +export const readSelvaSchemaString = ( buf: Uint8Array, offset: number, -): GroupByKeyProp => { - const value: GroupByKeyProp = { - propId: buf[offset], - propType: (buf[offset + 1]) as PropTypeEnum, - propDefStart: readUint16(buf, offset + 2), - stepType: buf[offset + 4], - stepRange: readUint32(buf, offset + 5), - timezone: readUint16(buf, offset + 9), +): SelvaSchemaString => { + const value: SelvaSchemaString = { + type: buf[offset], + fixedLenHint: buf[offset + 1], + defaultLen: readUint32(buf, offset + 2), } return value } -export const readGroupByKeyPropProps = { - propId: (buf: Uint8Array, offset: number) => buf[offset], - propType: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as PropTypeEnum, - propDefStart: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 2), - stepType: (buf: Uint8Array, offset: number) => buf[offset + 4], - stepRange: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 5), - timezone: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 9), +export const readSelvaSchemaStringProps = { + type: (buf: Uint8Array, offset: number) => buf[offset], + fixedLenHint: (buf: Uint8Array, offset: number) => buf[offset + 1], + defaultLen: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 2), } -export const createGroupByKeyProp = (header: GroupByKeyProp): Uint8Array => { - const buffer = new Uint8Array(GroupByKeyPropByteSize) - writeGroupByKeyProp(buffer, header, 0) +export const createSelvaSchemaString = (header: SelvaSchemaString): Uint8Array => { + const buffer = new Uint8Array(SelvaSchemaStringByteSize) + writeSelvaSchemaString(buffer, header, 0) return buffer } -export const FilterOpCompare = { - eq: 4, - neq: 5, - eqBatch: 6, - neqBatch: 7, - eqBatchSmall: 8, - neqBatchSmall: 9, - range: 10, - nrange: 11, - gt: 14, - lt: 15, - gtBatch: 16, - ltBatch: 17, - gtBatchSmall: 18, - ltBatchSmall: 19, - ge: 20, - le: 21, - geBatch: 22, - leBatch: 23, - geBatchSmall: 24, - leBatchSmall: 25, - selectLargeRefs: 203, - selectRef: 204, - selectSmallRefs: 205, - selectLargeRefEdge: 206, - selectLargeRefsEdge: 207, - nextOrIndex: 253, -} as const - -export const FilterOpCompareInverse = { - 4: 'eq', - 5: 'neq', - 6: 'eqBatch', - 7: 'neqBatch', - 8: 'eqBatchSmall', - 9: 'neqBatchSmall', - 10: 'range', - 11: 'nrange', - 14: 'gt', - 15: 'lt', - 16: 'gtBatch', - 17: 'ltBatch', - 18: 'gtBatchSmall', - 19: 'ltBatchSmall', - 20: 'ge', - 21: 'le', - 22: 'geBatch', - 23: 'leBatch', - 24: 'geBatchSmall', - 25: 'leBatchSmall', - 203: 'selectLargeRefs', - 204: 'selectRef', - 205: 'selectSmallRefs', - 206: 'selectLargeRefEdge', - 207: 'selectLargeRefsEdge', - 253: 'nextOrIndex', -} as const - -/** - eq, - neq, - eqBatch, - neqBatch, - eqBatchSmall, - neqBatchSmall, - range, - nrange, - gt, - lt, - gtBatch, - ltBatch, - gtBatchSmall, - ltBatchSmall, - ge, - le, - geBatch, - leBatch, - geBatchSmall, - leBatchSmall, - selectLargeRefs, - selectRef, - selectSmallRefs, - selectLargeRefEdge, - selectLargeRefsEdge, - nextOrIndex - */ -export type FilterOpCompareEnum = (typeof FilterOpCompare)[keyof typeof FilterOpCompare] +export const pushSelvaSchemaString = ( + buf: AutoSizedUint8Array, + header: SelvaSchemaString, +): number => { + const index = buf.length + buf.pushUint8(Number(header.type)) + buf.pushUint8(Number(header.fixedLenHint)) + buf.pushUint32(Number(header.defaultLen)) + return index +} -export type FilterOp = { - prop: PropTypeEnum - compare: FilterOpCompareEnum +export type SelvaSchemaText = { + type: SelvaFieldType + nrDefaults: number } -export const FilterOpByteSize = 2 +export const SelvaSchemaTextByteSize = 2 -export const FilterOpAlignOf = 2 +export const SelvaSchemaTextAlignOf = 2 -export const packFilterOp = (obj: FilterOp): bigint => { - let val = 0n - val |= (BigInt(obj.prop) & 255n) << 0n - val |= (BigInt(obj.compare) & 255n) << 8n +export const packSelvaSchemaText = (obj: SelvaSchemaText): number => { + let val = 0 + val |= (Number(obj.type) & 255) << 0 + val |= (Number(obj.nrDefaults) & 255) << 8 return val } -export const unpackFilterOp = (val: bigint): FilterOp => { +export const unpackSelvaSchemaText = (val: number): SelvaSchemaText => { return { - prop: (Number((val >> 0n) & 255n)) as PropTypeEnum, - compare: (Number((val >> 8n) & 255n)) as FilterOpCompareEnum, + type: Number((val >>> 0) & 255), + nrDefaults: Number((val >>> 8) & 255), } } -export const writeFilterOp = ( +export const writeSelvaSchemaText = ( buf: Uint8Array, - header: FilterOp, + header: SelvaSchemaText, offset: number, ): number => { - buf[offset] = Number(header.prop) + buf[offset] = Number(header.type) offset += 1 - buf[offset] = Number(header.compare) + buf[offset] = Number(header.nrDefaults) offset += 1 return offset } -export const writeFilterOpProps = { - prop: (buf: Uint8Array, value: PropTypeEnum, offset: number) => { +export const writeSelvaSchemaTextProps = { + type: (buf: Uint8Array, value: SelvaFieldType, offset: number) => { buf[offset] = Number(value) }, - compare: (buf: Uint8Array, value: FilterOpCompareEnum, offset: number) => { + nrDefaults: (buf: Uint8Array, value: number, offset: number) => { buf[offset + 1] = Number(value) }, } -export const readFilterOp = ( +export const readSelvaSchemaText = ( buf: Uint8Array, offset: number, -): FilterOp => { - const value: FilterOp = { - prop: (buf[offset]) as PropTypeEnum, - compare: (buf[offset + 1]) as FilterOpCompareEnum, +): SelvaSchemaText => { + const value: SelvaSchemaText = { + type: buf[offset], + nrDefaults: buf[offset + 1], } return value } -export const readFilterOpProps = { - prop: (buf: Uint8Array, offset: number) => (buf[offset]) as PropTypeEnum, - compare: (buf: Uint8Array, offset: number) => (buf[offset + 1]) as FilterOpCompareEnum, +export const readSelvaSchemaTextProps = { + type: (buf: Uint8Array, offset: number) => buf[offset], + nrDefaults: (buf: Uint8Array, offset: number) => buf[offset + 1], } -export const createFilterOp = (header: FilterOp): Uint8Array => { - const buffer = new Uint8Array(FilterOpByteSize) - writeFilterOp(buffer, header, 0) +export const createSelvaSchemaText = (header: SelvaSchemaText): Uint8Array => { + const buffer = new Uint8Array(SelvaSchemaTextByteSize) + writeSelvaSchemaText(buffer, header, 0) return buffer } -export type FilterCondition = { - op: FilterOp - size: number - prop: number - start: number - len: number - fieldSchema: number - offset: number +export const pushSelvaSchemaText = ( + buf: AutoSizedUint8Array, + header: SelvaSchemaText, +): number => { + const index = buf.length + buf.pushUint8(Number(header.type)) + buf.pushUint8(Number(header.nrDefaults)) + return index } -export const FilterConditionByteSize = 19 - -export const FilterConditionAlignOf = 16 - -export const packFilterCondition = (obj: FilterCondition): bigint => { - let val = 0n - val |= (packFilterOp(obj.op) & 65535n) << 0n - val |= (BigInt(obj.size) & 4294967295n) << 16n - val |= (BigInt(obj.prop) & 255n) << 48n - val |= (BigInt(obj.start) & 65535n) << 56n - val |= (BigInt(obj.len) & 255n) << 72n - val |= (BigInt(obj.fieldSchema) & 18446744073709551615n) << 80n - val |= (BigInt(obj.offset) & 255n) << 144n - return val +export type SelvaSchemaRef = { + type: SelvaFieldType + flags: number + dstNodeType: TypeId + inverseField: SelvaField + edgeNodeType: TypeId + capped: number } -export const unpackFilterCondition = (val: bigint): FilterCondition => { - return { - op: unpackFilterOp((val >> 0n) & 65535n), - size: Number((val >> 16n) & 4294967295n), - prop: Number((val >> 48n) & 255n), - start: Number((val >> 56n) & 65535n), - len: Number((val >> 72n) & 255n), - fieldSchema: Number((val >> 80n) & 18446744073709551615n), - offset: Number((val >> 144n) & 255n), - } -} +export const SelvaSchemaRefByteSize = 11 -export const writeFilterCondition = ( +export const SelvaSchemaRefAlignOf = 16 + +export const writeSelvaSchemaRef = ( buf: Uint8Array, - header: FilterCondition, + header: SelvaSchemaRef, offset: number, ): number => { - writeUint16(buf, Number(packFilterOp(header.op)), offset) - offset += 2 - writeUint32(buf, Number(header.size), offset) - offset += 4 - buf[offset] = Number(header.prop) + buf[offset] = Number(header.type) offset += 1 - writeUint16(buf, Number(header.start), offset) - offset += 2 - buf[offset] = Number(header.len) + buf[offset] = Number(header.flags) offset += 1 - writeUint64(buf, header.fieldSchema, offset) - offset += 8 - buf[offset] = Number(header.offset) + writeUint16(buf, Number(header.dstNodeType), offset) + offset += 2 + buf[offset] = Number(header.inverseField) offset += 1 + writeUint16(buf, Number(header.edgeNodeType), offset) + offset += 2 + writeUint32(buf, Number(header.capped), offset) + offset += 4 return offset } -export const writeFilterConditionProps = { - op: (buf: Uint8Array, value: FilterOp, offset: number) => { - writeUint16(buf, Number(packFilterOp(value)), offset) - }, - size: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset + 2) +export const writeSelvaSchemaRefProps = { + type: (buf: Uint8Array, value: SelvaFieldType, offset: number) => { + buf[offset] = Number(value) }, - prop: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 6] = Number(value) + flags: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 1] = Number(value) }, - start: (buf: Uint8Array, value: number, offset: number) => { - writeUint16(buf, Number(value), offset + 7) + dstNodeType: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 2) }, - len: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 9] = Number(value) + inverseField: (buf: Uint8Array, value: SelvaField, offset: number) => { + buf[offset + 4] = Number(value) }, - fieldSchema: (buf: Uint8Array, value: number, offset: number) => { - writeUint64(buf, value, offset + 10) + edgeNodeType: (buf: Uint8Array, value: TypeId, offset: number) => { + writeUint16(buf, Number(value), offset + 5) }, - offset: (buf: Uint8Array, value: number, offset: number) => { - buf[offset + 18] = Number(value) + capped: (buf: Uint8Array, value: number, offset: number) => { + writeUint32(buf, Number(value), offset + 7) }, } -export const readFilterCondition = ( +export const readSelvaSchemaRef = ( buf: Uint8Array, offset: number, -): FilterCondition => { - const value: FilterCondition = { - op: unpackFilterOp(BigInt(readUint16(buf, offset))), - size: readUint32(buf, offset + 2), - prop: buf[offset + 6], - start: readUint16(buf, offset + 7), - len: buf[offset + 9], - fieldSchema: readUint64(buf, offset + 10), - offset: buf[offset + 18], +): SelvaSchemaRef => { + const value: SelvaSchemaRef = { + type: buf[offset], + flags: buf[offset + 1], + dstNodeType: (readUint16(buf, offset + 2)) as TypeId, + inverseField: buf[offset + 4], + edgeNodeType: (readUint16(buf, offset + 5)) as TypeId, + capped: readUint32(buf, offset + 7), } return value } -export const readFilterConditionProps = { - op: (buf: Uint8Array, offset: number) => unpackFilterOp(BigInt(readUint16(buf, offset))), - size: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 2), - prop: (buf: Uint8Array, offset: number) => buf[offset + 6], - start: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 7), - len: (buf: Uint8Array, offset: number) => buf[offset + 9], - fieldSchema: (buf: Uint8Array, offset: number) => readUint64(buf, offset + 10), - offset: (buf: Uint8Array, offset: number) => buf[offset + 18], +export const readSelvaSchemaRefProps = { + type: (buf: Uint8Array, offset: number) => buf[offset], + flags: (buf: Uint8Array, offset: number) => buf[offset + 1], + dstNodeType: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 2)) as TypeId, + inverseField: (buf: Uint8Array, offset: number) => buf[offset + 4], + edgeNodeType: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 5)) as TypeId, + capped: (buf: Uint8Array, offset: number) => readUint32(buf, offset + 7), } -export const createFilterCondition = (header: FilterCondition): Uint8Array => { - const buffer = new Uint8Array(FilterConditionByteSize) - writeFilterCondition(buffer, header, 0) +export const createSelvaSchemaRef = (header: SelvaSchemaRef): Uint8Array => { + const buffer = new Uint8Array(SelvaSchemaRefByteSize) + writeSelvaSchemaRef(buffer, header, 0) return buffer } -export type FilterSelect = { - size: number - typeEntry: number - typeId: TypeId +export const pushSelvaSchemaRef = ( + buf: AutoSizedUint8Array, + header: SelvaSchemaRef, +): number => { + const index = buf.length + buf.pushUint8(Number(header.type)) + buf.pushUint8(Number(header.flags)) + buf.pushUint16(Number(header.dstNodeType)) + buf.pushUint8(Number(header.inverseField)) + buf.pushUint16(Number(header.edgeNodeType)) + buf.pushUint32(Number(header.capped)) + return index } -export const FilterSelectByteSize = 14 - -export const FilterSelectAlignOf = 16 - -export const packFilterSelect = (obj: FilterSelect): bigint => { - let val = 0n - val |= (BigInt(obj.size) & 4294967295n) << 0n - val |= (BigInt(obj.typeEntry) & 18446744073709551615n) << 32n - val |= (BigInt(obj.typeId) & 65535n) << 96n - return val +export type SelvaSchemaColvec = { + type: SelvaFieldType + vecLen: number + compSize: number + hasDefault: number } -export const unpackFilterSelect = (val: bigint): FilterSelect => { - return { - size: Number((val >> 0n) & 4294967295n), - typeEntry: Number((val >> 32n) & 18446744073709551615n), - typeId: (Number((val >> 96n) & 65535n)) as TypeId, - } -} +export const SelvaSchemaColvecByteSize = 6 -export const writeFilterSelect = ( +export const SelvaSchemaColvecAlignOf = 8 + +export const writeSelvaSchemaColvec = ( buf: Uint8Array, - header: FilterSelect, + header: SelvaSchemaColvec, offset: number, ): number => { - writeUint32(buf, Number(header.size), offset) - offset += 4 - writeUint64(buf, header.typeEntry, offset) - offset += 8 - writeUint16(buf, Number(header.typeId), offset) + buf[offset] = Number(header.type) + offset += 1 + writeUint16(buf, Number(header.vecLen), offset) offset += 2 + writeUint16(buf, Number(header.compSize), offset) + offset += 2 + buf[offset] = Number(header.hasDefault) + offset += 1 return offset } -export const writeFilterSelectProps = { - size: (buf: Uint8Array, value: number, offset: number) => { - writeUint32(buf, Number(value), offset) +export const writeSelvaSchemaColvecProps = { + type: (buf: Uint8Array, value: SelvaFieldType, offset: number) => { + buf[offset] = Number(value) }, - typeEntry: (buf: Uint8Array, value: number, offset: number) => { - writeUint64(buf, value, offset + 4) + vecLen: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 1) }, - typeId: (buf: Uint8Array, value: TypeId, offset: number) => { - writeUint16(buf, Number(value), offset + 12) + compSize: (buf: Uint8Array, value: number, offset: number) => { + writeUint16(buf, Number(value), offset + 3) + }, + hasDefault: (buf: Uint8Array, value: number, offset: number) => { + buf[offset + 5] = Number(value) }, } -export const readFilterSelect = ( +export const readSelvaSchemaColvec = ( buf: Uint8Array, offset: number, -): FilterSelect => { - const value: FilterSelect = { - size: readUint32(buf, offset), - typeEntry: readUint64(buf, offset + 4), - typeId: (readUint16(buf, offset + 12)) as TypeId, +): SelvaSchemaColvec => { + const value: SelvaSchemaColvec = { + type: buf[offset], + vecLen: readUint16(buf, offset + 1), + compSize: readUint16(buf, offset + 3), + hasDefault: buf[offset + 5], } return value } -export const readFilterSelectProps = { - size: (buf: Uint8Array, offset: number) => readUint32(buf, offset), - typeEntry: (buf: Uint8Array, offset: number) => readUint64(buf, offset + 4), - typeId: (buf: Uint8Array, offset: number) => (readUint16(buf, offset + 12)) as TypeId, +export const readSelvaSchemaColvecProps = { + type: (buf: Uint8Array, offset: number) => buf[offset], + vecLen: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 1), + compSize: (buf: Uint8Array, offset: number) => readUint16(buf, offset + 3), + hasDefault: (buf: Uint8Array, offset: number) => buf[offset + 5], } -export const createFilterSelect = (header: FilterSelect): Uint8Array => { - const buffer = new Uint8Array(FilterSelectByteSize) - writeFilterSelect(buffer, header, 0) +export const createSelvaSchemaColvec = (header: SelvaSchemaColvec): Uint8Array => { + const buffer = new Uint8Array(SelvaSchemaColvecByteSize) + writeSelvaSchemaColvec(buffer, header, 0) return buffer } +export const pushSelvaSchemaColvec = ( + buf: AutoSizedUint8Array, + header: SelvaSchemaColvec, +): number => { + const index = buf.length + buf.pushUint8(Number(header.type)) + buf.pushUint16(Number(header.vecLen)) + buf.pushUint16(Number(header.compSize)) + buf.pushUint8(Number(header.hasDefault)) + return index +} + diff --git a/test/HLLunion.ts b/test/HLLunion.ts index 0cca03bbc7..a2bf0eeaf5 100644 --- a/test/HLLunion.ts +++ b/test/HLLunion.ts @@ -83,7 +83,7 @@ await test.skip('dev', async (t) => { // OK await db // dont break line - .query('user') + .query2('user') .include('**') .groupBy('country') .sum('name') @@ -93,7 +93,7 @@ await test.skip('dev', async (t) => { // OK // await db // // dont break line - // .query('user') + // .query2('user') // .groupBy('name') // .sum('flap') // .get() @@ -101,7 +101,7 @@ await test.skip('dev', async (t) => { // TODO: display is tagging "sum" when count with alias // TODO: also there os a misplaced comma in inspect // await db - // .query('article') + // .query2('article') // .include((q) => q('contributors').count('votes'), 'name') // .get() // .inspect() @@ -194,17 +194,17 @@ await test.skip('dev', async (t) => { // // TODO: display is tagging "sum" when count with alias // // TODO: also there os a misplaced comma in inspect // // await db -// // .query('article') +// // .query2('article') // // .include((q) => q('contributors').count('votes'), 'name') // // .get() // // .inspect() // // deepEqual( // // await db -// // .query('article') +// // .query2('article') // // .include((q) => q('contributors').sum('flap'), 'name') // // .get() -// // .toObject(), +// // , // // [ // // { id: 1, name: 'The wonders of Strudel', contributors: { flap: 100 } }, // // { @@ -218,7 +218,7 @@ await test.skip('dev', async (t) => { // await db // // dont break line -// .query('user') +// .query2('user') // .groupBy('country') // .cardinality('myUniqueValuesCount') // .get() @@ -228,14 +228,14 @@ await test.skip('dev', async (t) => { // // const q = await db // // // dont break line -// // .query('users') +// // .query2('users') // // .get() // // q.inspect() // // await db // // // dont break line -// // .query('user') +// // .query2('user') // // .groupBy('name') // // .sum('flap') // // .get() diff --git a/test/aggregate/basic.ts b/test/aggregate/basic.ts index ce1d009eb8..2f570b92c0 100644 --- a/test/aggregate/basic.ts +++ b/test/aggregate/basic.ts @@ -1,19 +1,10 @@ -import { equal } from 'node:assert' -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' -import { throws, deepEqual } from '../shared/assert.js' -import { fastPrng } from '../../src/utils/index.js' +import { throws, deepEqual, equal } from '../shared/assert.js' +import { testDb } from '../shared/index.js' +import { fastPrng } from '../../src/utils/fastPrng.js' await test('sum top level', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -43,6 +34,7 @@ await test('sum top level', async (t) => { }, }, }) + const nl1 = db.create('vote', { country: 'bb', flap: { hello: 100 }, @@ -56,69 +48,35 @@ await test('sum top level', async (t) => { country: 'aa', AU: 15, }) - db.drain() - db.create('sequence', { votes: nl1 }) - db.create('sequence', { votes: nl2 }) - db.create('sequence', { votes: au1 }) - // const s = db.create('sequence', { votes: [nl1, nl2, au1] }) + db.create('sequence', { votes: [nl1, nl2, au1] }) - // top level ---------------------------------- deepEqual( - await db.query('vote').sum('NL').get().toObject(), + await db.query2('vote').sum('NL').get(), { NL: { sum: 30 } }, 'sum, top level, single prop', ) - // deepEqual( - // await db - // .query('vote') - // .filter('country', '=', 'aa') - // .sum('NL') - // .get() - // .toObject(), - // { NL: { sum: 20 } }, - // 'sum with filter', - // ) - deepEqual( - await db.query('vote').sum('NL', 'AU').get().toObject(), + await db.query2('vote').sum('NL', 'AU').get(), { NL: { sum: 30 }, AU: { sum: 15 } }, 'sum, top level, multiple props', ) throws(async () => { - await db.query('vote').sum().get().toObject() + // @ts-expect-error + await db.query2('vote').sum().get() }, 'sum() returning nothing') - // deepEqual( - // await db - // .query('vote') - // .filter('country', '=', 'zz') - // .sum('NL') - // .get() - // .toObject(), - // { NL: { sum: 0 } }, - // 'sum with empty result set', - // ) - deepEqual( - await db.query('vote').sum('flap.hello').get().toObject(), + await db.query2('vote').sum('flap.hello').get(), { flap: { hello: { sum: 100 } } }, 'nested object notation', ) }) await test('top level count', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -160,71 +118,57 @@ await test('top level count', async (t) => { AU: 15, }) - // const s = db.create('sequence', { votes: [nl1, nl2, au1] }) - db.drain() - db.create('sequence', { votes: nl1 }) - db.create('sequence', { votes: nl2 }) - db.create('sequence', { votes: au1 }) + const s = db.create('sequence', { votes: [nl1, nl2, au1] }) - // // top level ---------------------------------- + // top level ---------------------------------- deepEqual( - await db.query('vote').count().get().toObject(), + await db.query2('vote').count().get(), { count: 3 }, 'count, top level, prop', ) // deepEqual( // await db - // .query('vote') - // .filter('country', '=', 'aa') + // .query2('vote') + // .filter('country', '=', 'aa') // string filter not implemented yet // .count() - // .get() - // .toObject(), + // .get(), // { count: 2 }, // 'count, top level, with filter', // ) deepEqual( - await db.query('vote').include('IT').count().get(), + await db.query2('vote').include('IT').count().get(), { count: 3 }, 'count, top level, ignoring include', ) // deepEqual( // await db - // .query('vote') - // .filter('country', '=', 'zz') + // .query2('vote') + // .filter('country', '=', 'zz') // string filter not implemented yet // .count() - // .get() - // .toObject(), + // .get(), // { count: 0 }, // 'count, with no match filtering, string value', // ) deepEqual( - await db.query('vote').filter('NL', '=', 20).count().get(), + await db.query2('vote').filter('NL', '=', 20).count().get(), { count: 1 }, 'count, with filtering an int value', ) deepEqual( - await db.query('vote').filter('NL', '>', 1e6).count().get(), + await db.query2('vote').filter('NL', '>', 255).count().get(), { count: 0 }, 'count, with no match filtering, int value', ) }) await test('two phase accumulation', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -275,16 +219,10 @@ await test('two phase accumulation', async (t) => { country: 'Brazil', NL: 50, }) - // const s = db.create('sequence', { votes: [nl1, nl2, au1, au2, br1] }) - db.drain() - db.create('sequence', { votes: nl1 }) - db.create('sequence', { votes: nl2 }) - db.create('sequence', { votes: au1 }) - db.create('sequence', { votes: au2 }) - db.create('sequence', { votes: br1 }) + const s = db.create('sequence', { votes: [nl1, nl2, au1, au2, br1] }) deepEqual( - await db.query('vote').stddev('NL', { mode: 'sample' }).get(), + await db.query2('vote').stddev('NL', { mode: 'sample' }).get(), { NL: { stddev: 15.56598856481656 }, }, @@ -292,7 +230,7 @@ await test('two phase accumulation', async (t) => { ) deepEqual( - await db.query('vote').stddev('NL', { mode: 'sample' }).get(), + await db.query2('vote').stddev('NL', { mode: 'sample' }).get(), { NL: { stddev: 15.56598856481656 }, }, @@ -300,7 +238,7 @@ await test('two phase accumulation', async (t) => { ) deepEqual( - await db.query('vote').stddev('NL', { mode: 'population' }).get(), + await db.query2('vote').stddev('NL', { mode: 'population' }).get(), { NL: { stddev: 13.922643427165687 }, }, @@ -308,7 +246,7 @@ await test('two phase accumulation', async (t) => { ) deepEqual( - await db.query('vote').sum('NL').get().toObject(), + await db.query2('vote').sum('NL').get(), { NL: { sum: 118 }, }, @@ -317,11 +255,10 @@ await test('two phase accumulation', async (t) => { deepEqual( await db - .query('vote') + .query2('vote') .stddev('NL', { mode: 'population' }) .groupBy('country') - .get() - .toObject(), + .get(), { Brazil: { NL: { stddev: 0 }, @@ -336,61 +273,53 @@ await test('two phase accumulation', async (t) => { 'stddev, top level, groupBy', ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').stddev('NL', { mode: 'population' })) - // .get() - // .toObject(), - // [ - // { - // id: 1, - // votes: { - // NL: { stddev: 13.922643427165687 }, - // }, + // branched References not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').stddev('NL', { mode: 'population' })) + // .get(), + // [ + // { + // id: 1, + // votes: { + // NL: { stddev: 13.922643427165687 }, // }, - // ], - // 'stddev, branched References, no groupBy', - // ) + // }, + // ], + // 'stddev, branched References, no groupBy', + // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => - // q('votes').stddev('NL', { mode: 'population' }).groupBy('country'), - // ) - // .get() - // .toObject(), - // [ - // { - // id: 1, - // votes: { - // Brazil: { - // NL: { stddev: 0 }, - // }, - // bb: { - // NL: { stddev: 6.5 }, - // }, - // aa: { - // NL: { stddev: 2.5 }, - // }, + // branched References not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => + // q('votes').stddev('NL', { mode: 'population' }).groupBy('country'), + // ) + // .get(), + // [ + // { + // id: 1, + // votes: { + // Brazil: { + // NL: { stddev: 0 }, + // }, + // bb: { + // NL: { stddev: 6.5 }, + // }, + // aa: { + // NL: { stddev: 2.5 }, // }, // }, - // ], - // 'stddev, branched References, groupBy', - // ) + // }, + // ], + // 'stddev, branched References, groupBy', + // ) }) await test('numeric types', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -457,16 +386,10 @@ await test('numeric types', async (t) => { PL: -50, FI: -50.999, }) - // const s = db.create('sequence', { votes: [nl1, nl2, au1, au2, br1] }) - db.drain() - db.create('sequence', { votes: nl1 }) - db.create('sequence', { votes: nl2 }) - db.create('sequence', { votes: au1 }) - db.create('sequence', { votes: au2 }) - db.create('sequence', { votes: br1 }) + const s = db.create('sequence', { votes: [nl1, nl2, au1, au2, br1] }) deepEqual( - await db.query('vote').groupBy('region').get(), + await db.query2('vote').groupBy('region').get(), { bb: {}, aa: {}, @@ -476,7 +399,7 @@ await test('numeric types', async (t) => { ) deepEqual( - await db.query('vote').sum('NL', 'FI').groupBy('region').get(), + await db.query2('vote').sum('NL', 'FI').groupBy('region').get(), { bb: { NL: { sum: 33 }, @@ -493,8 +416,9 @@ await test('numeric types', async (t) => { }, 'sum, main, group by', ) + deepEqual( - await db.query('vote').count().groupBy('region').get(), + await db.query2('vote').count().groupBy('region').get(), { bb: { count: 2, @@ -508,8 +432,9 @@ await test('numeric types', async (t) => { }, 'count, main, group by', ) + deepEqual( - await db.query('vote').avg('NL', 'PT', 'FI').groupBy('region').get(), + await db.query2('vote').avg('NL', 'PT', 'FI').groupBy('region').get(), { bb: { NL: { avg: 16.5 }, @@ -529,12 +454,9 @@ await test('numeric types', async (t) => { }, 'avg, main, group by', ) + deepEqual( - await db - .query('vote') - .harmonicMean('NL', 'PT', 'FI') - .groupBy('region') - .get(), + await db.query2('vote').hmean('NL', 'PT', 'FI').groupBy('region').get(), { bb: { NL: { hmean: 13.93939393939394 }, @@ -552,11 +474,12 @@ await test('numeric types', async (t) => { FI: { hmean: -50.99900000000001 }, // harmonic mean is not designed for negative numbers but possible }, }, - 'harmonic_mean, main, group by', + 'hmean, main, group by', ) + deepEqual( await db - .query('vote') + .query2('vote') .stddev('NL', 'PL', { mode: 'population' }) .groupBy('region') .get(), @@ -576,8 +499,9 @@ await test('numeric types', async (t) => { }, 'stddev, main, group by, pop', ) + deepEqual( - await db.query('vote').stddev('NL', 'PL').groupBy('region').get(), + await db.query2('vote').stddev('NL', 'PL').groupBy('region').get(), { bb: { NL: { stddev: 9.192388155425117 }, @@ -594,9 +518,10 @@ await test('numeric types', async (t) => { }, 'stddev, main, group by, default=sample', ) + deepEqual( await db - .query('vote') + .query2('vote') .var('NL', 'PL', { mode: 'population' }) .groupBy('region') .get(), @@ -616,9 +541,10 @@ await test('numeric types', async (t) => { }, 'variance, main, group by, population', ) + deepEqual( await db - .query('vote') + .query2('vote') .var('NL', 'PL', { mode: 'sample' }) .groupBy('region') .get(), @@ -629,8 +555,9 @@ await test('numeric types', async (t) => { }, 'variance, main, group by, sample', ) + deepEqual( - await db.query('vote').var('NL', 'PL').groupBy('region').get(), + await db.query2('vote').var('NL', 'PL').groupBy('region').get(), { bb: { NL: { variance: 84.5 }, PL: { variance: 264.5 } }, aa: { NL: { variance: 24.5 }, PL: { variance: 264.5 } }, @@ -638,8 +565,9 @@ await test('numeric types', async (t) => { }, 'variance, main, group by, default (sample)', ) + deepEqual( - await db.query('vote').max('NL', 'NO', 'PT', 'FI').groupBy('region').get(), + await db.query2('vote').max('NL', 'NO', 'PT', 'FI').groupBy('region').get(), { bb: { NL: { max: 23 }, @@ -662,8 +590,9 @@ await test('numeric types', async (t) => { }, 'max, main, group by', ) + deepEqual( - await db.query('vote').min('NL', 'NO', 'PT', 'FI').groupBy('region').get(), + await db.query2('vote').min('NL', 'NO', 'PT', 'FI').groupBy('region').get(), { bb: { NL: { min: 10 }, @@ -687,402 +616,407 @@ await test('numeric types', async (t) => { 'min, main, group by', ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').sum('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // NL: { sum: 176 }, - // }, + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').sum('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // NL: { sum: 176 }, // }, - // ], - // 'references, not grouped', - // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').avg('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // NL: { avg: 35.2 }, - // }, + // }, + // ], + // 'references, not grouped', + // ) + + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').avg('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // NL: { avg: 35.2 }, // }, - // ], - // 'avg, references, not grouped', - // ) + // }, + // ], + // 'avg, references, not grouped', + // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').harmonicMean('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // NL: { hmean: 24.18565978675536 }, - // }, + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').hmean('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // NL: { hmean: 24.18565978675536 }, // }, - // ], - // 'harmonic_mean, references, not grouped', - // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').groupBy('region').sum('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { - // NL: { sum: 33 }, - // }, - // aa: { - // NL: { sum: 93 }, - // }, - // Great: { - // NL: { sum: 50 }, - // }, + // }, + // ], + // 'hmean, references, not grouped', + // ) + + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').groupBy('region').sum('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { + // NL: { sum: 33 }, + // }, + // aa: { + // NL: { sum: 93 }, + // }, + // Great: { + // NL: { sum: 50 }, // }, // }, - // ], - // 'sum, references, group by', - // ) + // }, + // ], + // 'sum, references, group by', + // ) - // // await db.query('vote').groupBy('sequence').sum('NL').get().inspect() + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').groupBy('region').count()) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { count: 2 }, + // aa: { count: 2 }, + // Great: { count: 1 }, + // }, + // }, + // ], + // 'count, references, group by', + // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').groupBy('region').count()) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { count: 2 }, - // aa: { count: 2 }, - // Great: { count: 1 }, + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => + // q('votes').groupBy('region').stddev('NL', { mode: 'population' }), + // ) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { + // NL: { stddev: 6.5 }, // }, - // }, - // ], - // 'count, references, group by', - // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => - // q('votes').groupBy('region').stddev('NL', { mode: 'population' }), - // ) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { - // NL: { stddev: 6.5 }, - // }, - // aa: { - // NL: { stddev: 3.5 }, - // }, - // Great: { - // NL: { stddev: 0 }, - // }, + // aa: { + // NL: { stddev: 3.5 }, + // }, + // Great: { + // NL: { stddev: 0 }, // }, // }, - // ], - // 'stddev, references, group by', - // ) + // }, + // ], + // 'stddev, references, group by', + // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').groupBy('region').stddev('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { NL: { stddev: 9.192388155425117 } }, - // aa: { NL: { stddev: 4.949747468305833 } }, - // Great: { NL: { stddev: 0 } }, - // }, + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').groupBy('region').stddev('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { NL: { stddev: 9.192388155425117 } }, + // aa: { NL: { stddev: 4.949747468305833 } }, + // Great: { NL: { stddev: 0 } }, // }, - // ], - // 'stddev, references, group by', - // ) + // }, + // ], + // 'stddev, references, group by', + // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => - // q('votes').groupBy('region').var('NL', { mode: 'population' }), - // ) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { - // NL: { variance: 42.25 }, - // }, - // aa: { - // NL: { variance: 12.25 }, - // }, - // Great: { - // NL: { variance: 0 }, - // }, + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => + // q('votes').groupBy('region').var('NL', { mode: 'population' }), + // ) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { + // NL: { variance: 42.25 }, // }, - // }, - // ], - // 'variance, references, group by, pop', - // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => - // q('votes').groupBy('region').var('NL', { mode: 'sample' }), - // ) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { NL: { variance: 84.5 } }, - // aa: { NL: { variance: 24.5 } }, - // Great: { NL: { variance: 0 } }, + // aa: { + // NL: { variance: 12.25 }, // }, - // }, - // ], - // 'variance, references, group by, sample', - // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').groupBy('region').var('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { NL: { variance: 84.5 } }, - // aa: { NL: { variance: 24.5 } }, - // Great: { NL: { variance: 0 } }, + // Great: { + // NL: { variance: 0 }, // }, // }, - // ], - // 'variance, references, group by, defaul (sample)', - // ) + // }, + // ], + // 'variance, references, group by, pop', + // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').groupBy('region').avg('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { - // NL: { avg: 16.5 }, - // }, - // aa: { - // NL: { avg: 46.5 }, - // }, - // Great: { - // NL: { avg: 50 }, - // }, + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => + // q('votes').groupBy('region').var('NL', { mode: 'sample' }), + // ) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { NL: { variance: 84.5 } }, + // aa: { NL: { variance: 24.5 } }, + // Great: { NL: { variance: 0 } }, + // }, + // }, + // ], + // 'variance, references, group by, sample', + // ) + + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').groupBy('region').var('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { NL: { variance: 84.5 } }, + // aa: { NL: { variance: 24.5 } }, + // Great: { NL: { variance: 0 } }, + // }, + // }, + // ], + // 'variance, references, group by, defaul (sample)', + // ) + + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').groupBy('region').avg('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { + // NL: { avg: 16.5 }, + // }, + // aa: { + // NL: { avg: 46.5 }, + // }, + // Great: { + // NL: { avg: 50 }, // }, // }, - // ], - // 'avg, references, group by', - // ) + // }, + // ], + // 'avg, references, group by', + // ) - // deepEqual( - // await db - // .query('sequence') - // .include((q) => q('votes').groupBy('region').harmonicMean('NL')) - // .get(), - // [ - // { - // id: 1, - // votes: { - // bb: { - // NL: { hmean: 13.93939393939394 }, - // }, - // aa: { - // NL: { hmean: 46.236559139784944 }, - // }, - // Great: { - // NL: { hmean: 50 }, - // }, + // Branched queries not implemented yet in AST + // deepEqual( + // await db + // .query2('sequence') + // .include((q) => q('votes').groupBy('region').hmean('NL')) + // .get(), + // [ + // { + // id: 1, + // votes: { + // bb: { + // NL: { hmean: 13.93939393939394 }, + // }, + // aa: { + // NL: { hmean: 46.236559139784944 }, + // }, + // Great: { + // NL: { hmean: 50 }, // }, // }, - // ], - // 'harmonic_mean, references, group by', - // ) + // }, + // ], + // 'hmean, references, group by', + // ) }) -// await test.skip('fixed length strings', async (t) => { -// const db = new BasedDb({ -// path: t.tmp, -// }) -// await db.start({ clean: true }) -// t.after(() => db.stop()) - -// await db.setSchema({ -// types: { -// product: { -// name: { type: 'string', maxBytes: 10 }, -// flap: 'number', -// }, -// shelve: { -// code: { type: 'string', maxBytes: 4 }, -// products: { -// items: { -// ref: 'product', -// prop: 'product', -// }, -// }, -// }, -// }, -// }) - -// const rnd = fastPrng() -// for (let i = 0; i < 100; i++) { -// let p = db.create('product', { -// name: `lala ${rnd(0, 10)}`, -// flap: Math.random() * 100, -// }) -// db.create('shelve', { -// code: `S${rnd(0, 10)}`, -// products: [p], -// }) -// } - -// equal( -// Number( -// Object.keys( -// await db -// .query('product') -// .include('*') -// .avg('flap') -// .groupBy('name') -// .get() -// .toObject(), -// )[0].substring(4, 6), -// ) < 100, -// true, -// 'fixed length strings on main', -// ) +await test('fixed length strings', async (t) => { + const db = await testDb(t, { + types: { + product: { + name: { type: 'string', maxBytes: 10 }, + flap: 'number', + }, + shelve: { + code: { type: 'string', maxBytes: 4 }, + products: { + items: { + ref: 'product', + prop: 'product', + }, + }, + }, + }, + }) -// equal( -// Number( -// Object.keys( -// await db -// .query('shelve') -// .include((q) => q('products').avg('flap').groupBy('name')) -// .get() -// .toObject(), -// )[0].substring(4, 6), -// ) < 100, -// true, -// 'fixed length strings on references', -// ) -// }) + const rnd = fastPrng() + for (let i = 0; i < 100; i++) { + let p = db.create('product', { + name: `lala ${rnd(0, 10)}`, + flap: Math.random() * 100, + }) + db.drain() + db.create('shelve', { + code: `S${rnd(0, 10)}`, + products: [p], + }) + } + + equal( + Number( + Object.keys( + await db + .query2('product') + .include('*') + .avg('flap') + .groupBy('name') + .get(), + )[0].substring(4, 6), + ) < 100, + true, + 'fixed length strings on main', + ) -// await test('range', async (t) => { -// const db = new BasedDb({ -// path: t.tmp, -// }) -// await db.start({ clean: true }) -// t.after(() => db.stop()) + // Branched queries not implemented yet in AST + // equal( + // Number( + // Object.keys( + // await db + // .query2('shelve') + // .include((q) => q('products').avg('flap').groupBy('name')) + // .get(), + // )[0].substring(4, 6), + // ) < 100, + // true, + // 'fixed length strings on references', + // ) +}) -// const ter = ['lala', 'lele', 'lili'] +await test('range', async (t) => { + const ter = ['lala', 'lele', 'lili'] -// await db.setSchema({ -// types: { -// job: { -// day: 'timestamp', -// tip: 'number', -// employee: { -// ref: 'employee', -// prop: 'employee', -// }, -// }, -// employee: { -// name: 'string', -// area: { -// items: { ref: 'territory', prop: 'territory' }, -// }, -// }, -// territory: { -// name: ter, -// flap: 'number', -// state: { -// ref: 'state', -// prop: 'state', -// }, -// }, -// state: { -// name: 'string', -// }, -// }, -// }) + const db = await testDb(t, { + types: { + job: { + day: 'timestamp', + tip: 'number', + employee: { + ref: 'employee', + prop: 'employee', + }, + }, + employee: { + name: 'string', + area: { + items: { ref: 'territory', prop: 'territory' }, + }, + }, + territory: { + name: ter, + flap: 'number', + state: { + ref: 'state', + prop: 'state', + }, + }, + state: { + name: 'string', + }, + }, + }) -// const rnd = fastPrng() -// for (let i = 0; i < 10; i++) { -// const d = new Date('11/11/2024 11:00-3') -// db.create('job', { -// day: new Date(d.getTime() + Math.random() * 1e7), -// tip: Math.random() * 20, -// }) -// const s = db.create('state', { -// name: `statelala ${rnd(0, 2)}`, -// }) -// const t = db.create('territory', { -// name: ter[rnd(0, ter.length - 1)], -// flap: Math.random() * 100, -// state: s, -// }) -// db.create('employee', { -// name: `emplala ${rnd(0, 10)}`, -// area: [t], -// }) -// } + const rnd = fastPrng(new Date().getTime()) + for (let i = 0; i < 10; i++) { + const d = new Date('11/12/2024 00:00-3') + //@ts-ignore + db.create('job', { + day: new Date(d.getTime() + 3600 * 1000 * rnd(2, 4)), // 11 Dec 24 2:00, 3:00 and 4:00h + tip: Math.random() * 20, + }) + //@ts-ignore + const s = db.create('state', { + name: `statelala ${rnd(0, 2)}`, + }) + //@ts-ignore + const t = db.create('territory', { + name: ter[rnd(0, ter.length - 1)], + flap: Math.random() * 100, + state: s, + }) + db.create('employee', { + name: `emplala ${rnd(0, 10)}`, + //@ts-ignore + area: t, + }) + } -// deepEqual( -// Object.keys( -// await db -// .query('job') -// .groupBy('day', { step: 'hour', timeZone: 'America/Sao_Paulo' }) -// .avg('tip') -// .range(0, 2) -// .get() -// .toObject(), -// ).length, -// 2, -// 'range group by main', -// ) + deepEqual( + Object.keys( + await db + .query2('job') + .groupBy('day', { step: 'hour', timeZone: 'America/Sao_Paulo' }) + .avg('tip') + .range(0, 2) + .get(), + ).length, + 2, + 'range group by main', + ) -// deepEqual( -// Object.keys( -// await db -// .query('employee') -// .include((q) => q('area').groupBy('name').sum('flap'), '*') -// .range(0, 2) -// .get() -// .toObject(), -// ).length, -// 2, -// 'range group by references', -// ) -// }) + // deepEqual( + // Object.keys( + // await db + // .query2('employee') + // .include((q) => q('area').groupBy('name').sum('flap'), '*') + // .range(0, 2) + // .get(), + // ).length, + // 2, + // 'range group by references', + // ) +}) diff --git a/test/aggregate/deep.ts b/test/aggregate/deep.ts index d1a0217bdc..b9b899d16e 100644 --- a/test/aggregate/deep.ts +++ b/test/aggregate/deep.ts @@ -1,23 +1,12 @@ /* * Deep = Reference(s), Edges and nests */ -import { equal } from 'node:assert' -import { BasedDb } from '../../src/index.js' -import { allCountryCodes } from '../shared/examples.js' import test from '../shared/test.js' -import { throws, deepEqual } from '../shared/assert.js' -import { fastPrng } from '../../src/utils/index.js' +import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' -await test.skip('sum branched includes', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +await test('sum branched includes', async (t) => { + const db = await testDb(t, { types: { sequence: { props: { @@ -64,24 +53,18 @@ await test.skip('sum branched includes', async (t) => { deepEqual( await db - .query('sequence') - .include((select) => { - select('votes').sum('NL', 'AU') - }) - .get() - .toObject(), + .query2('sequence') + .include((select) => select('votes').sum('NL', 'AU')) + .get(), [{ id: 1, votes: { NL: { sum: 30 }, AU: { sum: 15 } } }], 'brached include, sum, references', ) deepEqual( await db - .query('sequence') - .include((select) => { - select('votes').groupBy('country').sum('NL', 'AU') - }) - .get() - .toObject(), + .query2('sequence') + .include((select) => select('votes').groupBy('country').sum('NL', 'AU')) + .get(), [ { id: 1, @@ -94,29 +77,21 @@ await test.skip('sum branched includes', async (t) => { 'branched include, references, groupBy', ) - deepEqual( - await db - .query('sequence') - .include((select) => { - select('votes').filter('country', '=', 'aa').sum('NL', 'AU') - }) - .get() - .toObject(), - [{ id: 1, votes: { NL: { sum: 20 }, AU: { sum: 15 } } }], - 'branched include, references, filtered, groupBy', - ) + // deepEqual( + // await db + // .query2('sequence') + // .include((select) => { + // select('votes').filter('country', '=', 'aa').sum('NL', 'AU') // string filter not implemented and also filter in refs group not implemented + // }) + // .get() + // , + // [{ id: 1, votes: { NL: { sum: 20 }, AU: { sum: 15 } } }], + // 'branched include, references, filtered, groupBy', + // ) }) -await test.skip('count branched includes', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +await test('count branched includes', async (t) => { + const db = await testDb(t, { types: { sequence: { props: { @@ -163,24 +138,18 @@ await test.skip('count branched includes', async (t) => { deepEqual( await db - .query('sequence') - .include((select) => { - select('votes').count() - }) - .get() - .toObject(), + .query2('sequence') + .include((select) => select('votes').count()) + .get(), [{ id: 1, votes: { count: 3 } }], 'brached include, count, references', ) deepEqual( await db - .query('sequence') - .include((select) => { - select('votes').groupBy('country').sum('NL', 'AU') - }) - .get() - .toObject(), + .query2('sequence') + .include((select) => select('votes').groupBy('country').sum('NL', 'AU')) + .get(), [ { id: 1, @@ -193,29 +162,21 @@ await test.skip('count branched includes', async (t) => { 'branched include, references, groupBy', ) - deepEqual( - await db - .query('sequence') - .include((select) => { - select('votes').filter('country', '=', 'aa').count() - }) - .get() - .toObject(), - [{ id: 1, votes: { count: 2 } }], - 'count, branched include, references, filtered', - ) + // deepEqual( + // await db + // .query2('sequence') + // .include((select) => { + // select('votes').filter('country', '=', 'aa').count() // string filter not implemented and also filter in refs group not implemented + // }) + // .get() + // , + // [{ id: 1, votes: { count: 2 } }], + // 'count, branched include, references, filtered', + // ) }) -await test.skip('agg on references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +await test('agg on references', async (t) => { + const db = await testDb(t, { types: { team: { props: { @@ -275,6 +236,8 @@ await test.skip('agg on references', async (t) => { gamesPlayed: 9, }) + db.drain() + const t1 = db.create('team', { teamName: 'Grêmio', city: 'Porto Alegre', @@ -304,17 +267,31 @@ await test.skip('agg on references', async (t) => { }) const result = await db - .query('team') - .include('teamName', 'city', (select) => { - select('players').groupBy('position').sum('goalsScored', 'gamesPlayed') - }) + .query2('team') + .include('teamName', 'city', (select) => + select('players').groupBy('position').sum('goalsScored', 'gamesPlayed'), + ) .get() deepEqual( - result.toObject(), + result, [ { id: 1, + teamName: 'Boca Juniors', + city: 'Buenos Aires', + players: {}, // does anybody wants to play for Boca? + }, + { + id: 2, + teamName: 'Barcelona', + city: 'Barcelona', + players: { + Forward: { goalsScored: { sum: 5 }, gamesPlayed: { sum: 5 } }, // Lewandowski + }, + }, + { + id: 3, teamName: 'Grêmio', city: 'Porto Alegre', players: { @@ -323,7 +300,7 @@ await test.skip('agg on references', async (t) => { }, }, { - id: 2, + id: 4, teamName: 'Ajax', city: 'Amsterdam', players: { @@ -331,34 +308,14 @@ await test.skip('agg on references', async (t) => { Defender: { goalsScored: { sum: 2 }, gamesPlayed: { sum: 9 } }, // Jorrel (2,9) }, }, - { - id: 3, - teamName: 'Boca Juniors', - city: 'Buenos Aires', - players: {}, // does anybody wants to play for Boca? - }, - { - id: 4, - teamName: 'Barcelona', - city: 'Barcelona', - players: { - Forward: { goalsScored: { sum: 5 }, gamesPlayed: { sum: 5 } }, // Lewandowski - }, - }, ], 'Include parent props, with referenced items grouped by their own prop, and aggregations', ) }) await test('enums', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - const types = ['IPA', 'Lager', 'Ale', 'Stout', 'Wit', 'Dunkel', 'Tripel'] - await db.setSchema({ + const db = await testDb(t, { types: { beer: { props: { @@ -397,7 +354,7 @@ await test('enums', async (t) => { }) deepEqual( - await db.query('beer').avg('price').groupBy('type').get(), + await db.query2('beer').avg('price').groupBy('type').get(), { Tripel: { price: { avg: 11.85 }, @@ -410,7 +367,7 @@ await test('enums', async (t) => { ) deepEqual( - await db.query('beer').harmonicMean('price').groupBy('type').get(), + await db.query2('beer').hmean('price').groupBy('type').get(), { Tripel: { price: { hmean: 11.839662447257384 }, @@ -419,18 +376,12 @@ await test('enums', async (t) => { price: { hmean: 7.199999999999999 }, // 7.2 should be approximated }, }, - 'harmonic_mean by enum in main', + 'hmean by enum in main', ) }) await test.skip('refs with enums ', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { movie: { name: 'string', @@ -467,7 +418,7 @@ await test.skip('refs with enums ', async (t) => { deepEqual( await db - .query('actor') + .query2('actor') .include((q) => q('movies').groupBy('genre').count()) .get(), [ @@ -493,13 +444,7 @@ await test.skip('refs with enums ', async (t) => { }) await test('cardinality', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { lunch: { week: 'string', @@ -560,13 +505,13 @@ await test('cardinality', async (t) => { }) deepEqual( - await db.query('lunch').cardinality('Mon').get(), + await db.query2('lunch').cardinality('Mon').get(), { Mon: { cardinality: 7 } }, 'main cardinality no group by', ) deepEqual( - await db.query('lunch').cardinality('Mon').groupBy('week').get(), + await db.query2('lunch').cardinality('Mon').groupBy('week').get(), { 27: { Mon: { cardinality: 5 }, @@ -579,62 +524,81 @@ await test('cardinality', async (t) => { ) }) -// await test.skip('cardinality on references', async (t) => { -// const db = new BasedDb({ -// path: t.tmp, -// }) -// await db.start({ clean: true }) -// t.after(() => db.stop()) - -// await db.setSchema({ -// types: { -// booth: { -// company: 'string', -// // badgesScanned: 'number', -// badgesScanned: 'cardinality', -// }, -// fair: { -// day: 'timestamp', -// booths: { -// items: { -// ref: 'booth', -// prop: 'booth', -// }, -// }, -// }, -// }, -// }) - -// const bg = db.create('booth', { -// company: 'big one', -// badgesScanned: ['engineer 1', 'salesman', 'spy', 'annonymous'], -// }) -// const stp = db.create('booth', { -// company: 'just another startup', -// badgesScanned: ['nice ceo', 'entusiastic dev'], -// }) -// db.create('fair', { -// day: new Date('08/02/2024'), -// booths: [bg, stp], -// }) - -// await db.query('fair').include('booths.badgesScanned').get().inspect() -// await db -// .query('fair') -// .cardinality('booths.badgesScanned') -// .groupBy('day') -// .get() -// .inspect() -// }) +await test('cardinality on references', async (t) => { + const db = await testDb(t, { + types: { + booth: { + company: 'string', + // badgesScanned: 'number', + badgesScanned: 'cardinality', + }, + fair: { + day: 'timestamp', + booths: { + items: { + ref: 'booth', + prop: 'booth', + }, + }, + }, + }, + }) -await test('group by reference ids', async (t) => { - const db = new BasedDb({ - path: t.tmp, + const bg = db.create('booth', { + company: 'big one', + badgesScanned: ['engineer 1', 'salesman', 'spy', 'annonymous'], }) - await db.start({ clean: true }) - t.after(() => db.stop()) + const stp = db.create('booth', { + company: 'just another startup', + badgesScanned: ['nice ceo', 'entusiastic dev'], + }) + db.create('fair', { + day: new Date('08/02/2024'), + booths: [bg, stp], + }) + + // await db.query2('fair').include('booths.badgesScanned').get().inspect() + // await db + // .query2('fair') + // .cardinality('booths.badgesScanned') + // .groupBy('day') + // .get() + // .inspect() + // }) + deepEqual( + await db + .query2('fair') + .include((s) => s('booths').cardinality('badgesScanned')) + .get(), + [ + { + id: 1, + booths: { + badgesScanned: { + cardinality: 6, + }, + }, + }, + ], + 'branched query with cardinality function', + ) + + /* + * Nested syntax: + */ + + // await db.query2('fair').include('booths.badgesScanned').get().inspect() + + // await db + // .query2('fair') + // .cardinality('booths.badgesScanned') + // .groupBy('day') + // .get() + // .inspect() +}) - await db.setSchema({ +await test('group by reference ids', async (t) => { + const db = await testDb(t, { types: { trip: { pickup: 'timestamp', @@ -680,16 +644,15 @@ await test('group by reference ids', async (t) => { distance: 523.1, vehicle: v2, }) - db.drain() const d1 = db.create('driver', { name: 'Luc Ferry', rank: 5, vehicle: v2, - trips: t1, + trips: [t1], }) deepEqual( - await db.query('driver').sum('rank').groupBy('vehicle').get(), + await db.query2('driver').sum('rank').groupBy('vehicle').get(), { 2: { rank: { sum: 5 }, @@ -698,36 +661,30 @@ await test('group by reference ids', async (t) => { 'group by reference id', ) - // deepEqual( - // await db - // .query('driver') - // .include((q) => q('trips').groupBy('vehicle').max('distance')) - // .include('*') - // .get(), - // [ - // { - // id: 1, - // rank: 5, - // name: 'Luc Ferry', - // trips: { - // 2: { - // distance: { max: 523.1 }, - // }, - // }, - // }, - // ], - // 'brached query with nested group by reference id', - // ) + deepEqual( + await db + .query2('driver') + .include((q) => q('trips').groupBy('vehicle').max('distance')) + .include('*') + .get(), + [ + { + id: 1, + rank: 5, + name: 'Luc Ferry', + trips: { + 2: { + distance: { max: 523.1 }, + }, + }, + }, + ], + 'branched query with nested group by reference id', + ) }) await test.skip('nested references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -760,13 +717,15 @@ await test.skip('nested references', async (t) => { strong: 4, }) - // await db.query('user').include('*', '**').get().inspect(10) + // await db.query2('user').include('*', '**').get().inspect(10) deepEqual( - await db.query('user').sum('friends.strong').get(), + await db.query2('user').sum('friends.strong').get(), { - strong: { - sum: 7, + friends: { + strong: { + sum: 7, + }, }, }, 'nested references access with dot sintax', @@ -774,13 +733,7 @@ await test.skip('nested references', async (t) => { }) await test.skip('edges aggregation', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { movie: { name: 'string', @@ -846,7 +799,7 @@ await test.skip('edges aggregation', async (t) => { }) // await db - // .query('movie') + // .query2('movie') // .include('*', '**') // // .include('actors.$rating') // // .include('actors.name') @@ -861,119 +814,122 @@ await test.skip('edges aggregation', async (t) => { // after: NOK: unreacheable // console.log( // JSON.stringify( - // await db.query('movie').include('actors.strong').get().toObject(), + // await db.query2('movie').include('actors.strong').get(), // ), // ) // before: NOK: error in js: Cannot read properties of undefined (reading 'edges') // after: NOK: zeroing - // await db.query('movie').include('actors.$rating').get().inspect(10) + // await db.query2('movie').include('actors.$rating').get().inspect(10) /*----------------------------*/ /* BRANCHED QUERY */ /*----------------------------*/ // await db - // .query('movie') + // .query2('movie') // .include((q) => q('actors').max('strong').sum('strong2')) // .get() // .inspect(10) - deepEqual( - await db - .query('movie') - .include((q) => q('actors').max('$rating')) - .get(), - [ - { - id: 1, - actors: { - $rating: { - max: 55, - }, - }, - }, - { - id: 2, - actors: { - $rating: { - max: 77, - }, - }, - }, - ], - 'single edge aggregation, branched query', - ) + // deepEqual( + // await db + // .query2('movie') + // .include((q) => q('actors').max('$rating')) + // .get() + // , + // [ + // { + // id: 1, + // actors: { + // $rating: { + // max: 55, + // }, + // }, + // }, + // { + // id: 2, + // actors: { + // $rating: { + // max: 77, + // }, + // }, + // }, + // ], + // 'single edge aggregation, branched query', + // ) - deepEqual( - await db - .query('movie') - .include((q) => q('actors').max('$rating').sum('$hating')) - .get(), - [ - { - id: 1, - actors: { - $rating: { - max: 55, - }, - $hating: { - sum: 5, - }, - }, - }, - { - id: 2, - actors: { - $rating: { - max: 77, - }, - $hating: { - sum: 10, - }, - }, - }, - ], - 'multiple edges with multiple agg functions, branched query', - ) + // deepEqual( + // await db + // .query2('movie') + // .include((q) => q('actors').max('$rating').sum('$hating')) + // .get() + // , + // [ + // { + // id: 1, + // actors: { + // $rating: { + // max: 55, + // }, + // $hating: { + // sum: 5, + // }, + // }, + // }, + // { + // id: 2, + // actors: { + // $rating: { + // max: 77, + // }, + // $hating: { + // sum: 10, + // }, + // }, + // }, + // ], + // 'multiple edges with multiple agg functions, branched query', + // ) - deepEqual( - await db - .query('movie') - .include((q) => q('actors').max('$rating', '$hating')) - .get(), - [ - { - id: 1, - actors: { - $rating: { - max: 55, - }, - $hating: { - max: 5, - }, - }, - }, - { - id: 2, - actors: { - $rating: { - max: 77, - }, - $hating: { - max: 7, - }, - }, - }, - ], - 'multiple edges on same agg function, branched query', - ) + // deepEqual( + // await db + // .query2('movie') + // .include((q) => q('actors').max('$rating', '$hating')) + // .get() + // , + // [ + // { + // id: 1, + // actors: { + // $rating: { + // max: 55, + // }, + // $hating: { + // max: 5, + // }, + // }, + // }, + // { + // id: 2, + // actors: { + // $rating: { + // max: 77, + // }, + // $hating: { + // max: 7, + // }, + // }, + // }, + // ], + // 'multiple edges on same agg function, branched query', + // ) /*-----------------------------------*/ /* STRAIGHT ON TYPE */ /*-----------------------------------*/ // before: OK: error in js: Cannot read properties of undefined (reading 'edges') // after: NOK: feature not implemented - // await db.query('actor').max('$rating').get().inspect(10) - // await db.query('actor').sum('strong').get().inspect(10) // this is OK, summing all strong props in the type actor + // await db.query2('actor').max('$rating').get().inspect(10) + // await db.query2('actor').sum('strong').get().inspect(10) // this is OK, summing all strong props in the type actor }) diff --git a/test/aggregate/dev.ts b/test/aggregate/dev.ts index c5b5aabfe3..93612fc095 100644 --- a/test/aggregate/dev.ts +++ b/test/aggregate/dev.ts @@ -1,118 +1,272 @@ import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' -import { deepEqual } from '../shared/assert.js' - -await test('kev', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ - types: { - trip: { - driver: 'string', - distance: 'int32', - rate: 'int8', - }, - }, - }) - - db.create('trip', { driver: 'lala', distance: 10, rate: 5 }) - db.create('trip', { driver: 'lala', distance: 20, rate: 10 }) - db.create('trip', { driver: 'lele', distance: 40, rate: 10 }) - - // console.log((await db.query('trip').include('distance').get()).debug()) - // console.log( - // ( - // await db.query('trip').harmonicMean('distance').avg('distance').get() - // ).debug(), - // ) - - // console.log((await db.query('trip').sum('distance', 'rate').get()).debug()) - console.log( - (await db.query('trip').filter('distance', '>', 10).get()).debug(), - ) - console.log( - ( - await db.query('trip').sum('distance').filter('distance', '>', 10).get() - ).debug(), - ) - console.log( - ( - await db - .query('trip') - .sum('distance') - .filter('rate', '>', 8) - .groupBy('driver') - .get() - ).debug(), - ) - - await db.stop() -}) - -await test('references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ - types: { - driver: { - props: { - name: 'string', - trips: { - items: { - ref: 'trip', - prop: 'driver', // Defines the inverse relationship - }, - }, - }, - }, - trip: { - props: { - distance: 'uint16', - driver: { - ref: 'driver', - prop: 'trips', // Points back to the list on driver - }, - }, - }, - }, - }) - - const d1 = db.create('driver', { - name: 'Luc Ferry', - }) - db.drain() - const t1 = db.create('trip', { - distance: 523.1, // with uint16 => 523 - driver: d1, - }) - const t2 = db.create('trip', { - distance: 1230, - driver: d1, - }) - - // await db.query('trip').include('*', '**').get().inspect(10) - - // await db - // .query('driver') - // .include((t) => t('trips').include('distance')) - // .get() - // .inspect(10) - - const lala = await db - .query('driver') - .include((t) => t('trips').sum('distance').max('distance').avg('distance')) - .get() - - // console.log(lala.toObject()) - lala.inspect(10) -}) +import { deepEqual, equal } from '../shared/assert.js' +import { testDb } from '../shared/index.js' +import { fastPrng } from '../../src/utils/fastPrng.js' + +// await test('kev', async (t) => { +// const db = new BasedDb({ +// path: t.tmp, +// maxModifySize: 1e6, +// }) + +// await db.start({ clean: true }) +// t.after(() => db.stop()) + +// await db.setSchema({ +// types: { +// trip: { +// driver: 'string', +// distance: 'int32', +// rate: 'int8', +// }, +// }, +// }) + +// db.create('trip', { driver: 'lala', distance: 10, rate: 5 }) +// db.create('trip', { driver: 'lala', distance: 20, rate: 10 }) +// db.create('trip', { driver: 'lele', distance: 40, rate: 10 }) + +// // console.log((await db.query2('trip').include('distance').get()).debug()) +// // console.log( +// // ( +// // await db.query2('trip').harmonicMean('distance').avg('distance').get() +// // ).debug(), +// // ) + +// // console.log((await db.query2('trip').sum('distance', 'rate').get()).debug()) +// console.log( +// (await db.query2('trip').filter('distance', '>', 10).get()).debug(), +// ) +// console.log( +// ( +// await db.query2('trip').sum('distance').filter('distance', '>', 10).get() +// ).debug(), +// ) +// console.log( +// ( +// await db +// .query2('trip') +// .sum('distance') +// .filter('rate', '>', 8) +// .groupBy('driver') +// .get() +// ).debug(), +// ) + +// await db.stop() +// }) + +// await test('references', async (t) => { +// const db = new BasedDb({ +// path: t.tmp, +// }) +// await db.start({ clean: true }) +// t.after(() => db.stop()) + +// await db.setSchema({ +// types: { +// driver: { +// props: { +// name: 'string', +// trips: { +// items: { +// ref: 'trip', +// prop: 'driver', // Defines the inverse relationship +// }, +// }, +// }, +// }, +// trip: { +// props: { +// distance: 'number', +// rate: 'uint8', +// driver: { +// ref: 'driver', +// prop: 'trips', // Points back to the list on driver +// }, +// }, +// }, +// }, +// }) + +// const d1 = db.create('driver', { +// name: 'Luc Ferry', +// }) +// db.drain() +// const t1 = db.create('trip', { +// distance: 523.1, // with uint16 => 523 +// rate: 4, +// driver: d1, +// }) +// const t2 = db.create('trip', { +// distance: 1230, +// rate: 2, +// driver: d1, +// }) + +// // await db.query2('trip').include('*', '**').get().inspect(10) + +// // await db +// // .query2('driver') +// // .include((t) => t('trips').include('distance')) +// // .get() +// // .inspect(10) + +// const lala = await db +// .query2('driver') +// .include((t) => +// t('trips') +// .sum('distance') +// .avg('distance') +// .min('rate') +// .sum('rate') +// .count(), +// ) +// .get() + +// // console.log(lala.toObject()) +// lala.inspect(10) +// }) + +// await test('yyy', async (t) => { +// const db = await testDb(t, { +// types: { +// team: { +// props: { +// teamName: { type: 'string' }, +// city: { type: 'string' }, +// players: { +// items: { +// ref: 'player', +// prop: 'team', +// }, +// }, +// }, +// }, +// player: { +// props: { +// playerName: { type: 'string' }, +// position: { type: 'string' }, +// goalsScored: 'uint16', +// gamesPlayed: 'uint16', +// team: { +// ref: 'team', +// prop: 'players', +// }, +// }, +// }, +// }, +// }) + +// const p1 = db.create('player', { +// playerName: 'Martin', +// position: 'Forward', +// goalsScored: 10, +// gamesPlayed: 5, +// }) +// const p2 = db.create('player', { +// playerName: 'Jemerson', +// position: 'Defender', +// goalsScored: 1, +// gamesPlayed: 10, +// }) +// const p3 = db.create('player', { +// playerName: 'Pavon', +// position: 'Forward', +// goalsScored: 12, +// gamesPlayed: 6, +// }) +// const p4 = db.create('player', { +// playerName: 'Wout', +// position: 'Forward', +// goalsScored: 8, +// gamesPlayed: 7, +// }) +// const p5 = db.create('player', { +// playerName: 'Jorrel', +// position: 'Defender', +// goalsScored: 2, +// gamesPlayed: 9, +// }) + +// const t1 = db.create('team', { +// teamName: 'Grêmio', +// city: 'Porto Alegre', +// players: [p1, p2, p3], +// }) +// const t2 = db.create('team', { +// teamName: 'Ajax', +// city: 'Amsterdam', +// players: [p4, p5], +// }) +// const t3 = db.create('team', { +// teamName: 'Boca Juniors', +// city: 'Buenos Aires', +// players: [], +// }) +// const t4 = db.create('team', { +// teamName: 'Barcelona', +// city: 'Barcelona', +// players: [ +// db.create('player', { +// playerName: 'Lewandowski', +// position: 'Forward', +// goalsScored: 5, +// gamesPlayed: 5, +// }), +// ], +// }) + +// const result = await db +// .query2('team') +// .include('teamName', 'city', (select) => +// select('players') +// .sum('goalsScored', 'gamesPlayed') +// .groupBy('position') +// .range(0, 10), +// ) +// .get() + +// result.debug() +// result.inspect() + +// deepEqual( +// result.toObject(), +// [ +// { +// id: 1, +// teamName: 'Grêmio', +// city: 'Porto Alegre', +// players: { +// Forward: { goalsScored: { sum: 22 }, gamesPlayed: { sum: 11 } }, // Martin (10,5) + Pavon (12,6) +// Defender: { goalsScored: { sum: 1 }, gamesPlayed: { sum: 10 } }, // Jemerson (1,10) +// }, +// }, +// { +// id: 2, +// teamName: 'Ajax', +// city: 'Amsterdam', +// players: { +// Forward: { goalsScored: { sum: 8 }, gamesPlayed: { sum: 7 } }, // Wout (8,7) +// Defender: { goalsScored: { sum: 2 }, gamesPlayed: { sum: 9 } }, // Jorrel (2,9) +// }, +// }, +// { +// id: 3, +// teamName: 'Boca Juniors', +// city: 'Buenos Aires', +// players: {}, // does anybody wants to play for Boca? +// }, +// { +// id: 4, +// teamName: 'Barcelona', +// city: 'Barcelona', +// players: { +// Forward: { goalsScored: { sum: 5 }, gamesPlayed: { sum: 5 } }, // Lewandowski +// }, +// }, +// ], +// 'Include parent props, with referenced items grouped by their own prop, and aggregations', +// ) +// }) diff --git a/test/aggregate/experimental.ts b/test/aggregate/experimental.ts index 47d0b2a731..cc5670401b 100644 --- a/test/aggregate/experimental.ts +++ b/test/aggregate/experimental.ts @@ -1,15 +1,10 @@ import { BasedDb, groupBy } from '../../src/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('dev', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { lunch: { week: 'string', @@ -65,7 +60,7 @@ await test('dev', async (t) => { } await db.create('lunch', week27) - // const eaters = await db.query('lunch').get() + // const eaters = await db.query2('lunch').get() // eaters.inspect() // // knwon from raw data: @@ -78,7 +73,7 @@ await test('dev', async (t) => { // console.log( // 'Total meals from query: ', - // Object.entries(eaters.toObject()[0]) + // Object.entries(eaters[0]) // .filter(([key]) => days.includes(key)) // .reduce((sum, el: [string, number]) => sum + el[1], 0), // ) @@ -89,10 +84,10 @@ await test('dev', async (t) => { lala: 10, }) - // console.log(await db.query('lunch').include('Mon').get()) + // console.log(await db.query2('lunch').include('Mon').get()) deepEqual( - await db.query('lunch').cardinality('Mon').get(), + await db.query2('lunch').cardinality('Mon').get(), { Mon: { cardinality: 7 }, }, @@ -100,7 +95,7 @@ await test('dev', async (t) => { ) deepEqual( - await db.query('lunch').cardinality('Mon').groupBy('week').get(), + await db.query2('lunch').cardinality('Mon').groupBy('week').get(), { 27: { Mon: { cardinality: 5 }, @@ -111,11 +106,11 @@ await test('dev', async (t) => { }, 'cardinality main groupBy', ) - // await db.query('lunch').sum('lala').groupBy('week').get().inspect() + // await db.query2('lunch').sum('lala').groupBy('week').get().inspect() // await db.create('lunch', { // week: 0, // lala: 10, // lele: 11, // }) - // await db.query('lunch').sum('lala', 'lele').get().inspect() + // await db.query2('lunch').sum('lala', 'lele').get().inspect() }) diff --git a/test/aggregate/groupBY.ts b/test/aggregate/groupBY.ts index 31478c0b1a..e3ac71394e 100644 --- a/test/aggregate/groupBY.ts +++ b/test/aggregate/groupBY.ts @@ -1,19 +1,9 @@ -import { equal } from 'node:assert' -import { BasedDb } from '../../src/index.js' -import { allCountryCodes } from '../shared/examples.js' import test from '../shared/test.js' -import { throws, deepEqual } from '../shared/assert.js' +import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('sum group by', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -56,14 +46,10 @@ await test('sum group by', async (t) => { country: 'aa', AU: 15, }) - // const s = db.create('sequence', { votes: [nl1, nl2, au1] }) - db.drain() - db.create('sequence', { votes: nl1 }) - db.create('sequence', { votes: nl2 }) - db.create('sequence', { votes: au1 }) + const s = db.create('sequence', { votes: [nl1, nl2, au1] }) deepEqual( - await db.query('vote').sum('NL', 'AU').groupBy('country').get().toObject(), + await db.query2('vote').sum('NL', 'AU').groupBy('country').get(), { bb: { NL: { sum: 10 }, AU: { sum: 0 } }, aa: { NL: { sum: 20 }, AU: { sum: 15 } }, @@ -72,34 +58,26 @@ await test('sum group by', async (t) => { ) deepEqual( - await db.query('vote').groupBy('country').get().toObject(), + await db.query2('vote').groupBy('country').get(), { bb: {}, aa: {} }, 'groupBy with no aggregation function', ) // deepEqual( // await db - // .query('vote') - // .filter('country', '=', 'bb') + // .query2('vote') + // .filter('country', '=', 'bb') // filter string not implemented yet // .groupBy('country') // .sum('NL', 'AU') // .get() - // .toObject(), + // , // { bb: { NL: { sum: 10 }, AU: { sum: 0 } } }, // 'filter, groupBy on single distinct value', // ) }) await test('count group by', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -142,14 +120,10 @@ await test('count group by', async (t) => { country: 'aa', AU: 15, }) - // const s = db.create('sequence', { votes: [nl1, nl2, au1] }) - db.drain() - db.create('sequence', { votes: nl1 }) - db.create('sequence', { votes: nl2 }) - db.create('sequence', { votes: au1 }) + const s = db.create('sequence', { votes: [nl1, nl2, au1] }) deepEqual( - await db.query('vote').count().groupBy('country').get().toObject(), + await db.query2('vote').count().groupBy('country').get(), { bb: { count: 1, @@ -163,27 +137,19 @@ await test('count group by', async (t) => { // deepEqual( // await db - // .query('vote') - // .filter('country', '=', 'bb') + // .query2('vote') + // .filter('country', '=', 'bb') // filter string not implemented yet // .groupBy('country') // .count() // .get() - // .toObject(), + // , // { bb: { count: 1 } }, // 'count, filter, groupBy on single distinct value', // ) }) await test('variable key sum', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -241,43 +207,21 @@ await test('variable key sum', async (t) => { flap: 80, }) - // const strudelArticle = db.create('article', { - // name: 'The wonders of Strudel', - // contributors: [mrSnurp, flippie, derpie, dinkelDoink], - // }) - db.drain() - db.create('article', { - name: 'The wonders of Strudel', - contributors: mrSnurp, - }) - db.create('article', { + const strudelArticle = db.create('article', { name: 'The wonders of Strudel', - contributors: flippie, - }) - db.create('article', { - name: 'The wonders of Strudel', - contributors: derpie, - }) - db.create('article', { - name: 'The wonders of Strudel', - contributors: dinkelDoink, + contributors: [mrSnurp, flippie, derpie, dinkelDoink], }) - // const stupidity = db.create('article', { - // name: 'Les lois fondamentales de la stupidité humaine', - // contributors: [cipolla], - // }) - db.create('article', { + const stupidity = db.create('article', { name: 'Les lois fondamentales de la stupidité humaine', - contributors: cipolla, + contributors: [cipolla], }) // deepEqual( // await db - // .query('article') + // .query2('article') // .include((q) => q('contributors').sum('flap'), 'name') - // .get() - // .toObject(), + // .get(), // [ // { // id: 1, @@ -294,7 +238,7 @@ await test('variable key sum', async (t) => { // ) deepEqual( - await db.query('user').groupBy('name').sum('flap').get().toObject(), + await db.query2('user').groupBy('name').sum('flap').get(), { Flippie: { flap: { sum: 20 } }, 'Carlo Cipolla': { flap: { sum: 80 } }, @@ -306,7 +250,7 @@ await test('variable key sum', async (t) => { ) deepEqual( - await db.query('user').groupBy('country').sum('flap').get().toObject(), + await db.query2('user').groupBy('country').sum('flap').get(), { $undefined: { flap: { sum: 40 } }, NL: { flap: { sum: 30 } }, @@ -318,12 +262,9 @@ await test('variable key sum', async (t) => { // deepEqual( // await db - // .query('article') - // .include((select) => { - // select('contributors').groupBy('name').sum('flap') - // }) - // .get() - // .toObject(), + // .query2('article') + // .include((select) => select('contributors').groupBy('name').sum('flap')) + // .get(), // [ // { // id: 1, @@ -346,13 +287,7 @@ await test('variable key sum', async (t) => { }) await test('group by unique numbers', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { trip: { pickup: 'timestamp', @@ -383,7 +318,7 @@ await test('group by unique numbers', async (t) => { }) deepEqual( - await db.query('trip').sum('distance').groupBy('vendorIduint8').get(), + await db.query2('trip').sum('distance').groupBy('vendorIduint8').get(), { 13: { distance: { sum: 513.44 }, @@ -392,7 +327,7 @@ await test('group by unique numbers', async (t) => { 'group by number', ) deepEqual( - await db.query('trip').sum('distance').groupBy('vendorIdint8').get(), + await db.query2('trip').sum('distance').groupBy('vendorIdint8').get(), { 13: { distance: { sum: 513.44 }, @@ -401,7 +336,7 @@ await test('group by unique numbers', async (t) => { 'group by number', ) deepEqual( - await db.query('trip').sum('distance').groupBy('vendorIduint16').get(), + await db.query2('trip').sum('distance').groupBy('vendorIduint16').get(), { 813: { distance: { sum: 513.44 }, @@ -410,7 +345,7 @@ await test('group by unique numbers', async (t) => { 'group by number', ) deepEqual( - await db.query('trip').sum('distance').groupBy('vendorIdint16').get(), + await db.query2('trip').sum('distance').groupBy('vendorIdint16').get(), { 813: { distance: { sum: 513.44 }, @@ -419,7 +354,7 @@ await test('group by unique numbers', async (t) => { 'group by number', ) deepEqual( - await db.query('trip').sum('distance').groupBy('vendorIduint32').get(), + await db.query2('trip').sum('distance').groupBy('vendorIduint32').get(), { 813: { distance: { sum: 513.44 }, @@ -428,7 +363,7 @@ await test('group by unique numbers', async (t) => { 'group by number', ) deepEqual( - await db.query('trip').sum('distance').groupBy('vendorIdint32').get(), + await db.query2('trip').sum('distance').groupBy('vendorIdint32').get(), { 813: { distance: { sum: 513.44 }, @@ -437,7 +372,7 @@ await test('group by unique numbers', async (t) => { 'group by number', ) deepEqual( - await db.query('trip').sum('distance').groupBy('vendorIdnumber').get(), + await db.query2('trip').sum('distance').groupBy('vendorIdnumber').get(), { 813.813: { distance: { sum: 513.44 }, @@ -448,13 +383,7 @@ await test('group by unique numbers', async (t) => { }) await test.skip('groupBy ranges in numeric properties', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await db.setSchema({ types: { trip: { tripId: 'number', @@ -463,7 +392,7 @@ await test.skip('groupBy ranges in numeric properties', async (t) => { distance: 'number', }, }, - }) + }, { noBackup: true }) for (let i = 0; i < 10; i++) { db.create('trip', { @@ -473,5 +402,5 @@ await test.skip('groupBy ranges in numeric properties', async (t) => { }) } - // await db.query('trip').sum('distance').groupBy('tripId').get().inspect() + // await db.query2('trip').sum('distance').groupBy('tripId').get().inspect() }) diff --git a/test/aggregate/multiple.ts b/test/aggregate/multiple.ts index ba6a9dadc0..701a1a6559 100644 --- a/test/aggregate/multiple.ts +++ b/test/aggregate/multiple.ts @@ -1,17 +1,9 @@ -import { BasedDb, groupBy } from '../../src/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('multiple functions', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -79,23 +71,17 @@ await test('multiple functions', async (t) => { PL: -50, FI: -50.999, }) - // const s = db.create('sequence', { votes: [nl1, nl2, au1, au2, br1] }) - db.drain() - db.create('sequence', { votes: nl1 }) - db.create('sequence', { votes: nl2 }) - db.create('sequence', { votes: au1 }) - db.create('sequence', { votes: au2 }) - db.create('sequence', { votes: br1 }) + const s = db.create('sequence', { votes: [nl1, nl2, au1, au2, br1] }) deepEqual( - await db.query('vote').sum('NL').sum('NO').max('NL').min('NL').get(), + await db.query2('vote').sum('NL').sum('NO').max('NL').min('NL').get(), { NL: { sum: 176, max: 50, min: 10 }, NO: { sum: -176 } }, 'multiple func main no groupBy', ) deepEqual( await db - .query('vote') + .query2('vote') .sum('NL') .sum('NO') .max('NL') @@ -110,27 +96,13 @@ await test('multiple functions', async (t) => { 'multiple func main with groupBy', ) - // const j = db.create('vote', { - // region: 'Great', - // judges: ['lala', 'lele', 'lili'], - // }) - - db.drain() - db.create('vote', { + const j = db.create('vote', { region: 'Great', - judges: 'lala', - }) - db.create('vote', { - region: 'Great', - judges: 'lele', - }) - db.create('vote', { - region: 'Great', - judges: 'lili', + judges: ['lala', 'lele', 'lili'], }) const multi = await db - .query('vote') + .query2('vote') .sum('NL') .max('PT') .cardinality('judges') @@ -152,8 +124,7 @@ await test('multiple functions', async (t) => { max: 50, }, NO: { - // avg: -29.333333333333332, // originally one node because of multiref - avg: -22, // 3 nodes temporarely + avg: -29.333333333333332, sum: -176, }, judges: { @@ -164,7 +135,7 @@ await test('multiple functions', async (t) => { ) const multi2 = await db - .query('vote') + .query2('vote') .sum('NL') .max('PT') .cardinality('judges') @@ -220,8 +191,7 @@ await test('multiple functions', async (t) => { max: 50, }, NO: { - // avg: -25, // also one node only originally - avg: -12.5, + avg: -25, sum: -50, }, judges: { @@ -233,7 +203,7 @@ await test('multiple functions', async (t) => { ) deepEqual( - await db.query('vote').sum('NL').count().sum('PT').stddev('NO').get(), + await db.query2('vote').sum('NL').count().sum('PT').stddev('NO').get(), { NL: { sum: 176, @@ -241,18 +211,16 @@ await test('multiple functions', async (t) => { PT: { sum: 186, }, - // NO: { - // stddev: 21.518983866964227, // also one node only originally - // }, - // count: 6, // also one node only originally - NO: { stddev: 22.696758736499294 }, // std([-10,-23,-43,-50,-50,0,0,0]) ans = 22.697 - count: 8, + NO: { + stddev: 21.518983866964227, + }, + count: 6, }, 'multiple main + count no groupBy', ) deepEqual( await db - .query('vote') + .query2('vote') .sum('NL') .count() .sum('PT') @@ -291,19 +259,17 @@ await test('multiple functions', async (t) => { PT: { sum: 50, }, - // NO: { - // stddev: 35.35533905932738, - // }, - // count: 2, - NO: { stddev: 25 }, - count: 4, + NO: { + stddev: 35.35533905932738, + }, + count: 2, }, }, 'multiple main + count groupBy', ) // const multiref = await db - // .query('sequence') + // .query2('sequence') // .include((q) => q('votes').sum('NL').count().cardinality('judges')) // .get() @@ -330,7 +296,7 @@ await test('multiple functions', async (t) => { // deepEqual( // await db - // .query('sequence') + // .query2('sequence') // .include((q) => q('votes').sum('NL').count().cardinality('judges')) // .get(), // [ @@ -348,7 +314,7 @@ await test('multiple functions', async (t) => { // deepEqual( // await db - // .query('sequence') + // .query2('sequence') // .include((q) => q('votes').count().sum('NL').cardinality('judges')) // .get(), // [ diff --git a/test/aggregate/overall.perf.ts b/test/aggregate/overall.perf.ts index 1922e3b3fd..ebb16515bf 100644 --- a/test/aggregate/overall.perf.ts +++ b/test/aggregate/overall.perf.ts @@ -5,16 +5,11 @@ import { deepEqual } from '../shared/assert.js' import { fastPrng } from '../../src/utils/index.js' import { equal } from 'node:assert' import { SchemaType } from '../../src/schema/index.js' +import { testDb } from '../shared/index.js' -await test.skip('overall performance', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - +test.skip('overall performance', async (t) => { const types = ['IPA', 'Lager', 'Ale', 'Stout', 'Wit', 'Dunkel', 'Tripel'] - await db.setSchema({ + const db = await testDb(t, { types: { beer: { props: { @@ -43,32 +38,24 @@ await test.skip('overall performance', async (t) => { await db.drain() await perf(async () => { - await db.query('beer').sum('price').get() + await db.query2('beer').sum('price').get() }, 'main agg') await perf(async () => { - await db.query('beer').groupBy('year').get() + await db.query2('beer').groupBy('year').get() }, 'group by year') await perf(async () => { - await db.query('beer').groupBy('type').get() + await db.query2('beer').groupBy('type').get() }, 'group by enum main') await perf(async () => { - await db.query('beer').max('price').groupBy('type').get() + await db.query2('beer').max('price').groupBy('type').get() }, 'agg + enum main group by') }) await test.skip('count top level bignumber', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { bla: 'uint32', @@ -82,17 +69,11 @@ await test.skip('count top level bignumber', async (t) => { await db.drain() - const q = await db.query('sequence').count().get() - equal(q.toObject().count, 1e6) + const q = await db.query2('sequence').count().get() + equal(q.count, 1e6) }) await test('many countries', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - const countrySchema: SchemaType = { props: { AF: 'uint8', @@ -291,13 +272,13 @@ await test('many countries', async (t) => { }, } - await db.setSchema({ + const db = await testDb(t, { types: { audience: countrySchema, }, }) - const countries = Object.keys(countrySchema.props) + const countries = Object.keys(countrySchema.props) as [any, ...any[]] // for (let i = 0; i < 1e8; i++) { // db.create( @@ -310,7 +291,7 @@ await test('many countries', async (t) => { // } // await perf(async () => { // await db - // .query('audience') + // .query2('audience') // .avg(...countries) // .get() // }, 'averaging 193 props x 100_000_000 nodes') @@ -331,7 +312,7 @@ await test('many countries', async (t) => { } await perf(async () => { await db - .query('audience') + .query2('audience') .avg(...countries) .get() }, 'averaging 193 props x 10_000_000 nodes') @@ -352,7 +333,7 @@ await test('many countries', async (t) => { } await perf(async () => { await db - .query('audience') + .query2('audience') .avg(...countries) .get() }, 'averaging 193 props x 1_000_000 nodes') @@ -364,7 +345,7 @@ await test('many countries', async (t) => { await perf(async () => { await db - .query('audience') + .query2('audience') .avg(...countries) .get() }, 'averaging 193 props x 100_000 nodes') @@ -376,7 +357,7 @@ await test('many countries', async (t) => { await perf(async () => { await db - .query('audience') + .query2('audience') .avg(...countries) .get() }, 'averaging 193 props x 10_000 nodes') @@ -388,7 +369,7 @@ await test('many countries', async (t) => { await perf(async () => { await db - .query('audience') + .query2('audience') .avg(...countries) .get() }, 'averaging 193 props x 1_000 nodes') diff --git a/test/aggregate/temporal.ts b/test/aggregate/temporal.ts index 87892a3cac..6883ff9f9f 100644 --- a/test/aggregate/temporal.ts +++ b/test/aggregate/temporal.ts @@ -1,18 +1,10 @@ -import { equal } from 'node:assert' import { BasedDb } from '../../src/index.js' -import { allCountryCodes } from '../shared/examples.js' import test from '../shared/test.js' import { throws, deepEqual } from '../shared/assert.js' -import { fastPrng } from '../../src/utils/index.js' +import { testDb } from '../shared/index.js' await test('group by datetime intervals', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { trip: { pickup: 'timestamp', @@ -37,7 +29,7 @@ await test('group by datetime intervals', async (t) => { }) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup', 'day').get(), + await db.query2('trip').sum('distance').groupBy('pickup', 'day').get(), { 11: { distance: { sum: 1026.88 }, @@ -47,7 +39,7 @@ await test('group by datetime intervals', async (t) => { ) deepEqual( await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('pickup', { step: 'day' }) .get(), @@ -59,7 +51,7 @@ await test('group by datetime intervals', async (t) => { 'group timestamp by day, without shorthand', ) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup', 'hour').get(), + await db.query2('trip').sum('distance').groupBy('pickup', 'hour').get(), { 11: { distance: { sum: 1026.88 }, @@ -68,7 +60,7 @@ await test('group by datetime intervals', async (t) => { 'group timestamp by hour', ) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup', 'dow').get(), + await db.query2('trip').sum('distance').groupBy('pickup', 'dow').get(), { 3: { distance: { sum: 1026.88 }, @@ -77,7 +69,7 @@ await test('group by datetime intervals', async (t) => { 'group timestamp by day of week', ) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup', 'isoDOW').get(), + await db.query2('trip').sum('distance').groupBy('pickup', 'isoDOW').get(), { 3: { distance: { sum: 1026.88 }, @@ -86,7 +78,7 @@ await test('group by datetime intervals', async (t) => { 'group timestamp by hour', ) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup', 'doy').get(), + await db.query2('trip').sum('distance').groupBy('pickup', 'doy').get(), { 345: { distance: { sum: 1026.88 }, @@ -95,7 +87,7 @@ await test('group by datetime intervals', async (t) => { 'group timestamp by hour', ) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup', 'month').get(), + await db.query2('trip').sum('distance').groupBy('pickup', 'month').get(), { 11: { distance: { sum: 1026.88 }, @@ -104,7 +96,7 @@ await test('group by datetime intervals', async (t) => { 'group timestamp by month[0-11]', ) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup', 'year').get(), + await db.query2('trip').sum('distance').groupBy('pickup', 'year').get(), { 2024: { distance: { sum: 1026.88 }, @@ -115,13 +107,7 @@ await test('group by datetime intervals', async (t) => { }) await test('group by datetime ranges', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { trip: { pickup: 'timestamp', @@ -154,11 +140,10 @@ await test('group by datetime ranges', async (t) => { let interval = 40 * 60 // 40 minutes let r = await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('pickup', interval) .get() - .toObject() let epoch = Number(Object.keys(r)[0]) let startDate = dtFormat.format(epoch) @@ -190,11 +175,10 @@ await test('group by datetime ranges', async (t) => { let interval2 = 60 * 60 * 24 * 12 + 2 * 60 * 60 // 12 days and 2h let r2 = await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('pickup', interval2) .get() - .toObject() let epoch2 = Number(Object.keys(r2)[0]) let startDate2 = dtFormat.format(epoch2) @@ -209,29 +193,23 @@ await test('group by datetime ranges', async (t) => { 'another range interval as index', ) + // validation habling not implemented yet // ranges are limited to u32 max value seconds => (group by ~136 years intervals) - await throws( - async () => { - await db - .query('trip') - .sum('distance') - .groupBy('pickup', 2 ** 32 + 1) - .get() - .inspect() - }, - false, - `throw invalid step range error on validation`, - ) + // await throws( + // async () => { + // await db + // .query2('trip') + // .sum('distance') + // .groupBy('pickup', 2 ** 32 + 1) + // .get() + // }, + // false, + // `throw invalid step range error on validation`, + // ) }) await test('cardinality with dates', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { lunch: { day: 'timestamp', @@ -279,17 +257,16 @@ await test('cardinality with dates', async (t) => { ], }) - const total = await db.query('lunch').cardinality('eaters').get().toObject() + const total = await db.query2('lunch').cardinality('eaters').get() // console.log('Total Eaters: ', total.eaters) deepEqual(total.eaters.cardinality, 11, 'Total Eaters') const groupByDay = await db - .query('lunch') + .query2('lunch') .cardinality('eaters') .groupBy('day') .get() - .toObject() const meals = Object.entries(groupByDay) //@ts-ignore .map((m) => m[1].eaters.cardinality) @@ -314,11 +291,10 @@ await test('cardinality with dates', async (t) => { } const groupByMonth = await db - .query('lunch') + .query2('lunch') .cardinality('eaters') .groupBy('day', 'month') .get() - .toObject() const eatersByMonth = Object.entries(groupByMonth).map((e) => { //@ts-ignore @@ -327,19 +303,14 @@ await test('cardinality with dates', async (t) => { // console.log('Total Eaters by Month: ', eatersByMonth) deepEqual( eatersByMonth, + //@ts-ignore [{ Jun: { cardinality: 5 } }, { Jul: { cardinality: 11 } }], 'Total Eaters by Month', ) }) await test('formating timestamp', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { trip: { pickup: 'timestamp', @@ -371,7 +342,7 @@ await test('formating timestamp', async (t) => { }) deepEqual( - await db.query('trip').sum('distance').groupBy('pickup').get(), + await db.query2('trip').sum('distance').groupBy('pickup').get(), { 1733916600000: { distance: { sum: 513.44 }, @@ -385,7 +356,7 @@ await test('formating timestamp', async (t) => { deepEqual( await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('pickup', { step: 40 * 60, display: dtFormat }) .get(), @@ -399,7 +370,7 @@ await test('formating timestamp', async (t) => { deepEqual( await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('pickup', { display: dtFormat }) .get(), @@ -412,13 +383,7 @@ await test('formating timestamp', async (t) => { }) await test('timezone offsets', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { trip: { pickup: 'timestamp', @@ -451,7 +416,7 @@ await test('timezone offsets', async (t) => { deepEqual( await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('pickup', { step: 'day', timeZone: 'America/Sao_Paulo' }) .get(), @@ -468,7 +433,7 @@ await test('timezone offsets', async (t) => { ) deepEqual( await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('pickup', { step: 'hour', timeZone: 'America/Sao_Paulo' }) .get(), @@ -484,7 +449,7 @@ await test('timezone offsets', async (t) => { ) deepEqual( await db - .query('trip') + .query2('trip') .sum('distance') .groupBy('dropoff', { step: 'month', timeZone: 'America/Sao_Paulo' }) .get(), diff --git a/test/aggregate/validation.ts b/test/aggregate/validation.ts index 42ee757b74..fe8b873938 100644 --- a/test/aggregate/validation.ts +++ b/test/aggregate/validation.ts @@ -1,20 +1,10 @@ -import { equal } from 'node:assert' import { BasedDb } from '../../src/index.js' -import { allCountryCodes } from '../shared/examples.js' import test from '../shared/test.js' -import { throws, deepEqual } from '../shared/assert.js' -import { fastPrng } from '../../src/utils/index.js' +import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('undefined numbers', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { vote: { props: { @@ -36,7 +26,7 @@ await test('undefined numbers', async (t) => { }) deepEqual( - await db.query('vote').max('AU', 'FI').groupBy('region').get().toObject(), + await db.query2('vote').max('AU', 'FI').groupBy('region').get(), { EU: { AU: { max: 23 }, @@ -46,7 +36,7 @@ await test('undefined numbers', async (t) => { 'number is initialized with zero', ) deepEqual( - await db.query('vote').avg('AU', 'FI').groupBy('region').get().toObject(), + await db.query2('vote').avg('AU', 'FI').groupBy('region').get(), { EU: { AU: { avg: 16.5 }, @@ -57,30 +47,19 @@ await test('undefined numbers', async (t) => { ) deepEqual( - await db - .query('vote') - .harmonicMean('AU', 'FI') - .groupBy('region') - .get() - .toObject(), + await db.query2('vote').hmean('AU', 'FI').groupBy('region').get(), { EU: { AU: { hmean: 13.93939393939394 }, FI: { hmean: 0 }, }, }, - 'harmonic_mean affected by count because number is initialized with zero', + 'hmean affected by count because number is initialized with zero', ) }) await test('boundary cases for validation', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { movie: { name: 'string', @@ -139,7 +118,7 @@ await test('boundary cases for validation', async (t) => { }) deepEqual( - await db.query('movie').groupBy('year').count().get(), + await db.query2('movie').groupBy('year').count().get(), { 1994: { count: 1, @@ -152,7 +131,7 @@ await test('boundary cases for validation', async (t) => { ) deepEqual( - await db.query('movie').groupBy('genre').min('year').get(), + await db.query2('movie').groupBy('genre').min('year').get(), { undefined: { year: { min: 1994 }, diff --git a/test/alias/alias.ts b/test/alias/alias.ts index 46e7505455..ea96e1760b 100644 --- a/test/alias/alias.ts +++ b/test/alias/alias.ts @@ -1,17 +1,11 @@ import { notEqual } from 'assert' -import { BasedDb } from '../../src/index.js' import { deepEqual } from '../shared/assert.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' +import { checksum } from '../../src/db-client/query2/index.js' await test('simple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -31,7 +25,7 @@ await test('simple', async (t) => { }) deepEqual( - await db.query('user', await user1).get(), + await db.query2('user', await user1).get(), { id: 1, externalId: 'cool', @@ -40,27 +34,22 @@ await test('simple', async (t) => { 'One alias', ) - deepEqual((await db.query('user', user2).get()).toObject(), { + deepEqual(await db.query2('user', await user2).get(), { id: 2, externalId: 'cool2', potato: '', }) - deepEqual( - (await db.query('user').filter('externalId', '=', 'cool').get()).toObject(), - [ - { - id: 1, - externalId: 'cool', - potato: '', - }, - ], - ) + deepEqual(await db.query2('user').filter('externalId', '=', 'cool').get(), [ + { + id: 1, + externalId: 'cool', + potato: '', + }, + ]) deepEqual( - ( - await db.query('user').filter('externalId', 'includes', 'cool').get() - ).toObject(), + await db.query2('user').filter('externalId', 'includes', 'cool').get(), [ { id: 1, @@ -74,45 +63,52 @@ await test('simple', async (t) => { }, ], ) - const res1 = await db.upsert('user', { - externalId: 'potato', - potato: 'success', - }) + const res1 = await db.upsert( + 'user', + { + externalId: 'potato', + }, + { + potato: 'success', + }, + ) - deepEqual((await db.query('user', res1).get()).toObject(), { + deepEqual(await db.query2('user', res1).get(), { id: 3, externalId: 'potato', potato: 'success', }) - const res2 = await db.upsert('user', { - externalId: 'potato', - potato: 'wrong', - }) - deepEqual((await db.query('user', res2).get()).toObject(), { + + const res2 = await db.upsert( + 'user', + { + externalId: 'potato', + }, + { + potato: 'wrong', + }, + ) + + deepEqual(await db.query2('user', res2).get(), { id: 3, externalId: 'potato', potato: 'wrong', }) + deepEqual( - ( - await db.query('user', { externalId: 'i-dont-exists-haha!' }).get() - ).toObject(), + await db.query2('user', { externalId: 'i-dont-exists-haha!' }).get(), null, 'Get non existing alias', ) - deepEqual( - (await db.query('user', 123).get()).toObject(), - null, - 'Get non existing id', - ) + deepEqual(await db.query2('user', 123).get(), null, 'Get non existing id') await db.create('user', { potato: 'power', externalId: 'cool', }) - deepEqual(await db.query('user').get().toObject(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, externalId: '', potato: '' }, { id: 2, externalId: 'cool2', potato: '' }, { id: 3, externalId: 'potato', potato: 'wrong' }, @@ -121,14 +117,7 @@ await test('simple', async (t) => { }) await test('alias - references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -149,17 +138,22 @@ await test('alias - references', async (t) => { }, }) - await db.upsert('user', { - name: '2', - email: '2@saulx.com', - bestFriend: db.upsert('user', { email: 'jim@saulx.com' }), - friends: { - add: [db.upsert('user', { email: 'jim@saulx.com' })], + await db.upsert( + 'user', + { + email: '2@saulx.com', }, - }) + { + name: '2', + bestFriend: db.upsert('user', { email: 'jim@saulx.com' }, {}), + friends: { + add: [db.upsert('user', { email: 'jim@saulx.com' }, {})], + }, + }, + ) deepEqual( - await db.query('user').include('email', 'friends').get().toObject(), + await db.query2('user').include('email', 'friends').get(), [ { id: 1, @@ -175,17 +169,26 @@ await test('alias - references', async (t) => { 'simple', ) - await db.upsert('user', { - name: '2', - email: '2@saulx.com', - bestFriend: db.upsert('user', { email: 'jim@saulx.com', name: 'jim' }), - friends: { - add: [db.upsert('user', { email: 'jim@saulx.com', name: 'jim' })], + await db.upsert( + 'user', + { + email: '2@saulx.com', }, - }) + { + name: '2', + bestFriend: db.upsert( + 'user', + { email: 'jim@saulx.com' }, + { name: 'jim' }, + ), + friends: { + add: [db.upsert('user', { email: 'jim@saulx.com' }, { name: 'jim' })], + }, + }, + ) deepEqual( - await db.query('user').include('friends', 'email').get().toObject(), + await db.query2('user').include('friends', 'email').get(), [ { id: 1, @@ -203,24 +206,16 @@ await test('alias - references', async (t) => { deepEqual( await db - .query('user') + .query2('user') .filter('email', 'includes', '2', { lowerCase: true }) - .get() - .toObject(), + .get(), [{ id: 2, name: '2', email: '2@saulx.com' }], 'update 2', ) }) await test('Get single node by alias', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -231,18 +226,22 @@ await test('Get single node by alias', async (t) => { }, }) - await db.upsert('user', { - name: '2', - email: '2@saulx.com', - }) + await db.upsert( + 'user', + { + email: '2@saulx.com', + }, + { + name: '2', + }, + ) deepEqual( await db - .query('user', { + .query2('user', { email: '2@saulx.com', }) - .get() - .toObject(), + .get(), { id: 1, name: '2', @@ -252,14 +251,7 @@ await test('Get single node by alias', async (t) => { }) await test('Update existing alias field', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -273,25 +265,30 @@ await test('Update existing alias field', async (t) => { }) const email = 'nuno@saulx.com' - await db.upsert('user', { - name: 'nuno', - email, - currentToken: - // INFO: Works if this field is undefined or an empty string - 'aff1ffc48253ffe063005ecce308996da1ab01c864276faaa88bd94fab4a092d604bbd916470ff1def223bc9e8b662b7', - }) + await db.upsert( + 'user', + { + email, + currentToken: + // INFO: Works if this field is undefined or an empty string + 'aff1ffc48253ffe063005ecce308996da1ab01c864276faaa88bd94fab4a092d604bbd916470ff1def223bc9e8b662b7', + }, + { + name: 'nuno', + }, + ) - const existingUser = await db.query('user', { email }).get().toObject() + const existingUser = await db.query2('user', { email }).get() let newToken = 'e2d88cf5d303972f2eb0c381e093afb8728eaebc8114a322418403eeaf30eb767d3d7dfaef784e9c2059d6cfa78cea87' - await db.update('user', existingUser.id, { + await db.update('user', existingUser!.id, { currentToken: newToken, status: 'login', }) await db.drain() - deepEqual(await db.query('user', { email }).get(), { + deepEqual(await db.query2('user', { email }).get(), { id: 1, name: 'nuno', email: 'nuno@saulx.com', @@ -301,7 +298,7 @@ await test('Update existing alias field', async (t) => { newToken = '6093127416cbc7ff8126cda605a2239a2e061a5c65a77cc38b23034441832d2c40afdaa91f83285c52edccc5dd8d18d5' - await db.update('user', existingUser.id, { + await db.update('user', existingUser!.id, { currentToken: newToken, status: 'login', }) @@ -309,11 +306,10 @@ await test('Update existing alias field', async (t) => { deepEqual( await db - .query('user', { + .query2('user', { email, }) - .get() - .toObject(), + .get(), { id: 1, name: 'nuno', @@ -323,7 +319,7 @@ await test('Update existing alias field', async (t) => { }, ) - await db.update('user', existingUser.id, { + await db.update('user', existingUser!.id, { currentToken: null, status: 'clear', }) @@ -332,11 +328,10 @@ await test('Update existing alias field', async (t) => { deepEqual( await db - .query('user', { + .query2('user', { email, }) - .get() - .toObject(), + .get(), { id: 1, name: 'nuno', @@ -348,7 +343,7 @@ await test('Update existing alias field', async (t) => { newToken = '1e6d1b9baf291d0d3f581ca147eda5a62feba5f2e84039322d9b8e0999e5d9a8c9feae5c7707d63be670615675ad2381' - await db.update('user', existingUser.id, { + await db.update('user', existingUser!.id, { currentToken: newToken, status: 'login', }) @@ -356,11 +351,10 @@ await test('Update existing alias field', async (t) => { deepEqual( await db - .query('user', { + .query2('user', { email, }) - .get() - .toObject(), + .get(), { id: 1, name: 'nuno', @@ -372,14 +366,7 @@ await test('Update existing alias field', async (t) => { }) await test('same-name-alias', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { sequence: { props: { @@ -403,18 +390,18 @@ await test('same-name-alias', async (t) => { const rounds = [{ name: 'semi1' }, { name: 'semi2' }, { name: 'final' }] for (const sequence of sequences) { - db.upsert('sequence', sequence) + db.upsert('sequence', sequence, {}) } await db.drain() for (const round of rounds) { - await db.upsert('round', round) + await db.upsert('round', round, {}) } await db.drain() - deepEqual(await db.query('round').get().toObject(), [ + deepEqual(await db.query2('round').get(), [ { id: 1, name: 'semi1' }, { id: 2, name: 'semi2' }, { id: 3, name: 'final' }, @@ -422,15 +409,7 @@ await test('same-name-alias', async (t) => { }) await test('nested alias', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { thing: { obj: { @@ -443,32 +422,34 @@ await test('nested alias', async (t) => { }, }) - await db.upsert('thing', { - obj: { - a: 'jibber', + await db.upsert( + 'thing', + { + obj: { + a: 'jibber', + }, }, - }) + {}, + ) - await db.upsert('thing', { - obj: { - b: 'flurp', + await db.upsert( + 'thing', + { + obj: { + b: 'flurp', + }, }, - }) + {}, + ) - deepEqual(await db.query('thing').get().toObject(), [ + deepEqual(await db.query2('thing').get(), [ { id: 1, obj: { a: 'jibber', b: '' } }, { id: 2, obj: { b: 'flurp', a: '' } }, ]) }) await test('json and crc32', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { article: { @@ -482,26 +463,19 @@ await test('json and crc32', async (t) => { article: 'a', }) - const checksum = (await db.query('user', user1).get()).checksum + const checksum1 = checksum(await db.query2('user', user1).get()) await db.update('user', user1, { article: 'b', }) - const checksum2 = (await db.query('user', user1).get()).checksum + const checksum2 = checksum(await db.query2('user', user1).get()) - notEqual(checksum, checksum2, 'Checksum is not the same') + notEqual(checksum1, checksum2, 'Checksum is not the same') }) await test('Get single node by alias', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -512,210 +486,30 @@ await test('Get single node by alias', async (t) => { }, }) - await db.upsert('user', { - name: '2', - email: '2@saulx.com', - }) - - deepEqual( - await db - .query('user', { - email: '2@saulx.com', - }) - .get() - .toObject(), + await db.upsert( + 'user', { - id: 1, - name: '2', email: '2@saulx.com', }, + { name: '2' }, ) -}) - -await test('Update existing alias field', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ - types: { - user: { - props: { - name: 'string', - email: 'alias', - status: ['login', 'clear'], - currentToken: 'alias', - }, - }, - }, - }) - - const email = 'nuno@saulx.com' - await db.upsert('user', { - name: 'nuno', - email, - currentToken: - // INFO: Works if this field is undefined or an empty string - 'aff1ffc48253ffe063005ecce308996da1ab01c864276faaa88bd94fab4a092d604bbd916470ff1def223bc9e8b662b7', - }) - - const existingUser = await db.query('user', { email }).get().toObject() - - let newToken = - 'e2d88cf5d303972f2eb0c381e093afb8728eaebc8114a322418403eeaf30eb767d3d7dfaef784e9c2059d6cfa78cea87' - await db.update('user', existingUser.id, { - currentToken: newToken, - status: 'login', - }) - await db.drain() - - deepEqual( - await db - .query('user', { - email, - }) - .get() - .toObject(), - { - id: 1, - name: 'nuno', - email: 'nuno@saulx.com', - status: 'login', - currentToken: newToken, - }, - ) - - newToken = - '6093127416cbc7ff8126cda605a2239a2e061a5c65a77cc38b23034441832d2c40afdaa91f83285c52edccc5dd8d18d5' - await db.update('user', existingUser.id, { - currentToken: newToken, - status: 'login', - }) - await db.drain() - - deepEqual( - await db - .query('user', { - email, - }) - .get() - .toObject(), - { - id: 1, - name: 'nuno', - email: 'nuno@saulx.com', - status: 'login', - currentToken: newToken, - }, - ) - - await db.update('user', existingUser.id, { - currentToken: null, - status: 'clear', - }) - - await db.drain() - - deepEqual( - await db - .query('user', { - email, - }) - .get() - .toObject(), - { - id: 1, - name: 'nuno', - email: 'nuno@saulx.com', - status: 'clear', - currentToken: '', - }, - ) - - newToken = - '1e6d1b9baf291d0d3f581ca147eda5a62feba5f2e84039322d9b8e0999e5d9a8c9feae5c7707d63be670615675ad2381' - await db.update('user', existingUser.id, { - currentToken: newToken, - status: 'login', - }) - await db.drain() deepEqual( await db - .query('user', { - email, + .query2('user', { + email: '2@saulx.com', }) - .get() - .toObject(), + .get(), { id: 1, - name: 'nuno', - email: 'nuno@saulx.com', - status: 'login', - currentToken: newToken, + name: '2', + email: '2@saulx.com', }, ) }) -await test('same-name-alias', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ - types: { - sequence: { - props: { - name: 'alias', - }, - }, - round: { - props: { - name: 'alias', - }, - }, - }, - }) - - const sequences = [ - { name: 'semi1' }, - { name: 'semi1-othershit' }, - { name: 'semi2' }, - { name: 'semi2-othershit' }, - ] - const rounds = [{ name: 'semi1' }, { name: 'semi2' }, { name: 'final' }] - - for (const sequence of sequences) { - db.upsert('sequence', sequence) - } - for (const round of rounds) { - await db.upsert('round', round) - } - - await db.drain() - - deepEqual(await db.query('round').get().toObject(), [ - { id: 1, name: 'semi1' }, - { id: 2, name: 'semi2' }, - { id: 3, name: 'final' }, - ]) -}) - await test('alias and ref', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -743,12 +537,12 @@ await test('alias and ref', async (t) => { const user1 = await db.create('user', { name: 'Mario' }) //await db.update('user', user, { role: { alias: 'admin' }}) - await db.upsert('role', { alias: 'admin', users: { add: [user1] } }) + await db.upsert('role', { alias: 'admin' }, { users: { add: [user1] } }) const user2 = await db.create('user', { name: 'Luigi' }) - await db.upsert('role', { alias: 'admin', users: { add: [user2] } }) + await db.upsert('role', { alias: 'admin' }, { users: { add: [user2] } }) - deepEqual(await db.query('role', adminRole).include('name', 'users').get(), { + deepEqual(await db.query2('role', adminRole).include('name', 'users').get(), { id: 1, name: 'Admin Role', users: [ @@ -765,14 +559,7 @@ await test('alias and ref', async (t) => { }) await test('alias and edge ref', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -814,18 +601,17 @@ await test('alias and edge ref', async (t) => { const user2 = await db.create('user', { name: 'Luigi' }) const adminRole = await db - .query('role', { alias: 'admin' }) + .query2('role', { alias: 'admin' }) .include('id') .get() - .toObject() await db.update('project', prj, { - users: { add: [{ id: user1, $role: adminRole }] }, + users: { add: [{ id: user1, $role: adminRole!.id }] }, }) deepEqual( await db - .query('project', prj) + .query2('project', prj) .include('name', 'users', 'users.$role') .get(), { diff --git a/test/alias/aliasOps.perf.ts b/test/alias/aliasOps.perf.ts index 11e6c1ea83..4f9be4f9b4 100644 --- a/test/alias/aliasOps.perf.ts +++ b/test/alias/aliasOps.perf.ts @@ -1,17 +1,10 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' -import { deepEqual, equal, perf } from '../shared/assert.js' +import { perf } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('await updates', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const status = ['a', 'b', 'c', 'd', 'e', 'f'] - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { diff --git a/test/alias/aliasOps.ts b/test/alias/aliasOps.ts index 458b4d4760..5d09cda1a3 100644 --- a/test/alias/aliasOps.ts +++ b/test/alias/aliasOps.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' -import { deepEqual, equal } from '../shared/assert.js' +import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('upsert', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -25,7 +19,7 @@ await test('upsert', async (t) => { status: 'a', }) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, status: 'a', externalId: 'cool', @@ -36,7 +30,7 @@ await test('upsert', async (t) => { status: 'b', }) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, status: 'b', externalId: '', diff --git a/test/alias/create.perf.ts b/test/alias/create.perf.ts index e32e6f674c..b3b3ff4553 100644 --- a/test/alias/create.perf.ts +++ b/test/alias/create.perf.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' import { perf } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('create 1m items with an alias', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { alias: 'alias', diff --git a/test/alias/doubleAlias.ts b/test/alias/doubleAlias.ts index fa4fe72746..84f35974ff 100644 --- a/test/alias/doubleAlias.ts +++ b/test/alias/doubleAlias.ts @@ -1,16 +1,9 @@ -import { BasedDb } from '../../src/index.js' import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' await test('aliasDouble', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { plot: { props: { @@ -37,7 +30,7 @@ await test('aliasDouble', async (t) => { deepEqual( await db - .query('plot', { + .query2('plot', { slug: 'test-plot-2', }) .get(), @@ -46,7 +39,7 @@ await test('aliasDouble', async (t) => { deepEqual( await db - .query('plot', { + .query2('plot', { uuid: 'flap2', }) .get(), diff --git a/test/alias/filter.ts b/test/alias/filter.ts index 68b7439294..89adecf0f6 100644 --- a/test/alias/filter.ts +++ b/test/alias/filter.ts @@ -1,16 +1,9 @@ -import { BasedDb } from '../../src/index.js' import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' await test('aliasFilter', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { plot: { props: { @@ -31,10 +24,12 @@ await test('aliasFilter', async (t) => { age: 20, }) - const a = await db.query('plot', { slug: 'kavel-omval-naast-de-poort' }).get() + const a = await db + .query2('plot', { slug: 'kavel-omval-naast-de-poort' }) + .get() const b = await db - .query('plot', { slug: 'kavel-omval-naast-de-poort' }) + .query2('plot', { slug: 'kavel-omval-naast-de-poort' }) .filter('age', '>', 10) .get() diff --git a/test/alias/insert.ts b/test/alias/insert.ts index d7205a1494..bd86eb566a 100644 --- a/test/alias/insert.ts +++ b/test/alias/insert.ts @@ -1,17 +1,9 @@ -import { BasedDb } from '../../src/index.js' import { equal } from '../shared/assert.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' await test('alias insert', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -23,13 +15,18 @@ await test('alias insert', async (t) => { }, }) - await db.insert('user', { - uuid: 'xx', - one: 1, - two: 2, - }) + await db.insert( + 'user', + { + uuid: 'xx', + }, + { + one: 1, + two: 2, + }, + ) - equal(await db.query('user').get(), [ + equal(await db.query2('user').get(), [ { id: 1, uuid: 'xx', @@ -38,13 +35,18 @@ await test('alias insert', async (t) => { }, ]) - await db.insert('user', { - uuid: 'xx', - one: 5, - two: 6, - }) + await db.insert( + 'user', + { + uuid: 'xx', + }, + { + one: 5, + two: 6, + }, + ) - equal(await db.query('user').get(), [ + equal(await db.query2('user').get(), [ { id: 1, uuid: 'xx', @@ -53,13 +55,18 @@ await test('alias insert', async (t) => { }, ]) - await db.insert('user', { - uuid: 'yy', - one: 5, - two: 6, - }) + await db.insert( + 'user', + { + uuid: 'yy', + }, + { + one: 5, + two: 6, + }, + ) - equal(await db.query('user').get(), [ + equal(await db.query2('user').get(), [ { id: 1, uuid: 'xx', diff --git a/test/alias/upsert.ts b/test/alias/upsert.ts index 65446b6774..f2c512b6d1 100644 --- a/test/alias/upsert.ts +++ b/test/alias/upsert.ts @@ -1,16 +1,11 @@ +import { DbServer } from '../../dist/index.js' import { BasedDb, DbClient, getDefaultHooks } from '../../src/index.js' import { equal } from '../shared/assert.js' import test from '../shared/test.js' await test('alias upsert', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const server = new DbServer({ path: t.tmp }) + const schema = { types: { user: { props: { @@ -20,37 +15,50 @@ await test('alias upsert', async (t) => { }, }, }, + } as const + const client1 = new DbClient({ + hooks: getDefaultHooks(server), }) - - const client1 = db.client - const client2 = new DbClient({ - hooks: getDefaultHooks(db.server), + const client2 = new DbClient({ + hooks: getDefaultHooks(server), }) + await client1.setSchema(schema) + const ids = await Promise.all([ client1.create('user', { uuid: 'a', }), - client1.upsert('user', { - uuid: 'x', - one: 1, - }), + client1.upsert( + 'user', + { + uuid: 'x', + }, + { + one: 1, + }, + ), client1.create('user', { uuid: 'b', }), client2.create('user', { uuid: 'c', }), - client2.upsert('user', { - uuid: 'x', - two: 2, - }), + client2.upsert( + 'user', + { + uuid: 'x', + }, + { + two: 2, + }, + ), client2.create('user', { uuid: 'd', }), ]) - const results = await db.query('user').get() + const results = await client1.query2('user').get() equal( results, diff --git a/test/alignModify.ts b/test/alignModify.ts index c41c6dfaf6..c06db4d4d5 100644 --- a/test/alignModify.ts +++ b/test/alignModify.ts @@ -8,7 +8,7 @@ await test('alignModify - putrefs', async (t) => { }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) const flushModify = db.client.hooks.flushModify @@ -42,7 +42,7 @@ await test('alignModify - putrefs', async (t) => { }) } await db.drain() - const res = await db.query('user').include('friends', 'str').get().toObject() + const res = await db.query2('user').include('friends', 'str').get() deepEqual(res, [ { id: 1, diff --git a/test/based-client/addSpecs.ts b/test/based-client/addSpecs.ts index 77c5b3067a..a0d308d7f9 100644 --- a/test/based-client/addSpecs.ts +++ b/test/based-client/addSpecs.ts @@ -54,7 +54,7 @@ test('addSpecs', async (t: T) => { let errCnt = 0 // let msgCnt = 0 - client.query('cookie').subscribe( + client.query2('cookie').subscribe( () => {}, () => { errCnt++ diff --git a/test/based-client/authorize.ts b/test/based-client/authorize.ts index a5cb5aaee0..6d01550209 100644 --- a/test/based-client/authorize.ts +++ b/test/based-client/authorize.ts @@ -136,7 +136,7 @@ test('authorize observe', async (t: T) => { await new Promise((resolve) => { client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe( @@ -153,7 +153,7 @@ test('authorize observe', async (t: T) => { await new Promise((resolve) => { client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe( @@ -194,7 +194,7 @@ test('authorize after observe', async (t: T) => { let receiveCnt = 0 client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe( @@ -239,7 +239,7 @@ test('authorize from server after observe', async (t: T) => { let receiveCnt = 0 client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe( diff --git a/test/based-client/authorizeOnSpec.ts b/test/based-client/authorizeOnSpec.ts index 24b73c026a..6fcf66eae4 100644 --- a/test/based-client/authorizeOnSpec.ts +++ b/test/based-client/authorizeOnSpec.ts @@ -91,7 +91,7 @@ test('Specific authorize on spec', async (t: T) => { t.is(authCalled, 1) - await client.query('slax').get() + await client.query2('slax').get() t.is(authCalled, 2) client.channel('klax').subscribe(() => {}) diff --git a/test/based-client/browser/index.ts b/test/based-client/browser/index.ts index 7faea2361b..f15ffe1be0 100644 --- a/test/based-client/browser/index.ts +++ b/test/based-client/browser/index.ts @@ -75,7 +75,7 @@ export const app = () => { client.on('connect', (v) => { log.innerHTML += `

CONNECT: true
` }) - client.query('counter').subscribe( + client.query2('counter').subscribe( (d) => { log.innerHTML = `cnt: ${d}` }, @@ -86,7 +86,7 @@ export const app = () => { const text = document.createElement('pre') body.appendChild(text) - client.query('text').subscribe( + client.query2('text').subscribe( (d) => { text.innerHTML = d.join('\n') }, diff --git a/test/based-client/dbQuery.ts b/test/based-client/dbQuery.ts index 20ff1fbd0a..573fcd6b10 100644 --- a/test/based-client/dbQuery.ts +++ b/test/based-client/dbQuery.ts @@ -29,13 +29,13 @@ test('db query', async (t: T) => { type: 'query', uninstallAfterIdleTime: 1e3, fn: (_, __, update) => { - return db.query('user').subscribe(update) + return db.query2('user').subscribe(update) }, }, getUser: { type: 'function', async fn() { - return db.query('user').get() + return db.query2('user').get() }, }, }, @@ -70,8 +70,8 @@ test('db query', async (t: T) => { const res = await client.call('getUser') const resOld = await clientOld.call('getUser') - client.query('users').subscribe((res) => nextResolve?.(res)) - clientOld.query('users').subscribe((res) => nextResolveOld?.(res)) + client.query2('users').subscribe((res) => nextResolve?.(res)) + clientOld.query2('users').subscribe((res) => nextResolveOld?.(res)) const [resQuery, resQueryOld] = await Promise.all([ new Promise((r) => (nextResolve = r)), new Promise((r) => (nextResolveOld = r)), diff --git a/test/based-client/error.ts b/test/based-client/error.ts index 2d50ffe21f..29905f1262 100644 --- a/test/based-client/error.ts +++ b/test/based-client/error.ts @@ -137,7 +137,7 @@ test('observable authorize error', async (t: T) => { // TODO: Check error instance of const error = (await new Promise((resolve) => { - coreClient.query('counter', {}).subscribe( + coreClient.query2('counter', {}).subscribe( (v) => {}, (err) => { @@ -174,7 +174,7 @@ test('throw in an interval', async (t: T) => { }) await t.throwsAsync( new Promise((_, reject) => - coreClient.query('errorTimer', {}).subscribe(() => {}, reject), + coreClient.query2('errorTimer', {}).subscribe(() => {}, reject), ), ) }) diff --git a/test/based-client/get.ts b/test/based-client/get.ts index 36d49982d3..f8fe85bf3c 100644 --- a/test/based-client/get.ts +++ b/test/based-client/get.ts @@ -33,7 +33,7 @@ const setup = async (t: T) => { type: 'function', uninstallAfterIdleTime: 1e3, fn: async (based, payload) => { - const bla = await based.query('any', payload).get() + const bla = await based.query2('any', payload).get() return bla }, }, @@ -80,7 +80,7 @@ const setup = async (t: T) => { type: 'function', uninstallAfterIdleTime: 1e3, fn: async (based, payload) => { - return based.query('checkPayload', payload).get() + return based.query2('checkPayload', payload).get() }, }, }, @@ -105,14 +105,14 @@ test('get while subscribed', async (t: T) => { }) const res0 = await new Promise((resolve) => { - coreClient.query('any', 'xxx').subscribe((res) => { + coreClient.query2('any', 'xxx').subscribe((res) => { resolve(res) }) }) t.is(res0, 'xxx') - const res1 = await coreClient.query('any', 'xxx').get() + const res1 = await coreClient.query2('any', 'xxx').get() t.is(res1, res0) - const res2 = await coreClient.query('any', 'xxx').get() + const res2 = await coreClient.query2('any', 'xxx').get() t.is(res2, res1) }) @@ -130,25 +130,25 @@ test('get', async (t: T) => { }, }) - const str = await coreClient.query('any', 'xxx').get() + const str = await coreClient.query2('any', 'xxx').get() t.is(str, 'xxx') const nestedStr = await coreClient.call('nestedAny', 'xxx') t.is(nestedStr, 'xxx') - const num = await coreClient.query('any', 19).get() + const num = await coreClient.query2('any', 19).get() t.is(num, 19) const nestedNum = await coreClient.call('nestedAny', 19) t.is(nestedNum, 19) - const boolTrue = await coreClient.query('any', true).get() + const boolTrue = await coreClient.query2('any', true).get() t.is(boolTrue, true) const nestedBoolTrue = await coreClient.call('nestedAny', true) t.is(nestedBoolTrue, true) - const boolFalse = await coreClient.query('any', false).get() + const boolFalse = await coreClient.query2('any', false).get() t.is(boolFalse, false) const nestedBoolFalse = await coreClient.call('nestedAny', false) t.is(nestedBoolFalse, false) const power = await coreClient - .query('checkPayload', { + .query2('checkPayload', { power: { msg: 'powerfull stuff', }, @@ -169,24 +169,24 @@ test('get', async (t: T) => { await wait(1e3) - t.is(await coreClient.query('counter').get(), 0) + t.is(await coreClient.query2('counter').get(), 0) await wait(100) - t.is(await coreClient.query('counter').get(), 0) + t.is(await coreClient.query2('counter').get(), 0) await wait(1000) // stays zero because it has 0 cache time - t.is(await coreClient.query('counter').get(), 0) + t.is(await coreClient.query2('counter').get(), 0) await wait(1000) t.is(Object.keys(server.activeObservables).length, 0) t.is(server.activeObservablesById.size, 0) - t.is(await coreClient.query('counter-cached').get(), 0) - t.is(await coreClient.query('counter-cached').get(), 0) + t.is(await coreClient.query2('counter-cached').get(), 0) + t.is(await coreClient.query2('counter-cached').get(), 0) await wait(1500) @@ -219,14 +219,14 @@ test.only('authorize get', async (t: T) => { }) const error: BasedError = await t.throwsAsync( - coreClient.query('counter').get(), + coreClient.query2('counter').get(), ) t.is(error.code, BasedErrorCode.AuthorizeRejectedError) await coreClient.setAuthState({ token: 'mock_token' }) - await t.notThrowsAsync(coreClient.query('counter').get()) + await t.notThrowsAsync(coreClient.query2('counter').get()) }) test('getWhen', async (t: T) => { @@ -267,7 +267,7 @@ test('getWhen', async (t: T) => { }, }) - const g = await client.query('flap').getWhen((d) => d.status) + const g = await client.query2('flap').getWhen((d) => d.status) t.is(g.count, 2) }) diff --git a/test/based-client/hooks.ts b/test/based-client/hooks.ts index 62636cad7b..cd3587646d 100644 --- a/test/based-client/hooks.ts +++ b/test/based-client/hooks.ts @@ -109,14 +109,14 @@ test('Query hook', async (t: T) => { flap: { type: 'function', fn: (based) => { - return based.query('myobs').get() + return based.query2('myobs').get() }, }, myobs2: { type: 'query', closeAfterIdleTime: 500, fn: (based, _payload, update) => { - return based.query('myobs').subscribe(update) + return based.query2('myobs').subscribe(update) }, }, myobs: { @@ -135,7 +135,7 @@ test('Query hook', async (t: T) => { await client.connect({ url: async () => t.context.ws, }) - const close = client.query('myobs', { bla: true }).subscribe(() => {}) + const close = client.query2('myobs', { bla: true }).subscribe(() => {}) await wait(500) @@ -146,7 +146,7 @@ test('Query hook', async (t: T) => { t.is(unSubCnt, 1) - await client.query('myobs').get() + await client.query2('myobs').get() t.is(getCnt, 1) @@ -154,7 +154,7 @@ test('Query hook', async (t: T) => { t.is(getCnt, 2) - const close2 = client.query('myobs2', { bla: true }).subscribe(() => {}) + const close2 = client.query2('myobs2', { bla: true }).subscribe(() => {}) await wait(500) diff --git a/test/based-client/lazyConnect.ts b/test/based-client/lazyConnect.ts index 9cec86bf6e..3fbd9687fc 100644 --- a/test/based-client/lazyConnect.ts +++ b/test/based-client/lazyConnect.ts @@ -70,7 +70,7 @@ test('lazyConnect', async (t: T) => { await wait(2e3) // let msgCnt = 0 - const close = client.query('cookie').subscribe( + const close = client.query2('cookie').subscribe( () => {}, () => { errCnt++ diff --git a/test/based-client/messages.ts b/test/based-client/messages.ts index f6fa94ae23..9f2225db4a 100644 --- a/test/based-client/messages.ts +++ b/test/based-client/messages.ts @@ -59,7 +59,7 @@ test('message incoming/outgoing', async (t: T) => { }) let cnt = 0 - const close = client.query('counter').subscribe(() => { + const close = client.query2('counter').subscribe(() => { cnt++ }) diff --git a/test/based-client/nestedFunctions.ts b/test/based-client/nestedFunctions.ts index e1c7ce2b1a..83cf7bca78 100644 --- a/test/based-client/nestedFunctions.ts +++ b/test/based-client/nestedFunctions.ts @@ -30,7 +30,7 @@ const testShared = async ( let cnt = 0 - const closeX = coreClient.query('counter').subscribe(() => { + const closeX = coreClient.query2('counter').subscribe(() => { cnt++ }) @@ -42,18 +42,18 @@ const testShared = async ( let incomingCntNoJson = 0 - const close = coreClient.query('obsWithNested').subscribe(() => { + const close = coreClient.query2('obsWithNested').subscribe(() => { incomingCntNoJson++ }) let incomingCnt = 0 - const close2 = coreClient.query('obsWithNested', 'json').subscribe(() => { + const close2 = coreClient.query2('obsWithNested', 'json').subscribe(() => { incomingCnt++ }) await wait(1e3) - const bla = await coreClient.query('obsWithNested', 'json').get() + const bla = await coreClient.query2('obsWithNested', 'json').get() t.is(bla.bla.length, 1e4) @@ -64,12 +64,12 @@ const testShared = async ( close2() const close3 = coreClient - .query('obsWithNestedLvl2', 'glurk') + .query2('obsWithNestedLvl2', 'glurk') .subscribe(() => { incomingCnt2++ }) - const bla2 = await coreClient.query('obsWithNestedLvl2', 'glakkel').get() + const bla2 = await coreClient.query2('obsWithNestedLvl2', 'glakkel').get() t.is(bla2.bla.length, 1e4) @@ -106,7 +106,7 @@ test.serial('nested functions (raw api)', async (t: T) => { closeAfterIdleTime: 1e3, uninstallAfterIdleTime: 1e3, fn: (based, _, update) => { - return based.query('obsWithNested', 'json').subscribe(update) + return based.query2('obsWithNested', 'json').subscribe(update) }, }, obsWithNested: { @@ -115,7 +115,7 @@ test.serial('nested functions (raw api)', async (t: T) => { uninstallAfterIdleTime: 1e3, fn: async (based, payload, update) => { return based - .query(payload === 'json' ? 'objectCounter' : 'counter', payload) + .query2(payload === 'json' ? 'objectCounter' : 'counter', payload) .subscribe(update) }, }, @@ -164,7 +164,7 @@ test.serial('nested functions (raw api)', async (t: T) => { uninstallAfterIdleTime: 1e3, fn: async (based, payload, context) => { const x = await based.call('hello', payload, context) - await based.query('obsWithNested', 'json', context).get() + await based.query2('obsWithNested', 'json', context).get() return x }, }, @@ -199,7 +199,7 @@ test.serial('nested functions (fancy api)', async (t: T) => { uninstallAfterIdleTime: 1e3, type: 'query', fn: (based, _, update) => { - return based.query('obsWithNested', 'json').subscribe(update) + return based.query2('obsWithNested', 'json').subscribe(update) }, }, obsWithNested: { @@ -208,7 +208,7 @@ test.serial('nested functions (fancy api)', async (t: T) => { type: 'query', fn: async (based, payload, update) => { return based - .query(payload === 'json' ? 'objectCounter' : 'counter', payload) + .query2(payload === 'json' ? 'objectCounter' : 'counter', payload) .subscribe(update) }, }, @@ -257,7 +257,7 @@ test.serial('nested functions (fancy api)', async (t: T) => { uninstallAfterIdleTime: 1e3, fn: async (based, payload, context) => { const x = await based.call('hello', payload, context) - await based.query('obsWithNested', 'json').get() + await based.query2('obsWithNested', 'json').get() return x }, }, diff --git a/test/based-client/nestedFunctionsError.ts b/test/based-client/nestedFunctionsError.ts index d2966e63fd..b6e4334a0b 100644 --- a/test/based-client/nestedFunctionsError.ts +++ b/test/based-client/nestedFunctionsError.ts @@ -90,7 +90,7 @@ test('nested query functions fn does not exist error', async (t: T) => { type: 'query', uninstallAfterIdleTime: 1e3, fn: async (based, _, update) => { - return based.query('blabla').subscribe(update) + return based.query2('blabla').subscribe(update) }, }, }, @@ -105,7 +105,7 @@ test('nested query functions fn does not exist error', async (t: T) => { const errors: any[] = [] let r = 0 - client.query('hello').subscribe( + client.query2('hello').subscribe( () => { r++ }, diff --git a/test/based-client/nestedQuerySimple.ts b/test/based-client/nestedQuerySimple.ts index 184cce0891..5c49d3f647 100644 --- a/test/based-client/nestedQuerySimple.ts +++ b/test/based-client/nestedQuerySimple.ts @@ -44,7 +44,7 @@ test('query simple', async (t: T) => { type: 'query', fn: (based, _, update) => { update(1) - return based.query('nested').subscribe((r) => { + return based.query2('nested').subscribe((r) => { internal.push(r) }) }, @@ -55,9 +55,9 @@ test('query simple', async (t: T) => { await server.start() client.connect({ url: t.context.ws }) - client.query('bla').subscribe(() => {}) + client.query2('bla').subscribe(() => {}) await wait(1000) - client.query('bla', { x: 1 }).subscribe(() => {}) + client.query2('bla', { x: 1 }).subscribe(() => {}) await wait(1000) t.true(internal.length > 1) for (const r of internal) { diff --git a/test/based-client/null.ts b/test/based-client/null.ts index 0b99f660a6..07b78fe2b3 100644 --- a/test/based-client/null.ts +++ b/test/based-client/null.ts @@ -43,7 +43,7 @@ test('null', async (t: T) => { nestedNull: { type: 'query', fn: (b, __, update) => { - return b.query('null').subscribe(update) + return b.query2('null').subscribe(update) }, }, }, @@ -57,18 +57,18 @@ test('null', async (t: T) => { }, }) - const val = await client.query('null').get() + const val = await client.query2('null').get() t.deepEqual(val, null) const x = await client.call('nullFn') t.deepEqual(x, null) - const val2 = await client.query('nestedNull').get() + const val2 = await client.query2('nestedNull').get() t.deepEqual(val2, null) const obs: any[] = [] - const close = client.query('null').subscribe((v) => { + const close = client.query2('null').subscribe((v) => { obs.push(v) }) diff --git a/test/based-client/payloadPerf.ts b/test/based-client/payloadPerf.ts index b364b79b76..8a64ea8e54 100644 --- a/test/based-client/payloadPerf.ts +++ b/test/based-client/payloadPerf.ts @@ -82,7 +82,7 @@ test.serial('query perf', async (t: T) => { }) let close = client - .query('counter', { + .query2('counter', { myQuery: 1, }) .subscribe(() => { @@ -107,7 +107,7 @@ test.serial('query perf', async (t: T) => { }) close = client - .query('counterUint8', { + .query2('counterUint8', { myQuery: 1, }) .subscribe(() => { diff --git a/test/based-client/persist.ts b/test/based-client/persist.ts index d94ac3edd6..905727a597 100644 --- a/test/based-client/persist.ts +++ b/test/based-client/persist.ts @@ -70,7 +70,7 @@ test.serial('persist, store 1M length array or 8mb (nodejs)', async (t: T) => { const r: any[] = [] const close = client - .query( + .query2( 'counter', { myQuery: 123, @@ -82,7 +82,7 @@ test.serial('persist, store 1M length array or 8mb (nodejs)', async (t: T) => { }) client - .query( + .query2( 'bigData', { myQuery: 123, @@ -108,7 +108,7 @@ test.serial('persist, store 1M length array or 8mb (nodejs)', async (t: T) => { let fromStorage: any await new Promise((resolve) => client2 - .query( + .query2( 'counter', { myQuery: 123, @@ -125,7 +125,7 @@ test.serial('persist, store 1M length array or 8mb (nodejs)', async (t: T) => { await new Promise((resolve) => client2 - .query( + .query2( 'bigData', { myQuery: 123, diff --git a/test/based-client/protocolContentType.ts b/test/based-client/protocolContentType.ts index 9229bc6b76..ff13bef702 100644 --- a/test/based-client/protocolContentType.ts +++ b/test/based-client/protocolContentType.ts @@ -242,74 +242,74 @@ test.serial('fallback to old protocol - incoming', async (t: T) => { const bufResults: any[] = [] const closers = [ - client.query('errorQuery').subscribe((d, err) => { + client.query2('errorQuery').subscribe((d, err) => { obs1Results.push(d || err) }), - clientOld.query('errorQuery').subscribe((d, err) => { + clientOld.query2('errorQuery').subscribe((d, err) => { obs2Results.push(d || err) }), - client.query('nullQuery').subscribe((d) => { + client.query2('nullQuery').subscribe((d) => { obs1Results.push(d === null ? undefined : d) }), - clientOld.query('nullQuery').subscribe((d) => { + clientOld.query2('nullQuery').subscribe((d) => { obs2Results.push(d) }), - client.query('undefinedQuery').subscribe((d) => { + client.query2('undefinedQuery').subscribe((d) => { obs1Results.push(d) }), - clientOld.query('undefinedQuery').subscribe((d) => { + clientOld.query2('undefinedQuery').subscribe((d) => { obs2Results.push(d) }), - client.query('numberQuery').subscribe((d) => { + client.query2('numberQuery').subscribe((d) => { obs1Results.push(d) }), - clientOld.query('numberQuery').subscribe((d) => { + clientOld.query2('numberQuery').subscribe((d) => { obs2Results.push(d) }), client - .query('stringQuery', { + .query2('stringQuery', { myQuery: 123, }) .subscribe((d) => { obs1Results.push(d) }), clientOld - .query('stringQuery', { + .query2('stringQuery', { myQuery: 123, }) .subscribe((d) => { obs2Results.push(d) }), client - .query('bigStringQuery', { + .query2('bigStringQuery', { myQuery: 123, }) .subscribe((d) => { obs1Results.push(d) }), clientOld - .query('bigStringQuery', { + .query2('bigStringQuery', { myQuery: 123, }) .subscribe((d) => { obs2Results.push(d) }), client - .query('flap', { + .query2('flap', { myQuery: 123, }) .subscribe((d) => { obs1Results.push(d) }), clientOld - .query('flap', { + .query2('flap', { myQuery: 123, }) .subscribe((d) => { obs2Results.push(d) }), client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d) => { diff --git a/test/based-client/query.ts b/test/based-client/query.ts index a1a7a90570..d7b643e501 100644 --- a/test/based-client/query.ts +++ b/test/based-client/query.ts @@ -48,7 +48,7 @@ test.only('query functions', async (t: T) => { const obs2Results: any[] = [] const close = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d) => { @@ -56,7 +56,7 @@ test.only('query functions', async (t: T) => { }) const close2 = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d) => { diff --git a/test/based-client/queryCache.ts b/test/based-client/queryCache.ts index c9feb83efe..4273c92cf9 100644 --- a/test/based-client/queryCache.ts +++ b/test/based-client/queryCache.ts @@ -51,7 +51,7 @@ test('query cache', async (t: T) => { for (let i = 0; i < 1000; i++) { const close = client - .query('counter', { + .query2('counter', { myQuery: i, }) .subscribe((d) => { @@ -65,7 +65,7 @@ test('query cache', async (t: T) => { await wait(1000) const close = client - .query('counter', { + .query2('counter', { myQuery: 1212, }) .subscribe((d) => { diff --git a/test/based-client/queryCtxBound.ts b/test/based-client/queryCtxBound.ts index 53fcd73fe1..8cff7c7ba3 100644 --- a/test/based-client/queryCtxBound.ts +++ b/test/based-client/queryCtxBound.ts @@ -67,7 +67,7 @@ test('query ctx bound + default verifyAuthState', async (t: T) => { const errs: any[] = [] let close = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe( @@ -105,7 +105,7 @@ test('query ctx bound + default verifyAuthState', async (t: T) => { close() const r2: any[] = [] - close = client.query('counter', 'error').subscribe( + close = client.query2('counter', 'error').subscribe( (d) => { r2.push({ ...d }) }, @@ -118,10 +118,13 @@ test('query ctx bound + default verifyAuthState', async (t: T) => { close() - t.deepEqual(await client.query('counter').get(), { userId: 1, cnt: 0 }) - t.deepEqual(await client.query('counter', 'bla').get(), { userId: 1, cnt: 0 }) + t.deepEqual(await client.query2('counter').get(), { userId: 1, cnt: 0 }) + t.deepEqual(await client.query2('counter', 'bla').get(), { + userId: 1, + cnt: 0, + }) - t.throwsAsync(() => client.query('counter', 'error').get()) + t.throwsAsync(() => client.query2('counter', 'error').get()) await wait(1000) @@ -181,7 +184,7 @@ test('query ctx bound on authState.token', async (t: T) => { const results: any[] = [] const close = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d) => { @@ -274,7 +277,7 @@ test('query ctx bound on authState.userId require auth', async (t: T) => { const errs: any[] = [] let close = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe( @@ -312,7 +315,7 @@ test('query ctx bound on authState.userId require auth', async (t: T) => { close() const r2: any[] = [] - close = client.query('counter', 'error').subscribe( + close = client.query2('counter', 'error').subscribe( (d) => { r2.push({ ...d }) }, @@ -325,10 +328,13 @@ test('query ctx bound on authState.userId require auth', async (t: T) => { close() - t.deepEqual(await client.query('counter').get(), { userId: 1, cnt: 0 }) - t.deepEqual(await client.query('counter', 'bla').get(), { userId: 1, cnt: 0 }) + t.deepEqual(await client.query2('counter').get(), { userId: 1, cnt: 0 }) + t.deepEqual(await client.query2('counter', 'bla').get(), { + userId: 1, + cnt: 0, + }) - t.throwsAsync(() => client.query('counter', 'error').get()) + t.throwsAsync(() => client.query2('counter', 'error').get()) await wait(1000) @@ -408,11 +414,11 @@ test('query ctx bound on geo', async (t: T) => { const results: any[] = [] - const close = client.query('counter').subscribe((d) => { + const close = client.query2('counter').subscribe((d) => { results.push({ ...d }) }) - const close2 = client2.query('counter').subscribe((d) => { + const close2 = client2.query2('counter').subscribe((d) => { results.push({ ...d }) }) @@ -468,7 +474,7 @@ test('query ctx bound internal (nested calls)', async (t: T) => { closeAfterIdleTime: 1, uninstallAfterIdleTime: 1e3, fn: async (based, payload, update, error, ctx) => { - return based.query('nest', payload, ctx).subscribe(update) + return based.query2('nest', payload, ctx).subscribe(update) }, }, }, @@ -483,7 +489,7 @@ test('query ctx bound internal (nested calls)', async (t: T) => { }) await client.once('connect') const results: any[] = [] - const close = client.query('counter').subscribe((d) => { + const close = client.query2('counter').subscribe((d) => { results.push({ ...d }) }) await wait(250) @@ -536,7 +542,7 @@ test('query ctx bound internal (nested call from call)', async (t: T) => { public: true, uninstallAfterIdleTime: 1e3, fn: async (based, payload, ctx) => { - return based.query('nest', payload, ctx).get() + return based.query2('nest', payload, ctx).get() }, }, }, @@ -604,7 +610,7 @@ test.serial('ctxBound attachCtx perf', async (t: T) => { if (cnt === amount) { resolve() } - return based.query('nest', payload, ctx).get() + return based.query2('nest', payload, ctx).get() }, }, }, diff --git a/test/based-client/queryDiff.ts b/test/based-client/queryDiff.ts index 403ba6fb0d..f6235f9391 100644 --- a/test/based-client/queryDiff.ts +++ b/test/based-client/queryDiff.ts @@ -59,7 +59,7 @@ test('observablesDiff', async (t: T) => { const results: any[] = [] const close = coreClient - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d) => { diff --git a/test/based-client/queryErrorHandling.ts b/test/based-client/queryErrorHandling.ts index 1e81d9d790..839efdfafd 100644 --- a/test/based-client/queryErrorHandling.ts +++ b/test/based-client/queryErrorHandling.ts @@ -43,7 +43,7 @@ test('query error nested', async (t: T) => { type: 'query', fn: (based, _, update) => { update(1) - return based.query('nested').subscribe( + return based.query2('nested').subscribe( (r) => { r.flap.snurp }, @@ -55,7 +55,7 @@ test('query error nested', async (t: T) => { type: 'query', fn: (based, _, update, onError) => { update(1) - return based.query('nested').subscribe(async (r) => { + return based.query2('nested').subscribe(async (r) => { await wait(1) r.cookiepants.snurp }, onError) @@ -69,29 +69,29 @@ test('query error nested', async (t: T) => { const closers = [] client.connect({ url: t.context.ws }) - client.query('bla').subscribe( + client.query2('bla').subscribe( () => {}, () => {}, ) let errs: any[] = [] await wait(10) - client.query('bla', { x: 1 }).subscribe( + client.query2('bla', { x: 1 }).subscribe( () => {}, (err) => { errs.push(err) }, ) - client.query('bla', new Uint8Array(1000)).subscribe( + client.query2('bla', new Uint8Array(1000)).subscribe( () => {}, () => {}, ) - client.query('asyncBla', new Uint8Array(1000)).subscribe( + client.query2('asyncBla', new Uint8Array(1000)).subscribe( () => {}, () => {}, ) - client.query('asyncBla', new Uint8Array(1000)).subscribe( + client.query2('asyncBla', new Uint8Array(1000)).subscribe( () => {}, (err) => { errs.push(err) @@ -142,7 +142,7 @@ test('query error alternate', async (t: T) => { const errs: any[] = [] const results: any[] = [] - const close = client.query('bla', { x: 1 }).subscribe( + const close = client.query2('bla', { x: 1 }).subscribe( (d) => { results.push(d) }, @@ -218,7 +218,7 @@ test('query error alternate auth', async (t: T) => { const errs: any[] = [] const results: any[] = [] - const close = client.query('bla', { x: 1 }).subscribe( + const close = client.query2('bla', { x: 1 }).subscribe( (d) => { results.push(d) }, diff --git a/test/based-client/queryInstancePerf.ts b/test/based-client/queryInstancePerf.ts index 5e2d90f278..a3d22a434c 100644 --- a/test/based-client/queryInstancePerf.ts +++ b/test/based-client/queryInstancePerf.ts @@ -73,7 +73,7 @@ test('query functions perf (100k query fn instances)', async (t: T) => { for (let i = 0; i < 1e5; i++) { closers.push( client - .query('counter', { + .query2('counter', { myQuery: i, }) .subscribe(() => { diff --git a/test/based-client/queryReusedDiff.ts b/test/based-client/queryReusedDiff.ts index bee8932a09..02ce9e3a4f 100644 --- a/test/based-client/queryReusedDiff.ts +++ b/test/based-client/queryReusedDiff.ts @@ -59,7 +59,7 @@ test('query reuse diff', async (t: T) => { const obs2Results: any[] = [] const close = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d, checksum) => { @@ -70,7 +70,7 @@ test('query reuse diff', async (t: T) => { close() const close2 = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d, checksum) => { diff --git a/test/based-client/queryUint8Payload.ts b/test/based-client/queryUint8Payload.ts index 2bbe825b62..0aa2405e41 100644 --- a/test/based-client/queryUint8Payload.ts +++ b/test/based-client/queryUint8Payload.ts @@ -63,11 +63,11 @@ test('query uint8Array args', async (t: T) => { flap2[100] = 1 - const close = client.query('counter', flap).subscribe((d) => { + const close = client.query2('counter', flap).subscribe((d) => { obs1Results.push(d) }) - const close2 = client.query('counter', flap2).subscribe((d) => { + const close2 = client.query2('counter', flap2).subscribe((d) => { obs1Results.push(d) }) @@ -81,7 +81,7 @@ test('query uint8Array args', async (t: T) => { close() close2() - const x = await client.query('bla', flap2).get() + const x = await client.query2('bla', flap2).get() t.is(x.length, 8888890) diff --git a/test/based-client/reEvaluateAuthState.ts.ts b/test/based-client/reEvaluateAuthState.ts.ts index 0836371481..01fa09b2b3 100644 --- a/test/based-client/reEvaluateAuthState.ts.ts +++ b/test/based-client/reEvaluateAuthState.ts.ts @@ -80,7 +80,7 @@ test('re-evaluate authState', async (t: T) => { let counter = 0 await t.notThrowsAsync( new Promise((resolve) => { - client.query('counter').subscribe( + client.query2('counter').subscribe( () => { counter++ }, diff --git a/test/based-client/relay.ts b/test/based-client/relay.ts index 87fd1f2fff..318d876374 100644 --- a/test/based-client/relay.ts +++ b/test/based-client/relay.ts @@ -122,7 +122,7 @@ test('Relay', async (t: T) => { }, }) - const x = await client.query('counter').get() + const x = await client.query2('counter').get() t.is(x, 1) const hello = await client.call('hello', { snap: 'je' }) t.is(hello, 'from hello je') @@ -139,7 +139,7 @@ test('Relay', async (t: T) => { t.deepEqual(msges, ['bla']) - const count = await client.query('flap').get() + const count = await client.query2('flap').get() t.true(count > 0) diff --git a/test/based-client/ssr.ts b/test/based-client/ssr.ts index 4632d57839..8af09fe645 100644 --- a/test/based-client/ssr.ts +++ b/test/based-client/ssr.ts @@ -48,8 +48,8 @@ test('query cache', async (t: T) => { }, }) - await client.query('counter').get() - await client.query('counter', { bla: true }).get() + await client.query2('counter').get() + await client.query2('counter', { bla: true }).get() const script = createInlineFromCurrentCache(client, [{ endpoint: 'counter' }]) @@ -68,7 +68,7 @@ test('query cache', async (t: T) => { client.cache.clear() const selectiveGet = await createInlineCache(client, [ - client.query('counter'), + client.query2('counter'), ]) t.deepEqual( diff --git a/test/based-client/throttle.ts b/test/based-client/throttle.ts index afc513238f..9dc2089af8 100644 --- a/test/based-client/throttle.ts +++ b/test/based-client/throttle.ts @@ -51,7 +51,7 @@ test('throttle', async (t: T) => { const obs1Results: any[] = [] const close = client - .query('counter', { + .query2('counter', { myQuery: 123, }) .subscribe((d) => { diff --git a/test/bench.perf.ts b/test/bench.perf.ts index 65d1799fdb..a0c01eda52 100644 --- a/test/bench.perf.ts +++ b/test/bench.perf.ts @@ -29,7 +29,7 @@ await test('test embedded', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema(schema) @@ -51,7 +51,7 @@ await test('test embedded', async (t) => { res = ( await Promise.all( arr.map(() => - db.query('test').filter('x', '=', 0).range(1, 10_001).get(), + db.query2('test').filter('x', '=', 0).range(1, 10_001).get(), ), ) ).reduce((prev, cur) => prev + cur.length, 0) @@ -64,7 +64,7 @@ await test('test embedded', async (t) => { await Promise.all( Array.from({ length: N3 }).map(async (_, i) => { await s.acquire() - db.query('test', i + 1) + db.query2('test', i + 1) .get() .then(() => s.release()) }), @@ -72,7 +72,7 @@ await test('test embedded', async (t) => { await s.drain() //res = ( // await Promise.all( - // Array.from({ length: N3 }).map((_, i) => db.query('test', i + 1).get()), + // Array.from({ length: N3 }).map((_, i) => db.query2('test', i + 1).get()), // ) //).reduce((prev, cur) => prev + cur.length, 0) }, 'q1') @@ -110,7 +110,7 @@ await test('test client-server', async (t) => { res = ( await Promise.all( arr.map(() => - client1.query('test').filter('x', '=', 0).range(1, 10_001).get(), + client1.query2('test').filter('x', '=', 0).range(1, 10_001).get(), ), ) ).reduce((prev, cur) => prev + cur.length, 0) @@ -122,7 +122,7 @@ await test('test client-server', async (t) => { res = ( await Promise.all( Array.from({ length: N3 }).map((_, i) => - client1.query('test', i + 1).get(), + client1.query2('test', i + 1).get(), ), ) ).reduce((prev, cur) => prev + cur.length, 0) diff --git a/test/bigNode.perf.ts b/test/bigNode.perf.ts index cbb7947d90..62acefb4e1 100644 --- a/test/bigNode.perf.ts +++ b/test/bigNode.perf.ts @@ -1,16 +1,10 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' import { SchemaProps, serialize } from '../src/schema/index.js' import { deSerializeSchema, resultToObject } from '../src/protocol/index.js' +import { testDb } from './shared/index.js' await test('big nodes', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const makeALot = (n: number) => { const props: SchemaProps = {} for (let i = 0; i < n; i++) { @@ -19,7 +13,7 @@ await test('big nodes', async (t) => { return props } - await db.setSchema({ + const db = await testDb(t, { types: { mega: { props: { @@ -54,8 +48,8 @@ await test('big nodes', async (t) => { }) await db.drain() - const mega = (await db.query('mega').get()).toObject() - const giga = (await db.query('giga').get()).toObject() + const mega = await db.query2('mega').get() + const giga = await db.query2('giga').get() deepEqual(mega[1].f4092, 1337) deepEqual(giga[1].f100, 1337) @@ -63,15 +57,15 @@ await test('big nodes', async (t) => { db.update('giga', giga1, { ref: mega2 }) await db.drain() - const megaRefQ = await db.query('mega').include('ref').get() + const megaRefQ = await db.query2('mega').include('ref').get() - const megaRef = megaRefQ.toObject() - const gigaRef = (await db.query('giga').include('ref').get()).toObject() + const megaRef = megaRefQ + const gigaRef = await db.query2('giga').include('ref').get() deepEqual(gigaRef[0].ref.id, 2) deepEqual(megaRef[1].ref.id, 1) - const megaInclude = await db.query('mega').get() + const megaInclude = await db.query2('mega').get() const serializedSchema = serialize(megaInclude.def.readSchema!) const deserializedSchema = deSerializeSchema(serializedSchema) @@ -85,7 +79,7 @@ await test('big nodes', async (t) => { deepEqual(obj[1].f4092, 1337) - const megaIncludeSelective = await db.query('mega').include('f4092').get() + const megaIncludeSelective = await db.query2('mega').include('f4092').get() const serializedSchemaSmall = serialize(megaIncludeSelective.def.readSchema!) const deserializedSchemaSmall = deSerializeSchema(serializedSchemaSmall) diff --git a/test/binary.ts b/test/binary.ts index ea68816ca2..4204d59734 100644 --- a/test/binary.ts +++ b/test/binary.ts @@ -1,18 +1,13 @@ -import { BasedDb } from '../src/index.js' import { ENCODER } from '../src/utils/uint8.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual, equal } from './shared/assert.js' import { italy } from './shared/examples.js' import { notEqual } from 'node:assert' +import { checksum as q2checksum } from '../src/db-client/query2/index.js' await test('simple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -29,7 +24,7 @@ await test('simple', async (t) => { await db.drain() deepEqual( - (await db.query('user').get()).toObject(), + await db.query2('user').get(), [ { id: 1, @@ -43,7 +38,7 @@ await test('simple', async (t) => { file: new Uint8Array([1, 2, 3, 4]), }) - deepEqual((await db.query('user', id).get()).toObject(), { + deepEqual(await db.query2('user', id).get(), { id, file: new Uint8Array([1, 2, 3, 4]), }) @@ -53,20 +48,11 @@ await test('simple', async (t) => { file: italyBytes, }) - equal( - (await db.query('user', id2).get()).toObject().file.length, - italyBytes.byteLength, - ) + equal((await db.query2('user', id2).get()).file.length, italyBytes.byteLength) }) await test('binary and crc32', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { article: { @@ -80,13 +66,13 @@ await test('binary and crc32', async (t) => { article: new Uint8Array([1]), }) - const checksum = (await db.query('user', user1).get()).checksum + const checksum = q2checksum(await db.query2('user', user1).get()) await db.update('user', user1, { article: new Uint8Array([2]), }) - const checksum2 = (await db.query('user', user1).get()).checksum + const checksum2 = q2checksum(await db.query2('user', user1).get()) notEqual(checksum, checksum2, 'Checksum is not the same') }) diff --git a/test/boolean.perf.ts b/test/boolean.perf.ts index 06956e73ea..fb2aeddc5d 100644 --- a/test/boolean.perf.ts +++ b/test/boolean.perf.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { perf } from './shared/assert.js' await test('create 1m booleans', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { boolean: 'boolean', diff --git a/test/boolean.ts b/test/boolean.ts index f3378463ed..006c6178fe 100644 --- a/test/boolean.ts +++ b/test/boolean.ts @@ -1,16 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('boolean', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - // t.after(() => t.backup(db)) - t.after(() => db.stop(true)) - - await db.setSchema({ + const client = await testDb(t, { types: { user: { props: { @@ -20,34 +13,33 @@ await test('boolean', async (t) => { }, }) - const user1 = await db.create('user', {}) + await client.create('user', {}) - db.create('user', { + await client.create('user', { isNice: true, }) - db.create('user', { + await client.create('user', { isNice: false, }) - await db.drain() + await client.drain() - deepEqual((await db.query('user').get()).toObject(), [ + deepEqual(await client.query2('user').get(), [ { id: 1, isNice: false }, { id: 2, isNice: true }, { id: 3, isNice: false }, ]) - deepEqual( - (await db.query('user').filter('isNice', '=', true).get()).toObject(), - [{ id: 2, isNice: true }], - ) + deepEqual(await client.query2('user').filter('isNice', '=', true).get(), [ + { id: 2, isNice: true }, + ]) - deepEqual((await db.query('user').filter('isNice').get()).toObject(), [ + deepEqual(await client.query2('user').filter('isNice').get(), [ { id: 2, isNice: true }, ]) - deepEqual((await db.query('user').filter('isNice', false).get()).toObject(), [ + deepEqual(await client.query2('user').filter('isNice', '=', false).get(), [ { id: 1, isNice: false }, { id: 3, isNice: false }, ]) diff --git a/test/capped.ts b/test/capped.ts index e035f9b8cd..ce811465a8 100644 --- a/test/capped.ts +++ b/test/capped.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('capped type', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { meas: { capped: 3, @@ -22,80 +16,80 @@ await test('capped type', async (t) => { }, }) - db.create('meas', { + client.create('meas', { temperature: 0, humidity: 99, }) - db.create('meas', { + client.create('meas', { temperature: 1, humidity: 98, }) - db.create('meas', { + client.create('meas', { temperature: 2, humidity: 97, wind: 50, }) - deepEqual((await db.query('meas').get()).toObject(), [ + deepEqual(await client.query2('meas').get(), [ { id: 1, temperature: 0, humidity: 99, wind: 10 }, { id: 2, temperature: 1, humidity: 98, wind: 10 }, { id: 3, temperature: 2, humidity: 97, wind: 50 }, ]) - db.create('meas', { + client.create('meas', { temperature: -100, humidity: 1, }) - deepEqual((await db.query('meas').get()).toObject(), [ + deepEqual(await client.query2('meas').get(), [ { id: 1, temperature: -100, humidity: 1, wind: 10 }, { id: 2, temperature: 1, humidity: 98, wind: 10 }, { id: 3, temperature: 2, humidity: 97, wind: 50 }, ]) - db.create('meas', { + client.create('meas', { temperature: -50, humidity: 1, wind: 5, }) - db.create('meas', { + client.create('meas', { temperature: -40, humidity: 1, }) - deepEqual((await db.query('meas').get()).toObject(), [ + deepEqual(await client.query2('meas').get(), [ { id: 1, temperature: -100, humidity: 1, wind: 10 }, { id: 2, temperature: -50, humidity: 1, wind: 5 }, { id: 3, temperature: -40, humidity: 1, wind: 10 }, ]) - db.create('meas', { + client.create('meas', { temperature: -50, humidity: 1, }) - db.create('meas', { + client.create('meas', { temperature: -40, humidity: 1, }) - db.create('meas', { + client.create('meas', { temperature: -50, humidity: 1, wind: 5, }) - db.create('meas', { + client.create('meas', { temperature: -40, humidity: 1, }) - db.create('meas', { + client.create('meas', { temperature: -50, humidity: 1, }) - db.create('meas', { + client.create('meas', { temperature: -40, humidity: 1, }) - deepEqual((await db.query('meas').get()).toObject(), [ + deepEqual(await client.query2('meas').get(), [ { id: 1, temperature: -40, humidity: 1, wind: 10 }, { id: 2, temperature: -50, humidity: 1, wind: 10 }, { id: 3, temperature: -40, humidity: 1, wind: 10 }, @@ -103,13 +97,7 @@ await test('capped type', async (t) => { }) await test('capped references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { user: { props: { @@ -135,12 +123,12 @@ await test('capped references', async (t) => { }, }) - const user = await db.create('user', {}) + const user = await client.create('user', {}) for (let i = 0; i < 10; i++) { - db.create('article', { latest: user }) + client.create('article', { latest: user }) } - deepEqual(await db.query('user', user).include('**').get(), { + deepEqual(await client.query2('user', user).include('**').get(), { id: 1, latestArticles: [{ id: 6 }, { id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }], }) diff --git a/test/cardinality.ts b/test/cardinality.ts index 5cd999e926..2a15ae245d 100644 --- a/test/cardinality.ts +++ b/test/cardinality.ts @@ -1,16 +1,11 @@ -import { BasedDb, xxHash64 } from '../src/index.js' +import { xxHash64 } from '../src/index.js' import { ENCODER } from '../src/utils/uint8.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual } from './shared/assert.js' await test('hll', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { derp: 'number', @@ -43,13 +38,12 @@ await test('hll', async (t) => { myUniqueValuesCount: 'myCoolValue', }) + console.log('a') deepEqual( - ( - await db - .query('article') - .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') - .get() - ).toObject(), + await db + .query2('article') + .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') + .get(), [ { id: 1, @@ -58,15 +52,14 @@ await test('hll', async (t) => { }, ], ) + console.log('b') deepEqual( - ( - await db - .query('article') - .include('myUniqueValuesCount') - .filter('myUniqueValuesCount', '!=', 0) - .get() - ).toObject(), + await db + .query2('article') + .include('myUniqueValuesCount') + .filter('myUniqueValuesCount', '!=', 0) + .get(), [ { id: 1, @@ -98,12 +91,10 @@ await test('hll', async (t) => { }) deepEqual( - ( - await db - .query('article') - .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') - .get() - ).toObject(), + await db + .query2('article') + .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') + .get(), [ { id: 1, myUniqueValuesCount: 1, myUniqueValuesCountFromArray: 0 }, { id: 2, myUniqueValuesCountFromArray: 7, myUniqueValuesCount: 0 }, @@ -111,13 +102,11 @@ await test('hll', async (t) => { ) deepEqual( - ( - await db - .query('article') - .include('myUniqueValuesCountFromArray') - .filter('myUniqueValuesCountFromArray', '=', 7) - .get() - ).toObject(), + await db + .query2('article') + .include('myUniqueValuesCountFromArray') + .filter('myUniqueValuesCountFromArray', '=', 7) + .get(), [ { id: 2, @@ -127,13 +116,11 @@ await test('hll', async (t) => { ) deepEqual( - ( - await db - .query('article') - .include('myUniqueValuesCount') - .filter('myUniqueValuesCount', '>', 1) - .get() - ).toObject(), + await db + .query2('article') + .include('myUniqueValuesCount') + .filter('myUniqueValuesCount', '>', 1) + .get(), [], ) @@ -161,12 +148,10 @@ await test('hll', async (t) => { await db.drain() deepEqual( - ( - await db - .query('article') - .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') - .get() - ).toObject(), + await db + .query2('article') + .include('myUniqueValuesCount', 'myUniqueValuesCountFromArray') + .get(), [ { id: 1, myUniqueValuesCount: 7, myUniqueValuesCountFromArray: 0 }, { id: 2, myUniqueValuesCountFromArray: 7, myUniqueValuesCount: 0 }, @@ -193,13 +178,11 @@ await test('hll', async (t) => { await db.drain() deepEqual( - ( - await db - .query('article') - .filter('myUniqueValuesCount', '=', 11) - .or('myUniqueValuesCountFromArray', '>', 6) - .get() - ).toObject(), + await db + .query2('article') + .filter('myUniqueValuesCount', '=', 11) + .or('myUniqueValuesCountFromArray', '>', 6) + .get(), [ { id: 1, @@ -227,7 +210,7 @@ await test('hll', async (t) => { deepEqual( await db - .query('article') + .query2('article') .filter('id', '>=', 3) .include('contributors.$tokens') .get(), @@ -271,13 +254,11 @@ await test('hll', async (t) => { }) deepEqual( - ( - await db - .query('article') - .filter('id', '>=', 3) - .include('contributors.$tokens') - .get() - ).toObject(), + await db + .query2('article') + .filter('id', '>=', 3) + .include('contributors.$tokens') + .get(), [ { id: 3, @@ -294,11 +275,10 @@ await test('hll', async (t) => { // handle undefined case deepEqual( await db - .query('article') + .query2('article') .filter('id', '>=', 3) .include('contributors.$undeftokens') - .get() - .toObject(), + .get(), [ { id: 3, @@ -323,11 +303,10 @@ await test('hll', async (t) => { deepEqual( await db - .query('article') + .query2('article') .filter('id', '>=', 3) .include('contributors.$undeftokens') - .get() - .toObject(), + .get(), [ { id: 3, @@ -343,13 +322,7 @@ await test('hll', async (t) => { }) await test('switches', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { store: { name: 'string', @@ -365,14 +338,14 @@ await test('switches', async (t) => { const visits = ['Clint', 'Lee', 'Clint', 'Aldo', 'Lee'] - const store1 = db.create('store', { + const store1 = await db.create('store', { name: 'Handsome Sportsman', visitors: visits, visits: visits.length, }) deepEqual( - await db.query('store').include('visitors').get(), + await db.query2('store').include('visitors').get(), [ { id: 1, @@ -389,7 +362,7 @@ await test('switches', async (t) => { await db.drain() deepEqual( - await db.query('store').include('visitors').get(), + await db.query2('store').include('visitors').get(), [ { id: 1, @@ -401,23 +374,14 @@ await test('switches', async (t) => { }) await test('defaultPrecision', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ - // props: { - // myRootCount: 'cardinality', - // }, + const db = await testDb(t, { types: { - stores: { + store: { name: 'string', customers: { items: { ref: 'customer', - prop: 'customer', + prop: 'client', }, }, }, @@ -432,10 +396,41 @@ await test('defaultPrecision', async (t) => { name: 'Alex Atala', productsBought: ['fork', 'knife', 'knife', 'frying pan'], }) - const sto = db.create('stores', { - name: "Worderland's Kitchen", + const sto = db.create('store', { + name: 'Worderlands Kitchen', customers: [cus], }) - // await db.query('stores').include('*', '**').get().inspect() + const pb = await db.query2('customer').include('productsBought').get() + // pb.inspect() + deepEqual( + pb, + [ + { + id: 1, + productsBought: 3, + }, + ], + 'simple cardinality default precision', + ) + + const pbr = await db.query2('store').include('*', '**').get() + // pbr.inspect() + deepEqual( + pbr, + [ + { + id: 1, + name: 'Worderlands Kitchen', + customers: [ + { + id: 1, + name: 'Alex Atala', + productsBought: 3, + }, + ], + }, + ], + 'simple cardinality default precision on ref', + ) }) diff --git a/test/clientServer.perf.ts b/test/clientServer.perf.ts index 0959f4d621..aeaf9508eb 100644 --- a/test/clientServer.perf.ts +++ b/test/clientServer.perf.ts @@ -40,7 +40,7 @@ await test('client server rapid fire', async (t) => { }) userIds.push(userId) })(), - client.query('user').sort('name').include('name', 'users').get(), + client.query2('user').sort('name').include('name', 'users').get(), ) } } else { @@ -51,11 +51,7 @@ await test('client server rapid fire', async (t) => { }) await Promise.all(promises) - const allUsers1 = await clients[0] - .query('user') - .range(0, 100_000) - .get() - .toObject() + const allUsers1 = await clients[0].query2('user').range(0, 100_000).get() let id = 0 for (const user of allUsers1) { diff --git a/test/clientServer.ts b/test/clientServer.ts index b083101a04..471b6886a8 100644 --- a/test/clientServer.ts +++ b/test/clientServer.ts @@ -23,7 +23,7 @@ await test('client server basic', async (t) => { name: 'jamez', }) - deepEqual(await client1.query('user').get(), [ + deepEqual(await client1.query2('user').get(), [ { id: 1, name: 'youzi' }, { id: 2, name: 'jamez' }, ]) @@ -36,7 +36,7 @@ await test('client server basic', async (t) => { }, }) - deepEqual(await client1.query('user').get(), [ + deepEqual(await client1.query2('user').get(), [ { id: 1, age: 0 }, { id: 2, age: 0 }, ]) @@ -77,7 +77,7 @@ await test('client server basic', async (t) => { }) deepEqual( - await client1.query('user', res).include('*', '**').get(), + await client1.query2('user', res).include('*', '**').get(), { id: 1, age: 0, diff --git a/test/colvec.ts b/test/colvec.ts index 3c88b88cb3..7fef5e9ca0 100644 --- a/test/colvec.ts +++ b/test/colvec.ts @@ -1,15 +1,9 @@ import test from './shared/test.js' -import { BasedDb } from '../src/index.js' import { deepEqual, perf } from './shared/assert.js' +import { testDb } from './shared/index.js' -await test.skip('colvec', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +await test.skip('basic', async (t) => { + const client = await testDb(t, { types: { row: { props: { @@ -26,7 +20,7 @@ await test.skip('colvec', async (t) => { }, }) - deepEqual(db.server.schemaTypesParsed.col.blockCapacity, 10_000) + deepEqual(client.schemaTypesParsed.col.blockCapacity, 10_000) let seed = 100 const next = () => (seed = (214013 * seed + 2531011) % 10e3) @@ -43,18 +37,18 @@ await test.skip('colvec', async (t) => { await perf(async () => { for (let i = 0; i < N; i++) { genVec() - db.create('row', { vec }) + client.create('row', { vec }) } - await db.drain() + await client.drain() }, 'row') reset() await perf(async () => { for (let i = 0; i < N; i++) { genVec() - db.create('col', { vec }) + client.create('col', { vec }) } - await db.drain() + await client.drain() }, 'col') vec[0] = 2311.0 @@ -67,49 +61,15 @@ await test.skip('colvec', async (t) => { vec[7] = 7261.0 await perf(async () => { await db - .query('row') + .query2('row') .include('*') .filter('vec', 'like', vec, { fn: 'euclideanDistance', score: 1 }) .get() }, 'QUERY row') - - await perf(async () => { - global.__basedDb__native__.colvecTest( - db.server.dbCtxExternal, - 3, - 1, - 1, - N + 1, - ) - }, 'QUERY col') - - const res = await db.query('col').include('vec').range(0, 2).get().toObject() - deepEqual(res, [ - { - id: 1, - vec: new Float32Array([ - 2311, 5054, 1.5612034346858506e-39, 1.007378107771942e-37, - 3.76158192263132e-37, 1.6815581571897805e-44, 0, 5391, - ]), - }, - { - id: 2, - vec: new Float32Array([ - 5391, 5094, 1.5612034346858506e-39, 4.029512431087768e-37, - 3.76158192263132e-37, 1.6815581571897805e-44, 0, 6071, - ]), - }, - ]) }) -await test('colvec int8', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +await test('int8 vector', async (t) => { + const client = await testDb(t, { types: { col: { blockCapacity: 10_000, @@ -122,13 +82,13 @@ await test('colvec int8', async (t) => { }) for (let i = 0; i < 5; i++) { - db.create('col', { + client.create('col', { str: Int8Array.from([i + 1, i + 2, i + 3, i + 4]), }) } - // await db.drain() + await client.drain() - deepEqual(await db.query('col').include('str').get(), [ + deepEqual(await client.query2('col').include('str').get(), [ { id: 1, str: new Int8Array([1, 2, 3, 4]) }, { id: 2, str: new Int8Array([2, 3, 4, 5]) }, { id: 3, str: new Int8Array([3, 4, 5, 6]) }, @@ -137,14 +97,8 @@ await test('colvec int8', async (t) => { ]) }) -await test('colvec float32', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +await test('float32 vector', async (t) => { + const client = await testDb(t, { types: { col: { blockCapacity: 10_000, @@ -157,11 +111,11 @@ await test('colvec float32', async (t) => { }) for (let i = 0; i < 1; i++) { - db.create('col', { + client.create('col', { str: Float32Array.from([1.23123, 1.3]), }) } - deepEqual(await db.query('col').include('str').get(), [ + deepEqual(await client.query2('col').include('str').get(), [ { id: 1, str: new Float32Array([1.23123, 1.3]) }, ]) }) diff --git a/test/concurrency.perf.ts b/test/concurrency.perf.ts index 2a82db8479..b3d16f09ba 100644 --- a/test/concurrency.perf.ts +++ b/test/concurrency.perf.ts @@ -2,15 +2,10 @@ import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { italy } from './shared/examples.js' import { setTimeout as setTimeoutAsync } from 'timers/promises' +import { testDb } from './shared/index.js' await test('concurrency', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -47,7 +42,7 @@ await test('concurrency', async (t) => { queries++ try { const x = await db - .query('user') + .query2('user') .include((s) => s('friends').range(0, 10)) .range(0, 1000_000) .get() @@ -88,7 +83,7 @@ await test('many instances', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) return db }), ) @@ -114,7 +109,7 @@ await test('many instances', async (t) => { } await Promise.all( dbs.map((db) => - db.query('t').search('contribution', 's').include('i').get(), + db.query2('t').search('contribution', 's').include('i').get(), ), ) await Promise.all(dbs.map((db) => db.drain())) diff --git a/test/copy.ts b/test/copy.ts index 13534d15c7..0c5412c6fc 100644 --- a/test/copy.ts +++ b/test/copy.ts @@ -7,7 +7,7 @@ await test('copy', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema({ types: { @@ -91,10 +91,9 @@ await test('copy', async (t) => { }) const res = await db - .query('edition') + .query2('edition') .include('*', 'versionOf', 'versions', 'sequences', 'sequences.pages') .get() - .toObject() deepEqual(res, [ { diff --git a/test/crc32c.ts b/test/crc32c.ts index ca95a71ddb..863b6e5378 100644 --- a/test/crc32c.ts +++ b/test/crc32c.ts @@ -1,10 +1,10 @@ -import { BasedDb } from '../src/index.js' import { ENCODER } from '../src/utils/uint8.js' import test from './shared/test.js' import { equal } from './shared/assert.js' import { crc32 as nativeCrc32 } from '../src/index.js' import crc32c from '../src/hash/crc32c.js' import { LangCode } from '../src/zigTsExports.js' +import { testDb } from './shared/index.js' await test('Comparing hash generation collision', async (t) => { let crc32set = new Set() @@ -89,13 +89,7 @@ await test('Comparing hash generation collision', async (t) => { }) await test('simple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { transaction: { props: { @@ -118,22 +112,15 @@ await test('simple', async (t) => { myNativeMadeHash: nativeCrc32(ENCODER.encode('oid123')), }) - equal( - await db.query('transaction').include('id', 'myHash').get().toObject(), - [ - { - id: 1, - myHash: 2628032717, - }, - ], - ) + equal(await db.query2('transaction').include('id', 'myHash').get(), [ + { + id: 1, + myHash: 2628032717, + }, + ]) equal( - await db - .query('transactionN') - .include('id', 'myNativeMadeHash') - .get() - .toObject(), + await db.query2('transactionN').include('id', 'myNativeMadeHash').get(), [ { id: 1, diff --git a/test/db-schema/schema.ts b/test/db-schema/schema.ts index 6812162e4d..bfa8b9facf 100644 --- a/test/db-schema/schema.ts +++ b/test/db-schema/schema.ts @@ -1,21 +1,16 @@ import test from '../shared/test.js' -import { BasedDb } from '../../src/index.js' +import { DbClient, DbServer, getDefaultHooks } from '../../src/index.js' import { setTimeout } from 'node:timers/promises' import { deepEqual, throws } from '../shared/assert.js' +import { testDb, testDbClient, testDbServer } from '../shared/index.js' await test('support many fields on type', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.destroy()) - - const props = {} + const props: Record = {} for (let i = 0; i < 248; i++) { props['myProp' + i] = 'string' } - await db.setSchema({ + await testDb(t, { types: { flurp: props, }, @@ -23,13 +18,8 @@ await test('support many fields on type', async (t) => { }) await test('schema hash', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const server = await testDbServer(t) + const client = await testDbClient(server, { types: { flurp: { name: 'string', @@ -37,9 +27,9 @@ await test('schema hash', async (t) => { }, }) - const hash1 = db.server.schema!.hash + const hash1 = server.schema!.hash - await db.setSchema({ + await client.setSchema({ types: { flurp: { name: 'string', @@ -48,7 +38,7 @@ await test('schema hash', async (t) => { }, }) - const hash2 = db.server.schema!.hash + const hash2 = server.schema!.hash if (!hash1 || !hash2 || hash1 === hash2) { throw new Error('Incorrect hash') @@ -56,31 +46,31 @@ await test('schema hash', async (t) => { }) await test('dont accept modify with mismatch schema', async (t) => { - const db = new BasedDb({ - path: t.tmp, + const server = await testDbServer(t) + const client = new DbClient({ + hooks: Object.assign(getDefaultHooks(server), { + async flushModify(buf: Uint8Array) { + buf = new Uint8Array(buf) + await setTimeout(100) + return server.modify(buf) + }, + }), }) - await db.start({ clean: true }) - t.after(() => db.destroy()) - db.client.hooks.flushModify = async (buf) => { - buf = new Uint8Array(buf) - await setTimeout(100) - return db.server.modify(buf) - } - - await db.setSchema({ + await client.setSchema({ types: { flurp: { name: 'string', }, }, }) - await db.create('flurp', { + + await client.create('flurp', { name: 'xxx', }) - const q1 = db.query('flurp') - const setSchemaPromise = db.setSchema({ + const q1 = client.query2('flurp') + const setSchemaPromise = client.setSchema({ types: { flurp: { title: 'string', @@ -88,17 +78,19 @@ await test('dont accept modify with mismatch schema', async (t) => { }, }) - db.create('flurp', { + client.create('flurp', { name: 'yyy', }) + await setSchemaPromise throws(() => { - return db.create('flurp', { + return client.create('flurp', { name: 'zzz', }) }) - const res = await db.query('flurp').get().toObject() + + const res = (await client.query2('flurp').get()) as any deepEqual(res, [ { id: 1, title: '' }, @@ -107,12 +99,11 @@ await test('dont accept modify with mismatch schema', async (t) => { }) await test('set schema before start', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) + const server = new DbServer({ path: t.tmp }) + const client = await testDbClient(server) await throws(() => - db.setSchema({ + client.setSchema({ types: { flurp: { props: { @@ -123,6 +114,6 @@ await test('set schema before start', async (t) => { }), ) - await db.start({ clean: true }) - t.after(() => db.destroy()) + await server.start({ clean: true }) + t.after(() => server.destroy()) }) diff --git a/test/db-schema/schemaChange.ts b/test/db-schema/schemaChange.ts index 94422c46d9..2b981be12c 100644 --- a/test/db-schema/schemaChange.ts +++ b/test/db-schema/schemaChange.ts @@ -3,15 +3,11 @@ import { BasedDb } from '../../src/index.js' import { deepCopy } from '../../src/utils/index.js' import type { SchemaIn } from '../../src/schema/index.js' import { deepEqual } from '../shared/assert.js' +import { testDbClient, testDbServer } from '../shared/index.js' await test('set schema dont migrate', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) + const server = await testDbServer(t) + const client = await testDbClient(server) let schema = { types: { @@ -24,18 +20,18 @@ await test('set schema dont migrate', async (t) => { } let updates = 0 - db.client.on('schema', () => { + client.on('schema', () => { updates++ }) - await db.setSchema(deepCopy(schema) as SchemaIn) - await db.setSchema(deepCopy(schema) as SchemaIn) - await db.setSchema(deepCopy(schema) as SchemaIn) + await client.setSchema(deepCopy(schema) as SchemaIn) + await client.setSchema(deepCopy(schema) as SchemaIn) + await client.setSchema(deepCopy(schema) as SchemaIn) deepEqual(updates, 1, '1 update') // deepEqual(migrates, 0, '0 migrates') - await db.setSchema({ + await client.setSchema({ types: { yes: { name: 'string', @@ -65,7 +61,7 @@ await test('set schema dont migrate', async (t) => { deepEqual(updates, 2, '2 update') // deepEqual(migrates, 1, '1 migrates') - await db.setSchema({ + await client.setSchema({ types: { nope: { name: 'string', @@ -80,16 +76,16 @@ await test('set schema dont migrate', async (t) => { deepEqual(updates, 3, '3 update') // deepEqual(migrates, 2, '2 migrates') - await db.create('nope', { + await client.create('nope', { name: 'abe', }) - await db.create('yes', { + await client.create('yes', { name: 'bill', success: true, }) - await db.setSchema({ + await client.setSchema({ types: { nope: { name: 'string', diff --git a/test/db-schema/schemaDebug.ts b/test/db-schema/schemaDebug.ts index 6be56d8b58..bed86d1cc5 100644 --- a/test/db-schema/schemaDebug.ts +++ b/test/db-schema/schemaDebug.ts @@ -1,9 +1,8 @@ import { serverChildProcess } from '../shared/serverChildProcess.js' -import { DbClient, DbClientHooks, DbServer } from '../../src/index.js' +import { DbClient, DbClientHooks } from '../../src/index.js' import test from '../shared/test.js' import { deepCopy, deepMerge, wait } from '../../src/utils/index.js' import { copy, emptyDir } from 'fs-extra/esm' -import { deepEqual, equal } from '../shared/assert.js' import type { SchemaOut } from '../../src/schema/index.js' const cleanProps = (props) => { @@ -88,7 +87,7 @@ await test('schema debug', async (t) => { let i = 1 while (i--) { - const contestants = await client.query('contestant').get().toObject() + const contestants = await client.query2('contestant').get() await client.setSchema( deepMerge(schema, { diff --git a/test/db-schema/schemaProblems.ts b/test/db-schema/schemaProblems.ts deleted file mode 100644 index 642ccc610c..0000000000 --- a/test/db-schema/schemaProblems.ts +++ /dev/null @@ -1,253 +0,0 @@ -import test from '../shared/test.js' -import { BasedDb } from '../../src/index.js' -import { clientWorker } from '../shared/startWorker.js' -import { equal } from '../shared/assert.js' - -await test('schema problems', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => { - return t.backup(db) - }) - - const int = setInterval(async () => { - await db.save() - }, 1e3) - - t.after(() => { - clearInterval(int) - }) - - const q: any[] = [] - q.push( - clientWorker(t, db, async (c) => { - await c.query('flap').get().inspect() - }), - ) - - q.push( - clientWorker(t, db, async (c) => { - // c.query('flap') - // .count() - // .subscribe( - // (d) => { - // console.log('subc', d) - // }, - // (err) => { - // console.log(err) - // }, - // ) - - c.query('seq') - .include('flap') - .subscribe( - (d) => { - // console.log('sub3', d) - }, - (err) => { - // console.log(err) - }, - ) - - c.query('flap').subscribe( - (d) => { - // console.log('sub2', d) - }, - (err) => { - // console.log(err) - }, - ) - await new Promise((resolve) => setTimeout(resolve, 20000)) - }), - ) - - q.push( - clientWorker(t, db, async (c) => { - c.query('flap') - .include('flap') - .subscribe( - (d) => { - // console.log('sub', d) - }, - (err) => { - // console.log(err) - }, - ) - await new Promise((resolve) => setTimeout(resolve, 1000)) - }), - ) - q.push( - clientWorker(t, db, async (c) => { - await c.setSchema({ - types: { - seq: { - flap: { - items: { - ref: 'flap', - prop: 'seq', - }, - }, - }, - flap: { - props: { - email: 'alias', - x: 'uint8', - seq: { - ref: 'seq', - prop: 'flap', - }, - }, - }, - }, - }) - await c.create('flap', { - x: 10, - email: 'boink@boik.com', - }) - await c.create('seq', {}) - await new Promise((resolve) => setTimeout(resolve, 300)) - await c.setSchema({ - types: { - seq: { - mrDerpol: 'string', - flap: { - items: { - ref: 'flap', - prop: 'seq', - }, - }, - }, - flap: { - props: { - mrDerpol: 'string', - email: 'alias', - x: 'uint32', - flap: 'int8', - seq: { - ref: 'seq', - prop: 'flap', - }, - }, - }, - }, - }) - // console.log('schema 1 changed') - await new Promise((resolve) => setTimeout(resolve, 300)) - await c.setSchema({ - types: { - seq: { - flap: { - items: { - ref: 'flap', - prop: 'seq', - }, - }, - }, - flap: { - props: { - email: 'alias', - x: 'uint32', - flap: 'int8', - y: 'boolean', - seq: { - ref: 'seq', - prop: 'flap', - }, - }, - }, - }, - }) - // console.log('schema 2 changed') - }), - clientWorker(t, db, async (c) => { - await c.schemaIsSet() - c.flushTime = 0 - await new Promise((resolve) => setTimeout(resolve, 600)) - for (let i = 0; i < 1e5; i++) { - await c.create('flap', { - x: i, - email: `boinkx${i}@boik.com`, - seq: 1, - }) - await c.drain() - } - await c.drain() - }), - clientWorker(t, db, async (c) => { - await c.schemaIsSet() - await new Promise((resolve) => setTimeout(resolve, 600)) - - for (let i = 0; i < 5e5; i++) { - await c.create('flap', { - x: i, - email: `boinkDoink${i}@boik.com`, - seq: 1, - }) - if (i % 500 === 0) { - await c.drain() - } - } - await c.drain() - }), - clientWorker(t, db, async (c) => { - await c.schemaIsSet() - await new Promise((resolve) => setTimeout(resolve, 600)) - - for (let i = 0; i < 5e5; i++) { - await c.create('flap', { - x: i, - email: `boink${i}@boik.com`, - seq: 1, - }) - if (i % 1500 === 0) { - await c.drain() - } - } - await c.drain() - }), - ) - - await Promise.all(q) - - equal( - (await db.query('flap').count().get().inspect().toObject()).count, - 1_100_001, - ) - equal((await db.query('seq').count().get().inspect().toObject()).count, 1) - - // to Object on nested refs does not work if combin count + sum - equal( - ( - await db - .query('seq') - .include((s) => s('flap').count()) - .get() - .inspect() - .toObject() - )[0].flap.count, - 1_100_000, - ) - - // setSchema (client) - // validates the schema - // setServerLocalSchema(SERVER) - // add lastSchemaId and makes checksum - // schemaTypesParsed() - // setLocalSchema (client) - // adds client.schema and emits 'schema' - // schemaTypesParsed() - // subscribeSchema (client hook) - // listen on incoming schema (over network) and calls setLocalSchema - // migrateSchema - // ? - // schemaIsReady() < remove this - // make .once(schema) awaitable - // never put an empty schema on the top - - // server on schema - // client on schema -}) diff --git a/test/db-schema/schemaProblemsModify.perf.ts b/test/db-schema/schemaProblemsModify.perf.ts deleted file mode 100644 index eff9f52ab4..0000000000 --- a/test/db-schema/schemaProblemsModify.perf.ts +++ /dev/null @@ -1,64 +0,0 @@ -import test from '../shared/test.js' -import { BasedDb } from '../../src/index.js' -import { clientWorker } from '../shared/startWorker.js' - -await test('schema problems modify', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - const int = setInterval(async () => { - await db.save() - }, 1e3) - - t.after(() => { - clearInterval(int) - return t.backup(db) - }) - - await db.start({ clean: true }) - - const types = {} - for (let i = 0; i < 200; i++) { - types[(~~(Math.random() * 1e6)).toString(16)] = { - blurf: 'string', - flap: 'uint32', - gurk: 'text', - snak: 'string', - gook: 'alias', - gorgor: 'timestamp', - } - } - - const bla = Object.keys(types) - - await db.setSchema({ - locales: { en: {} }, - types, - }) - - const q: any[] = [] - - q.push( - clientWorker( - t, - db, - async (c, { bla }) => { - await c.schemaIsSet() - c.flushTime = 0 - await new Promise((resolve) => setTimeout(resolve, 600)) - for (let i = 0; i < 1e5; i++) { - await c.create(bla[~~(Math.random() * bla.length)], { - flap: i, - gook: `boinkx${i}@boik.com`, - gorgor: 1, - }) - } - await c.drain() - }, - { bla }, - ).catch((err) => {}), - ) - - await Promise.all(q) -}) diff --git a/test/db-schema/schemaProblemsSave.perf.ts b/test/db-schema/schemaProblemsSave.perf.ts deleted file mode 100644 index b13dc3d22a..0000000000 --- a/test/db-schema/schemaProblemsSave.perf.ts +++ /dev/null @@ -1,105 +0,0 @@ -import test from '../shared/test.js' -import { BasedDb } from '../../src/index.js' -import { randomString, wait } from '../../src/utils/index.js' -import { type Schema } from '../../src/schema/index.js' - -await test('schema problems save', async (t) => { - let db: BasedDb | null = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - await db.save() - - const types: Schema['types'] = {} - - for (let i = 0; i < 50; i++) { - types[(~~(Math.random() * 1e6)).toString(16)] = { - blurf: 'string', - flap: 'uint32', - gurk: 'text', - snak: 'string', - gook: 'alias', - gorgor: 'timestamp', - } - } - - const keys = Object.keys(types) - const type = keys[3] - - types[type] = Object.assign(types[type], { - myRef: { - ref: 'seq', - prop: 'votes', - }, - }) - - types.seq = { - votes: { - items: { - ref: type, - prop: 'myRef', - }, - }, - } - - await db.setSchema({ - locales: { en: {} }, - types, - }) - - const seqId = await db.create('seq', {}) - - for (let i = 0; i < 1e5; i++) { - await db.create(type, { - blurf: '213123 ' + i, - }) - } - - const id = await db.create(type, { - blurf: '213123', - }) - - let update = 0 - const int2 = setInterval(async () => { - if (db) { - update++ - await db.schemaIsSet() - if (update % 5 === 0) { - await db.create(type, { - blurf: randomString(1000), - }) - } - await db.update(type, id, { - blurf: randomString(1000), - myRef: seqId, - }) - } - }, 10) - - const int = setInterval(async () => { - let d = db - db = null - await d?.save() - await d?.stop() - d = new BasedDb({ - path: t.tmp, - }) - await d.start() - db = d - }, 1e3) - - const q = [] - - await Promise.all(q) - - await wait(20e3) - clearInterval(int) - clearInterval(int2) - await wait(1e3) - - if (db) { - return db.destroy() - } -}) diff --git a/test/db-schema/schemaUpdates.ts b/test/db-schema/schemaUpdates.ts index 9984f289e4..5a23d8e444 100644 --- a/test/db-schema/schemaUpdates.ts +++ b/test/db-schema/schemaUpdates.ts @@ -1,34 +1,20 @@ import { setTimeout } from 'node:timers/promises' -import { DbClient } from '../../src/db-client/index.js' -import { DbServer } from '../../src/db-server/index.js' import { deepEqual } from '../shared/assert.js' import test from '../shared/test.js' -import { BasedDb, getDefaultHooks } from '../../src/index.js' import { wait } from '../../src/utils/index.js' +import { testDb, testDbClient, testDbServer } from '../shared/index.js' await test('client server schema updates', async (t) => { - const server = new DbServer({ - path: t.tmp, - }) - - await server.start({ clean: true }) - t.after(() => server.destroy()) - - const client1 = new DbClient({ - hooks: getDefaultHooks(server), - }) - - const client2 = new DbClient({ - hooks: getDefaultHooks(server), - }) - - await client1.setSchema({ + const server = await testDbServer(t, { noBackup: true }) + const schema = { types: { user: { name: 'string', }, }, - }) + } as const + const client1 = await testDbClient(server, schema) + const client2 = await testDbClient(server) await client1.create('user', { name: 'youzi', @@ -38,12 +24,12 @@ await test('client server schema updates', async (t) => { name: 'jamez', }) - deepEqual(await client1.query('user').get(), [ + deepEqual(await client1.query2('user').get(), [ { id: 1, name: 'youzi' }, { id: 2, name: 'jamez' }, ]) - await client1.setSchema({ + const client1Updated = await client1.setSchema({ types: { user: { age: 'number', @@ -51,16 +37,12 @@ await test('client server schema updates', async (t) => { }, }) - deepEqual(await client1.query('user').get(), [ + deepEqual(await client1Updated.query2('user').get(), [ { id: 1, age: 0 }, { id: 2, age: 0 }, ]) - const ageSorted = await client2 - .query('user') - .sort('age', 'asc') - .get() - .toObject() + const ageSorted = await client2.query2('user').sort('age', 'asc').get() await client1.setSchema({ types: { @@ -70,11 +52,7 @@ await test('client server schema updates', async (t) => { }, }) - const ageSorted2 = await client1 - .query('user') - .sort('age', 'asc') - .get() - .toObject() + const ageSorted2 = await client1.query2('user').sort('age', 'asc').get() deepEqual(ageSorted, ageSorted2) @@ -89,29 +67,16 @@ await test('client server schema updates', async (t) => { }, }) - const ageSorted3 = await client1 - .query('user') - .sort('age', 'asc') - .get() - .toObject() + const ageSorted3 = await client1.query2('user').sort('age', 'asc').get() deepEqual(ageSorted3, ageSorted2) }) await test('rapid schema updates', async (t) => { - const server = new DbServer({ - path: t.tmp, - }) - await server.start({ clean: true }) - t.after(() => server.destroy()) + const server = await testDbServer(t, { noBackup: true }) - const client1 = new DbClient({ - hooks: getDefaultHooks(server), - }) - - const client2 = new DbClient({ - hooks: getDefaultHooks(server), - }) + const client1 = await testDbClient(server) + const client2 = await testDbClient(server) await client1.setSchema({ types: { @@ -159,39 +124,28 @@ await test('rapid schema updates', async (t) => { await Promise.all(promises) - const res = await client1.query('user').get().toObject() + const res = await client1.query2('user').get() deepEqual( [ { id: 1, name: 'youzi', field0: '' }, { id: 2, name: 'jamez', field0: '' }, ], - res, + res as any, ) }) await test('rapid modifies during schema update', async (t) => { - const server = new DbServer({ - path: t.tmp, - }) - await server.start({ clean: true }) - t.after(() => server.destroy()) - - const client1 = new DbClient({ - hooks: getDefaultHooks(server), - }) - - const client2 = new DbClient({ - hooks: getDefaultHooks(server), - }) - // console.log('set schema 1') - await client1.setSchema({ + const server = await testDbServer(t, { noBackup: true }) + const schema = { types: { user: { name: 'string', }, }, - }) + } as const + const client1 = await testDbClient(server, schema) + const client2 = await testDbClient(server) const youzies = 500_000 @@ -217,28 +171,22 @@ await test('rapid modifies during schema update', async (t) => { while (b--) { const name = 'jamex' + b const id = await client2.create('user', { name }) - const res = await client2.query('user', id).get().toObject() + const res = (await client2.query2('user', id).get())! deepEqual(res.id, id) deepEqual(res.name, name) } - const all = await client2.query('user').range(0, 1000_000).get().toObject() + const all = (await client2.query2('user').range(0, 1000_000).get())! // await wait(1e3) // console.log(all.length, all.slice(0, 10), all.slice(-10)) - deepEqual(all[0], { id: 1, name: 'youzi499999', age: 0 }) - deepEqual(all.at(-1), { id: 501000, name: 'jamex0', age: 0 }) + deepEqual(all[0], { id: 1, name: 'youzi499999', age: 0 } as any) + deepEqual(all.at(-1), { id: 501000, name: 'jamex0', age: 0 } as any) deepEqual(all.length, youzies + jamesies) }) await test('tree after schema update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { nurp: { props: { @@ -281,6 +229,4 @@ await test('tree after schema update', async (t) => { await db.create('user', { name: 'dr youz', }) - - await db.save() }) diff --git a/test/default.ts b/test/default.ts index fc4a9bffe1..eb7006e3c3 100644 --- a/test/default.ts +++ b/test/default.ts @@ -1,23 +1,7 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' import { convertToTimestamp } from '../src/utils/index.js' - -const derp = new Set([ - '$nice', - '$role', - '$count', - '$score', - '$flag', - '$amount', - '$big', - '$huge', - '$max', - '$label', - '$bin', - '$timestamp', - '$enum', -]) +import { testDb } from './shared/index.js' const defaultTimestamp = '2023-03-15T12:00:00.000Z' const defaultJson = { enabled: true, value: 10 } @@ -25,14 +9,8 @@ const defaultBinary = new Uint8Array([1, 2, 3]) const defaultText = { en: 'Default Label' } await test('edges', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - // Add all supported data types as edge properties (no date/text) - await db.setSchema({ + const client = await testDb(t, { types: { user: { props: { @@ -61,11 +39,11 @@ await test('edges', async (t) => { }, }) - const userId = await db.create('user', { - friends: [db.create('user')], + const userId = await client.create('user', { + friends: [client.create('user', {})], }) - deepEqual(await db.query('user', userId).include('friends.**').get(), { + deepEqual(await client.query2('user', userId).include('friends.**').get(), { id: 2, friends: [ { @@ -92,13 +70,13 @@ await test('edges', async (t) => { ], }) - await db.update('user', userId, { + await client.update('user', userId, { friends: { update: [{ id: 1, $role: 'derp' }], }, }) - deepEqual(await db.query('user', userId).include('friends.**').get(), { + deepEqual(await client.query2('user', userId).include('friends.**').get(), { id: 2, friends: [ { @@ -125,13 +103,13 @@ await test('edges', async (t) => { ], }) - await db.update('user', userId, { + await clint.update('user', userId, { friends: { update: [{ id: 1, $nice: false }], }, }) - deepEqual(await db.query('user', userId).include('friends.**').get(), { + deepEqual(await client.query2('user', userId).include('friends.**').get(), { id: 2, friends: [ { @@ -160,13 +138,7 @@ await test('edges', async (t) => { }) await test('separate', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, fi: {}, @@ -199,7 +171,7 @@ await test('separate', async (t) => { const userId = await db.create('user', {}) deepEqual( - await db.query('user', userId).include('*', '**').get(), + await db.query2('user', userId).include('*', '**').get(), { id: userId, flap: { @@ -220,7 +192,7 @@ await test('separate', async (t) => { }) deepEqual( - await db.query('user', userId2).include('*', '**').get(), + await db.query2('user', userId2).include('*', '**').get(), { id: userId2, flap: { @@ -236,13 +208,7 @@ await test('separate', async (t) => { }) await test('default values for all props in user type', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, }, @@ -266,6 +232,7 @@ await test('default values for all props in user type', async (t) => { default: defaultTimestamp, }, level: { + type: 'enum', enum: ['low', 'medium', 'high'], default: 'medium', }, @@ -294,6 +261,7 @@ await test('default values for all props in user type', async (t) => { // default: [], // something in there }, meta: { + type: 'object', props: { rating: { type: 'uint8', @@ -313,7 +281,7 @@ await test('default values for all props in user type', async (t) => { const userId = await db.create('user', {}) deepEqual( - await db.query('user', userId).include('*', '**').get(), + await db.query2('user', userId).include('*', '**').get(), { id: userId, isNice: true, @@ -343,7 +311,7 @@ await test('default values for all props in user type', async (t) => { }) deepEqual( - await db.query('user', userNullId).get(), + await db.query2('user', userNullId).get(), { id: 2, label: { en: 'Default Label' }, @@ -362,13 +330,7 @@ await test('default values for all props in user type', async (t) => { }) await test('negative default values for numeric types', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -394,7 +356,7 @@ await test('negative default values for numeric types', async (t) => { const userId = await db.create('user', {}) deepEqual( - await db.query('user', userId).get(), + await db.query2('user', userId).get(), { id: userId, negativeNumber: -42, @@ -411,7 +373,7 @@ await test('negative default values for numeric types', async (t) => { }) deepEqual( - await db.query('user', userOverrideId).get(), + await db.query2('user', userOverrideId).get(), { id: 2, negativeNumber: -100, @@ -423,13 +385,7 @@ await test('negative default values for numeric types', async (t) => { }) await test('object', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { snurp: { preferences: { @@ -448,7 +404,7 @@ await test('object', async (t) => { const snurpId = await db.create('snurp', {}) deepEqual( - await db.query('snurp', snurpId).get(), + await db.query2('snurp', snurpId).get(), { id: snurpId, preferences: { @@ -470,7 +426,7 @@ await test('object', async (t) => { }, }) - deepEqual(await db.query('snurp', snurpCustomId).get(), { + deepEqual(await db.query2('snurp', snurpCustomId).get(), { id: 2, preferences: { units: 'imperial', diff --git a/test/delete.perf.ts b/test/delete.perf.ts index 3e05b466ae..4474e5804e 100644 --- a/test/delete.perf.ts +++ b/test/delete.perf.ts @@ -8,7 +8,7 @@ await test('delete performance', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema({ types: { @@ -55,7 +55,7 @@ await test('delete performance', async (t) => { }, 'delete 1M users') assert(t0 < 400, 'delete 1M users') - deepEqual((await db.query('user').get()).toObject(), []) + deepEqual(await db.query2('user').get(), []) const amountArticles = 1e6 const articles: any[] = [] @@ -76,7 +76,7 @@ await test('delete performance', async (t) => { }, 'delete 1M articles') assert(t2 < 400, 'delete 1M users') - deepEqual((await db.query('article').get()).toObject(), []) + deepEqual(await db.query2('article').get(), []) const articles2: any[] = [] @@ -111,5 +111,5 @@ await test('delete performance', async (t) => { } await db.drain() }, 'delete 1M articles - again') // < 400 - deepEqual((await db.query('article').get()).toObject(), []) + deepEqual(await db.query2('article').get(), []) }) diff --git a/test/delete.ts b/test/delete.ts index af9dcf583e..8552b7905f 100644 --- a/test/delete.ts +++ b/test/delete.ts @@ -1,16 +1,10 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual, throws } from './shared/assert.js' -import assert from 'node:assert' +import { BasedDb, DbClient, getDefaultHooks } from '../src/sdk.js' +import { testDb } from './shared/index.js' await test('delete', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { nurp: { props: { @@ -38,11 +32,11 @@ await test('delete', async (t) => { await db.delete('user', simple) await db.drain() - deepEqual((await db.query('user').get()).toObject(), []) + deepEqual(await db.query2('user').get(), []) const nurp = db.create('nurp', {}) await db.drain() - deepEqual((await db.query('nurp').include('email').get()).toObject(), [ + deepEqual(await db.query2('nurp').include('email').get(), [ { email: '', id: 1, @@ -52,7 +46,7 @@ await test('delete', async (t) => { db.delete('nurp', nurp) await db.drain() - deepEqual((await db.query('user').include('email').get()).toObject(), []) + deepEqual(await db.query2('user').include('email').get(), []) const nurp2 = db.create('nurp', { email: 'flippie' }) await db.drain() @@ -62,7 +56,7 @@ await test('delete', async (t) => { }) await db.drain() - deepEqual((await db.query('nurp').include('email').get()).toObject(), [ + deepEqual(await db.query2('nurp').include('email').get(), [ { email: '', id: 2, @@ -71,13 +65,7 @@ await test('delete', async (t) => { }) await test('non existing 1', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { nurp: { props: { @@ -104,35 +92,31 @@ await test('non existing 1', async (t) => { db.delete('user', simple) await db.drain() - deepEqual((await db.query('user').get()).toObject(), []) + deepEqual(await db.query2('user').get(), []) - const nurp = db.create('nurp', {}) + db.create('nurp', {}) await db.drain() - deepEqual((await db.query('nurp').include('email').get()).toObject(), [ + deepEqual(await db.query2('nurp').include('email').get(), [ { email: '', id: 1, }, ]) + // TODO delete doesn't throw anymore, right? + // this can be handled in js - throws(() => db.delete('nurp', 213123123)) + //throws(() => db.delete('nurp', 213123123)) - throws(() => db.delete('user', simple)) + //throws(() => db.delete('user', simple)) // this has to be ignored in C - throws(() => db.delete('user', simple)) + //throws(() => db.delete('user', simple)) }) await test('non existing 2', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { nurp: { props: { @@ -157,37 +141,28 @@ await test('non existing 2', async (t) => { await db.delete('user', simple) - deepEqual((await db.query('user').get()).toObject(), []) + deepEqual(await db.query2('user').get(), []) db.create('nurp', {}) await db.drain() - deepEqual((await db.query('nurp').include('email').get()).toObject(), [ + deepEqual(await db.query2('nurp').include('email').get(), [ { email: '', id: 1, }, ]) - // this can be handled in js - throws(() => db.delete('nurp', 213123123)) - - throws(() => db.delete('user', simple)) - - // this has to be ignored in C - throws(() => db.delete('user', simple)) + // TODO delete doesn't throw anymore, right? + //throws(() => db.delete('nurp', 213123123)) + //throws(() => db.delete('user', simple)) + //throws(() => db.delete('user', simple)) + //throws(() => db.delete('user', 0)) }) await test('save', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const schema = { types: { user: { props: { @@ -197,35 +172,44 @@ await test('save', async (t) => { }, }, }, + } as const + + const db = new BasedDb({ + path: t.tmp, }) + await db.start({ clean: true }) + t.after(() => t.backup(db.server)) + const client = await db.setSchema(schema) - const first = db.create('user', { + const first = client.create('user', { name: 'mr snurp', age: 99, email: 'snurp@snurp.snurp', }) - db.create('user', { + client.create('user', { name: 'mr slurp', age: 99, email: 'slurp@snurp.snurp', }) - await db.drain() + await client.drain() await db.save() - db.delete('user', first) + client.delete('user', first) - await db.drain() + await client.drain() await db.save() const db2 = new BasedDb({ path: t.tmp, }) - await db2.start() - t.after(() => db2.destroy(), true) - deepEqual(await db2.query('user').include('id').get().toObject(), [{ id: 2 }]) - deepEqual(await db.query('user').include('id').get().toObject(), [{ id: 2 }]) + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) + + deepEqual(await client2.query2('user').include('id').get(), [{ id: 2 }]) + deepEqual(await client.query2('user').include('id').get(), [{ id: 2 }]) }) diff --git a/test/dependent.ts b/test/dependent.ts index b980406367..01b4dc5a1e 100644 --- a/test/dependent.ts +++ b/test/dependent.ts @@ -1,16 +1,8 @@ -import { equal } from './shared/assert.js' -import { BasedDb } from '../src/index.js' +import { deepEqual, equal } from './shared/assert.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' await test('dependent', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - const schema = { types: { show: { @@ -51,37 +43,182 @@ await test('dependent', async (t) => { }, } as const - await db.setSchema(schema) + const client = await testDb(t, schema) const createShowTree = async () => { - const showId = await db.create('show', {}) - const editionId = await db.create('edition', { + const showId = await client.create('show', {}) + const editionId = await client.create('edition', { show: showId, }) - const sequenceId = await db.create('sequence', { + const sequenceId = await client.create('sequence', { edition: editionId, }) - const pageId = await db.create('page', { + const pageId = await client.create('page', { sequence: sequenceId, }) - await db.create('item', { + await client.create('item', { page: pageId, }) - await db.drain() + await client.drain() for (const type in schema.types) { - const len = (await db.query(type).get()).length + const len = (await client.query2(type).get()).length equal(len, 1) } return showId } const showId = await createShowTree() - await db.delete('show', showId) - await db.drain() + await client.delete('show', showId) + await client.drain() for (const type in schema.types) { - equal((await db.query(type).get()).length, 0) + equal((await client.query2(type).get()).length, 0) } await createShowTree() }) + +await test('del children', async (t) => { + const client = await testDb(t, { + types: { + parent: { + children: { + type: 'references', + items: { ref: 'child', prop: 'parent' }, + }, + }, + child: { + parent: { + type: 'reference', + ref: 'parent', + prop: 'children', + dependent: true, + }, + }, + }, + }) + + for (let n = 1; n <= 5; n++) { + const head = client.create('parent', {}) + const children: ReturnType[] = [] + + for (let i = 0; i < n; i++) { + children.push(client.create('child', { parent: head })) + } + deepEqual(await client.query2('parent', head).include('**').get(), { + id: await head, + children: (await Promise.all(children)).map((id: number) => ({ id })), + }) + + for (const child of children) { + client.delete('child', child) + } + await client.drain() + deepEqual(await client.query2('parent', head).include('**').get(), { + id: await head, + children: [], + }) + + for (let i = 0; i < n; i++) { + children.push(client.create('child', { parent: head })) + } + await client.delete('parent', head) + deepEqual(await client.query2('parent').get(), []) + deepEqual(await client.query2('child').get(), []) + } +}) + +await test('circle of friends', async (t) => { + const client = await testDb(t, { + types: { + human: { + name: { type: 'string', maxBytes: 8 }, + friends: { + type: 'references', + items: { ref: 'human', prop: 'friends', dependent: true }, + }, + }, + }, + }) + + const h1 = client.create('human', { name: 'joe' }) + const h2 = client.create('human', { name: 'john' }) + const h3 = client.create('human', { name: 'jack' }) + + client.update('human', h2, { + friends: [h1, h3], + }) + client.update('human', h3, { + friends: [h2, h1], + }) + //client.update('human', h3, { + // friends: { add: [h2, h1] }, + //}) + + deepEqual(await client.query2('human').include('**').get(), [ + { + id: 1, + friends: [ + { + id: 2, + name: 'john', + }, + { + id: 3, + name: 'jack', + }, + ], + }, + { + id: 2, + friends: [ + { + id: 1, + name: 'joe', + }, + { + id: 3, + name: 'jack', + }, + ], + }, + { + id: 3, + friends: [ + { + id: 2, + name: 'john', + }, + { + id: 1, + name: 'joe', + }, + ], + }, + ]) + + client.delete('human', 1) + deepEqual(await client.query2('human').include('**').get(), [ + { + id: 2, + friends: [ + { + id: 3, + name: 'jack', + }, + ], + }, + { + id: 3, + friends: [ + { + id: 2, + name: 'john', + }, + ], + }, + ]) + + client.delete('human', 2) + deepEqual(await client.query2('human').include('**').get(), []) +}) diff --git a/test/edges/edgeFilterNested.ts b/test/edges/edgeFilterNested.ts index a1d2fb172a..a0b2ddcaf5 100644 --- a/test/edges/edgeFilterNested.ts +++ b/test/edges/edgeFilterNested.ts @@ -1,15 +1,9 @@ import { deepEqual } from '../shared/assert.js' -import { BasedDb } from '../../src/index.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' await test('edge enum', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { initiative: { name: 'string', @@ -27,7 +21,7 @@ await test('edge enum', async (t) => { items: { ref: 'initiative', prop: 'users', - // $role: ['a', 'b'], + $role: ['a', 'b'], }, }, }, @@ -47,10 +41,10 @@ await test('edge enum', async (t) => { deepEqual( await db - .query('user') - .include('name', (q) => { - q('initiatives').filter('$role', '=', 'a').include('name') - }) + .query2('user') + .include('name', (q) => + q('initiatives').filter('$role', '=', 'a').include('name'), + ) .get(), [ { @@ -64,12 +58,14 @@ await test('edge enum', async (t) => { deepEqual( await db - .query('user') - .include('name', (q) => { + .query2('user') + .include('name', (q) => q('initiatives') .filter('$role', '=', 'a') - .include((q) => q('users').include('$role').filter('$role', '=', 'b')) - }) + .include((q) => + q('users').include('$role').filter('$role', '=', 'b'), + ), + ) .get(), [ { diff --git a/test/edges/edgeNumbers.ts b/test/edges/edgeNumbers.ts index c788887c78..6ef0153836 100644 --- a/test/edges/edgeNumbers.ts +++ b/test/edges/edgeNumbers.ts @@ -1,15 +1,9 @@ import { deepEqual } from '../shared/assert.js' -import { BasedDb } from '../../src/index.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' await test('number', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -44,7 +38,7 @@ await test('number', async (t) => { }, }) - deepEqual(await db.query('user', user2).include('**').get(), { + deepEqual(await db.query2('user', user2).include('**').get(), { id: 2, bestFriend: { id: 1, diff --git a/test/edges/edgeText.ts b/test/edges/edgeText.ts index aeca3a324a..ec74c0f947 100644 --- a/test/edges/edgeText.ts +++ b/test/edges/edgeText.ts @@ -1,19 +1,13 @@ -import { BasedDb } from '../../src/index.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' // Text is currently not supported in edge props: FDN-1713 FDN-730 await test.skip('text in an edge prop', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, - it: { fallback: 'en' }, + it: { fallback: ['en'] }, }, types: { user: { @@ -42,7 +36,7 @@ await test.skip('text in an edge prop', async (t) => { $x: { en: 'hello' }, }, }) - deepEqual(await db.query('user', user2).include('**').get(), { + deepEqual(await db.query2('user', user2).include('**').get(), { id: user2, bestFriend: { id: user1, @@ -58,12 +52,12 @@ await test.skip('text in an edge prop', async (t) => { { id: user2, $x: { en: 'hello' } }, ], }) - deepEqual(await db.query('user', user1).include('**').get(), { + deepEqual(await db.query2('user', user1).include('**').get(), { id: user1, bestFriend: null, friends: [{ id: user3, $x: { en: 'hello' } }], }) - deepEqual(await db.query('user', user3).include('**').get(), { + deepEqual(await db.query2('user', user3).include('**').get(), { id: user3, bestFriend: { id: user2, @@ -78,7 +72,7 @@ await test.skip('text in an edge prop', async (t) => { await db.update('user', user3, { friends: { update: [{ id: user2, $index: 0 }] }, }) - deepEqual(await db.query('user', user3).include('**').get(), { + deepEqual(await db.query2('user', user3).include('**').get(), { id: user3, bestFriend: { id: user2, $x: 0 }, friends: [ diff --git a/test/edges/edgeType.ts b/test/edges/edgeType.ts index 54e1cfc6e9..d558387f4f 100644 --- a/test/edges/edgeType.ts +++ b/test/edges/edgeType.ts @@ -1,15 +1,14 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' import { deepEqual, equal } from '../shared/assert.js' +import { + countDirtyBlocks, + testDbServer, + testDbClient, +} from '../shared/index.js' await test('single reference', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await db.setSchema({ + const server = await testDbServer(t) + const db = await testDbClient(server, { types: { user: { props: { @@ -49,17 +48,13 @@ await test('single reference', async (t) => { $note: 'funny', }, }) + + deepEqual(await countDirtyBlocks(server), 3) }) await test('json type edge', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const server = await testDbServer(t, { noBackup: true }) + const db = await testDbClient(server, { types: { workspace: { name: 'string', @@ -93,7 +88,7 @@ await test('json type edge', async (t) => { ], }) const retrieved = await db - .query('serviceAccount', serviceAccountId) + .query2('serviceAccount', serviceAccountId) .include( 'id', 'workspaces.id', @@ -101,7 +96,6 @@ await test('json type edge', async (t) => { 'workspaces.$permissionsJson', ) .get() - .toObject() equal(retrieved?.workspaces.length, 1, 'Expected to have length 1') deepEqual( diff --git a/test/edges/edges.ts b/test/edges/edges.ts index 523655b6ac..1dbb0680e2 100644 --- a/test/edges/edges.ts +++ b/test/edges/edges.ts @@ -1,16 +1,10 @@ -import { BasedDb } from '../../src/index.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' import { italy, sentence } from '../shared/examples.js' await test('multiple references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -107,23 +101,21 @@ await test('multiple references', async (t) => { // console.log( // 'derp', // await db - // .query('article') + // .query2('article') // .include('contributors.$role') // // .include('contributors.$role', 'contributors.$bigString') // .get(), // ) deepEqual( - ( - await db - .query('article') - .include('contributors.$role', 'contributors.$bigString') - .get() - ).toObject(), + await db + .query2('article') + .include('contributors.$role', 'contributors.$bigString') + .get(), [ { id: artStrudel, - contributors: [{ id: 1, $role: 'writer' }], + contributors: [{ id: 1, $role: 'writer', $bigString: '' }], }, { id: artItaly, @@ -132,52 +124,41 @@ await test('multiple references', async (t) => { ], ) - deepEqual( - ( - await db.query('article').include('contributors.$rating').get() - ).toObject(), - [ - { - id: artStrudel, - contributors: [{ id: 1, $rating: 5 }], - }, - { - id: artItaly, - contributors: [{ id: 1, $rating: 0 }], - }, - ], - ) + deepEqual(await db.query2('article').include('contributors.$rating').get(), [ + { + id: artStrudel, + contributors: [{ id: 1, $rating: 5 }], + }, + { + id: artItaly, + contributors: [{ id: 1, $rating: 0 }], + }, + ]) - deepEqual( - (await db.query('article').include('contributors.$lang').get()).toObject(), - [ - { - id: 1, - contributors: [{ id: 1, $lang: 'en' }], - }, - { - id: 2, - contributors: [{ id: 1 }], - }, - ], - ) + deepEqual(await db.query2('article').include('contributors.$lang').get(), [ + { + id: 1, + contributors: [{ id: 1, $lang: 'en' }], + }, + { + id: 2, + contributors: [{ id: 1, $lang: '' }], + }, + ]) - deepEqual( - (await db.query('article').include('contributors.$on').get()).toObject(), - [ - { - id: 1, - contributors: [{ id: 1, $on: true }], - }, - { - id: 2, - contributors: [{ id: 1, $on: false }], - }, - ], - ) + deepEqual(await db.query2('article').include('contributors.$on').get(), [ + { + id: 1, + contributors: [{ id: 1, $on: true }], + }, + { + id: 2, + contributors: [{ id: 1, $on: false }], + }, + ]) deepEqual( - (await db.query('article').include('contributors.$file').get()).toObject(), + await db.query2('article').include('contributors.$file').get(), [ { id: 1, @@ -185,7 +166,7 @@ await test('multiple references', async (t) => { }, { id: 2, - contributors: [{ id: 1 }], + contributors: [{ id: 1, $file: new Uint8Array() }], }, ], 'Buffer edge value', @@ -204,14 +185,12 @@ await test('multiple references', async (t) => { } deepEqual( - ( - await db - .query('article') - .include((s) => - s('contributors').filter('$role', '=', 'writer').include('$role'), - ) - .get() - ).toObject(), + await db + .query2('article') + .include((s) => + s('contributors').filter('$role', '=', 'writer').include('$role'), + ) + .get(), [ { id: 1, @@ -230,16 +209,14 @@ await test('multiple references', async (t) => { ) deepEqual( - ( - await db - .query('article') - .include((s) => - s('contributors') - .filter('$bigString', '=', italy) - .include('$bigString'), - ) - .get() - ).toObject(), + await db + .query2('article') + .include((s) => + s('contributors') + .filter('$bigString', '=', italy) + .include('$bigString'), + ) + .get(), [ { id: 1, @@ -265,18 +242,21 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article', lastArticle) + .query2('article', lastArticle) .include('contributors.$rating') - .get() - .toObject(), + .get(), { id: 5, - contributors: [{ id: 2, $rating: 2 }, { id: 3 }, { id: 1 }], + contributors: [ + { id: 2, $rating: 2 }, + { id: 3, $rating: 0 }, + { id: 1, $rating: 0 }, + ], }, ) deepEqual( - await db.query('article', 3).include('contributors.$countries.id').get(), + await db.query2('article', 3).include('contributors.$countries.id').get(), { id: 3, contributors: [ @@ -295,7 +275,7 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors') .range(lastArticle - 3, 1000) .get(), @@ -308,13 +288,7 @@ await test('multiple references', async (t) => { }) await test('single reference', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -353,19 +327,16 @@ await test('single reference', async (t) => { author: { id: mrDrol, $role: 'boss' }, }) - deepEqual( - (await db.query('article').include('author.$role', '*').get()).toObject(), - [ - { + deepEqual(await db.query2('article').include('author.$role', '*').get(), [ + { + id: 1, + name: 'This is a nice article', + author: { id: 1, - name: 'This is a nice article', - author: { - id: 1, - $role: 'boss', - }, + $role: 'boss', }, - ], - ) + }, + ]) await db.create('article', { name: 'This is a nice article with mr drol as writer', @@ -373,13 +344,11 @@ await test('single reference', async (t) => { }) deepEqual( - ( - await db - .query('article') - .include('author.$role', '*') - .filter('author.$role', '=', 'boss') - .get() - ).toObject(), + await db + .query2('article') + .include('author.$role', '*') + .filter('author.$role', '=', 'boss') + .get(), [ { id: 1, @@ -397,35 +366,36 @@ await test('single reference', async (t) => { author: { id: mrDrol, $msg: sentence }, }) - deepEqual( - (await db.query('article').include('author.$msg', '*').get()).toObject(), - [ - { id: 1, name: 'This is a nice article', author: { id: 1 } }, - { - id: 2, - name: 'This is a nice article with mr drol as writer', - author: { id: 1 }, + deepEqual(await db.query2('article').include('author.$msg', '*').get(), [ + { + id: 1, + name: 'This is a nice article', + author: { + id: 1, + $msg: '', }, - { - id: 3, - name: 'Power article', - author: { - id: 1, - $msg: sentence, - }, + }, + { + id: 2, + name: 'This is a nice article with mr drol as writer', + author: { + id: 1, + $msg: '', }, - ], - ) + }, + { + id: 3, + name: 'Power article', + author: { + id: 1, + $msg: sentence, + }, + }, + ]) }) await test('preserve fields', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -453,7 +423,7 @@ await test('preserve fields', async (t) => { $x: 42, }, }) - deepEqual(await db.query('user', user2).include('**').get(), { + deepEqual(await db.query2('user', user2).include('**').get(), { id: user2, bestFriend: { id: user1, @@ -469,12 +439,12 @@ await test('preserve fields', async (t) => { { id: user2, $x: 20 }, ], }) - deepEqual(await db.query('user', user1).include('**').get(), { + deepEqual(await db.query2('user', user1).include('**').get(), { id: user1, bestFriend: null, friends: [{ id: user3, $x: 10 }], }) - deepEqual(await db.query('user', user3).include('**').get(), { + deepEqual(await db.query2('user', user3).include('**').get(), { id: user3, bestFriend: { id: user2, @@ -489,7 +459,8 @@ await test('preserve fields', async (t) => { await db.update('user', user3, { friends: { update: [{ id: user2, $index: 0 }] }, }) - deepEqual(await db.query('user', user3).include('**').get(), { + + deepEqual(await db.query2('user', user3).include('**').get(), { id: user3, bestFriend: { id: user2, $x: 0 }, friends: [ diff --git a/test/edges/edgesMain.ts b/test/edges/edgesMain.ts index d5e5dc480b..db143c993c 100644 --- a/test/edges/edgesMain.ts +++ b/test/edges/edgesMain.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' await test('multiple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -62,7 +56,7 @@ await test('multiple', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.$rdy') .include('contributors.$rating') .include('contributors.$derp') @@ -102,7 +96,7 @@ await test('multiple', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('name') .include('contributors.$rdy') .include('contributors.$rating') @@ -142,7 +136,7 @@ await test('multiple', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('name') .include('contributors.$rdy') .include('contributors.$rating') @@ -160,7 +154,14 @@ await test('multiple', async (t) => { { id: 2, name: 'Typical Thursday', - contributors: [{ id: 1, $rating: 1, $rdy: false }], + contributors: [ + { + id: 1, + $rating: 1, + $rdy: false, + $derp: '', + }, + ], }, ], ) @@ -178,7 +179,7 @@ await test('multiple', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('name') .include('contributors.$rdy') .include('contributors.$rating') @@ -196,20 +197,21 @@ await test('multiple', async (t) => { { id: 2, name: 'Typical Thursday', - contributors: [{ id: 1, $rating: 1, $rdy: false }], + contributors: [ + { + id: 1, + $rating: 1, + $rdy: false, + $derp: '', + }, + ], }, ], ) }) await test('single', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -252,7 +254,7 @@ await test('single', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributor.$rdy') .include('contributor.$rating') .include('contributor.$derp') @@ -270,28 +272,19 @@ await test('single', async (t) => { ], ) - deepEqual( - await db.query('article').include('contributor.$rdy').get().toObject(), - [ - { + deepEqual(await db.query2('article').include('contributor.$rdy').get(), [ + { + id: 1, + contributor: { id: 1, - contributor: { - id: 1, - $rdy: true, - }, + $rdy: true, }, - ], - ) + }, + ]) }) await test('multi references update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -342,10 +335,10 @@ await test('multi references update', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.$age') .get() - .then((v) => v.toObject()), + .then((v) => v), [{ id: 1, contributors: [{ id: 1, $age: 66 }] }], 'age 66', ) @@ -364,33 +357,27 @@ await test('multi references update', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.$age') .get() - .then((v) => v.toObject()), + .then((v) => v), [{ id: 1, contributors: [{ id: 1, $age: 2 }] }], 'age 2', ) deepEqual( await db - .query('article') + .query2('article') .include('contributors.$plonki') .get() - .then((v) => v.toObject()), + .then((v) => v), [{ id: 1, contributors: [{ id: 1, $plonki: 100 }] }], 'plonki 100', ) }) await test('single ref update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: {}, article: { @@ -419,7 +406,7 @@ await test('single ref update', async (t) => { }) deepEqual( - await db.query('article').include('writer.$age').get(), + await db.query2('article').include('writer.$age').get(), [{ id: 1, writer: { id: 1, $age: 66 } }], 'age 66', ) @@ -433,13 +420,13 @@ await test('single ref update', async (t) => { }) deepEqual( - await db.query('article').include('writer.$age').get(), + await db.query2('article').include('writer.$age').get(), [{ id: 1, writer: { id: 1, $age: 202 } }], 'age 202', ) deepEqual( - await db.query('article').include('writer.$plonki').get(), + await db.query2('article').include('writer.$plonki').get(), [{ id: 1, writer: { id: 1, $plonki: 100 } }], 'plonki 100', ) diff --git a/test/edges/edgesReference.ts b/test/edges/edgesReference.ts index 30841ef769..f85c2d3407 100644 --- a/test/edges/edgesReference.ts +++ b/test/edges/edgesReference.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' await test('multi reference', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -61,26 +55,16 @@ await test('multi reference', async (t) => { }, }) - deepEqual( - await db.query('article').include('contributor.$friend').get().toObject(), - [ - { - id: 1, - contributor: { id: 1, $friend: { id: 2, name: 'Mr Yur' } }, - }, - ], - ) + deepEqual(await db.query2('article').include('contributor.$friend').get(), [ + { + id: 1, + contributor: { id: 1, $friend: { id: 2, name: 'Mr Yur' } }, + }, + ]) }) await test('multiple references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { country: { props: { @@ -162,10 +146,10 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('name', 'contributor.$countries') .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -205,11 +189,7 @@ await test('multiple references', async (t) => { } await db.drain() const articles = ( - await db - .query('article') - .include('name', 'contributor.$countries') - .get() - .toObject() + await db.query2('article').include('name', 'contributor.$countries').get() ).slice(-10) for (const article of articles) { diff --git a/test/edges/edgesReferences.ts b/test/edges/edgesReferences.ts index c7e5ccf2fe..5e4862e4fb 100644 --- a/test/edges/edgesReferences.ts +++ b/test/edges/edgesReferences.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' await test('multi reference', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -102,17 +96,17 @@ await test('multi reference', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.$age') .get() - .then((v) => v.toObject()), + .then((v) => v), [{ id: 1, contributors: [{ id: 1, $age: 66 }] }], 'age 66', ) deepEqual( await db - .query('article') + .query2('article') .include('contributors.$friend.name', 'contributors.$friend.location') .get(), [ @@ -134,7 +128,7 @@ await test('multi reference', async (t) => { ) deepEqual( - await db.query('article').include('contributors.$friend').get(), + await db.query2('article').include('contributors.$friend').get(), [ { id: 1, @@ -157,13 +151,7 @@ await test('multi reference', async (t) => { }) await test('multiple references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { country: { props: { @@ -251,10 +239,10 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.id') .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, contributors: [{ id: mrDerp }] }, { id: 2, contributors: [{ id: mrFlap }] }, @@ -263,10 +251,10 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.id', 'contributors.$countries.id') .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -281,10 +269,10 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.id', 'contributors.$countries.code') .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -315,10 +303,10 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include('contributors.id', 'contributors.$countries') .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -349,12 +337,12 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include((t) => { t('contributors').include('$countries').include('name').sort('name') }) .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -387,12 +375,12 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include((t) => { t('contributors').include('name').filter('nationality', '=', nl) }) .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -412,7 +400,7 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include((t) => { t('contributors') .include('name') @@ -421,7 +409,7 @@ await test('multiple references', async (t) => { .filter('nationality', '=', nl) }) .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -445,7 +433,7 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include((s) => { s('contributors') .include('name') @@ -456,7 +444,7 @@ await test('multiple references', async (t) => { .filter('nationality', '=', nl) }) .get() - .toObject(), + , [ { id: 1, @@ -477,7 +465,7 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include((s) => { s('contributors') .include('name') @@ -488,7 +476,7 @@ await test('multiple references', async (t) => { .filter('nationality', '=', nl) }) .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -506,7 +494,7 @@ await test('multiple references', async (t) => { deepEqual( await db - .query('article') + .query2('article') .include((s) => { s('contributors') .include('name') @@ -517,7 +505,7 @@ await test('multiple references', async (t) => { .filter('nationality', '=', nl) }) .get() - .then((v) => v.toObject()), + .then((v) => v), [ { id: 1, @@ -538,15 +526,7 @@ await test('multiple references', async (t) => { }) await test('simple references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { round: { name: 'alias', @@ -595,19 +575,13 @@ await test('simple references', async (t) => { }, }) - deepEqual(await db.query('phase').include('scenarios').get(), [ + deepEqual(await db.query2('phase').include('scenarios').get(), [ { id: 1, scenarios: [{ id: scenarioId1, name: 'scenario' }] }, ]) }) await test('many to many', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - t.after(() => t.backup(db)) - - await db.start() + const db = await testDb(t, {}) await db.setSchema({ types: { @@ -634,9 +608,9 @@ await test('many to many', async (t) => { scenarios: { add: [{ id: phaseId2, $name: 'a' }] }, }) - // await db.query('phase').include('scenarios').get(). + // await db.query2('phase').include('scenarios').get(). - deepEqual(await db.query('phase').include('scenarios').get(), [ + deepEqual(await db.query2('phase').include('scenarios').get(), [ { id: 1, scenarios: [{ id: 2, name: 'phase' }] }, { id: 2, scenarios: [{ id: 1, name: 'phase' }] }, { id: 3, scenarios: [] }, @@ -648,7 +622,7 @@ await test('many to many', async (t) => { await db.drain() - deepEqual(await db.query('phase').include('scenarios').get(), [ + deepEqual(await db.query2('phase').include('scenarios').get(), [ { id: 1, scenarios: [ diff --git a/test/empty.ts b/test/empty.ts index 1e1575dfa3..b08c18bf0e 100644 --- a/test/empty.ts +++ b/test/empty.ts @@ -6,7 +6,7 @@ await test('empty db and no schema', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.save() }) @@ -15,7 +15,7 @@ await test('empty db and no nodes', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema({ types: { @@ -34,7 +34,7 @@ await test('empty db and deleted nodes', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema({ types: { diff --git a/test/enum.ts b/test/enum.ts index 5e981e5f16..d5cb1d47f2 100644 --- a/test/enum.ts +++ b/test/enum.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual, throws } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('enum', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { user: { props: { @@ -23,21 +17,21 @@ await test('enum', async (t) => { }, }) - const user1 = await db.create('user', { + const user1 = await client.create('user', { fancyness: 'mid', }) - const user2 = db.create('user', { + const user2 = client.create('user', { fancyness: 'fire', }) - db.create('user', { + client.create('user', { fancyness: 'beta', }) - db.create('user', {}) + client.create('user', {}) - deepEqual((await db.query('user').include('fancyness').get()).toObject(), [ + deepEqual(await client.query2('user').include('fancyness').get(), [ { id: 1, fancyness: 'mid' }, { id: 2, fancyness: 'fire' }, { id: 3, fancyness: 'beta' }, @@ -45,46 +39,44 @@ await test('enum', async (t) => { ]) deepEqual( - ( - await db - .query('user') - .include('fancyness') - .filter('fancyness', '=', 'fire') - .get() - ).toObject(), + await client + .query2('user') + .include('fancyness') + .filter('fancyness', '=', 'fire') + .get(), [ { id: 2, fancyness: 'fire' }, { id: 4, fancyness: 'fire' }, ], ) - db.update('user', user1, { + client.update('user', user1, { fancyness: 'beta', }) - deepEqual((await db.query('user').include('fancyness').get()).toObject(), [ + deepEqual(await client.query2('user').include('fancyness').get(), [ { id: 1, fancyness: 'beta' }, { id: 2, fancyness: 'fire' }, { id: 3, fancyness: 'beta' }, { id: 4, fancyness: 'fire' }, ]) - await db.update('user', user1, { + await client.update('user', user1, { fancyness: null, }) throws(() => - db.update('user', user2, { + client.update('user', user2, { fancyness: 3, }), ) throws(() => - db.update('user', user2, { + client.update('user', user2, { fancyness: 'fond', }), ) - deepEqual((await db.query('user').include('fancyness').get()).toObject(), [ + deepEqual(await client.query2('user').include('fancyness').get(), [ { id: 1, fancyness: 'fire' }, { id: 2, fancyness: 'fire' }, { id: 3, fancyness: 'beta' }, @@ -93,13 +85,7 @@ await test('enum', async (t) => { }) await test('emoji enum', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { review: { props: { @@ -113,10 +99,10 @@ await test('emoji enum', async (t) => { }, }) - db.create('review', {}) - db.create('review', { score: '🙂' }) + client.create('review', {}) + client.create('review', { score: '🙂' }) - deepEqual(await db.query('review').include('score').get(), [ + deepEqual(await client.query2('review').include('score').get(), [ { id: 1, score: '😐', @@ -127,10 +113,10 @@ await test('emoji enum', async (t) => { }, ]) - db.create('review', { score: '☹️' }) - db.create('review', { score: '😐' }) + client.create('review', { score: '☹️' }) + client.create('review', { score: '😐' }) deepEqual( - await db.query('review').include('score').sort('score', 'desc').get(), + await client.query2('review').include('score').sort('score', 'desc').get(), [ { id: 2, score: '🙂' }, { id: 1, score: '😐' }, diff --git a/test/errors.ts b/test/errors.ts index aea96c9905..3ace96ff7b 100644 --- a/test/errors.ts +++ b/test/errors.ts @@ -1,16 +1,9 @@ -import { BasedDb } from '../src/index.js' import { equal, throws } from './shared/assert.js' +import { testDb } from './shared/index.js' import test from './shared/test.js' await test('handle errors - references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { friends: { @@ -27,7 +20,7 @@ await test('handle errors - references', async (t) => { friends: [2], }) - equal(await db.query('user').include('friends').get(), [ + equal(await db.query2('user').include('friends').get(), [ { id: 1, friends: [], @@ -36,14 +29,7 @@ await test('handle errors - references', async (t) => { }) await test('handle errors - single ref', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { friend: { @@ -58,7 +44,7 @@ await test('handle errors - single ref', async (t) => { friend: 2, }) - equal(await db.query('user').include('friend').get(), [ + equal(await db.query2('user').include('friend').get(), [ { id: 1, friend: null, @@ -67,14 +53,7 @@ await test('handle errors - single ref', async (t) => { }) await test('handle errors - non existent id', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { name: 'string', @@ -88,7 +67,7 @@ await test('handle errors - non existent id', async (t) => { await db.drain() - equal(await db.query('user').get(), [ + equal(await db.query2('user').get(), [ { id: 1, name: 'bob' }, { id: 2, name: 'bert' }, ]) diff --git a/test/exists.ts b/test/exists.ts index 97244b8557..4ca2e1fe91 100644 --- a/test/exists.ts +++ b/test/exists.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('exists', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -35,21 +29,21 @@ await test('exists', async (t) => { const id2 = await db.create('user', {}) - deepEqual(await db.query('user').filter('name', 'exists').get(), [ + deepEqual(await db.query2('user').filter('name', 'exists').get(), [ { id: 1, name: 'mr derp', }, ]) - deepEqual(await db.query('user').filter('name', '!exists').get(), [ + deepEqual(await db.query2('user').filter('name', '!exists').get(), [ { id: 2, name: '', }, ]) - deepEqual(await db.query('user').filter('friend', '!exists').get(), [ + deepEqual(await db.query2('user').filter('friend', '!exists').get(), [ { id: 1, name: 'mr derp', @@ -60,7 +54,7 @@ await test('exists', async (t) => { }, ]) - deepEqual(await db.query('user').filter('friends', '!exists').get(), [ + deepEqual(await db.query2('user').filter('friends', '!exists').get(), [ { id: 1, name: 'mr derp', @@ -73,7 +67,7 @@ await test('exists', async (t) => { await db.update('user', id1, { friends: [id2] }) - deepEqual(await db.query('user').filter('friends', 'exists').get(), [ + deepEqual(await db.query2('user').filter('friends', 'exists').get(), [ { id: 1, name: 'mr derp', @@ -86,7 +80,7 @@ await test('exists', async (t) => { await db.update('user', id1, { friends: null }) - deepEqual(await db.query('user').filter('friends', 'exists').get(), []) + deepEqual(await db.query2('user').filter('friends', 'exists').get(), []) const friends: any[] = [] for (let i = 0; i < 1e6; i++) { @@ -97,19 +91,13 @@ await test('exists', async (t) => { await db.update('user', id1, { friends }) - deepEqual(await db.query('user').filter('friends', '!exists').get(), [ + deepEqual(await db.query2('user').filter('friends', '!exists').get(), [ { id: 2, name: '' }, ]) }) await test('with other filters', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -131,7 +119,7 @@ await test('with other filters', async (t) => { name: 'dude', start: Date.now() + 10000, }) - const id2 = await db.create('user', { + await db.create('user', { name: 'cool guy has friends', friends: [id1], }) @@ -142,7 +130,7 @@ await test('with other filters', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('name') .filter('start', '>', 'now') .filter('derp', 'exists') @@ -153,7 +141,7 @@ await test('with other filters', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('name') .filter('name', '!exists') .filter('start', '>', 'now') @@ -163,8 +151,8 @@ await test('with other filters', async (t) => { ) deepEqual( - await db.query('user').include('name').filter('friends', '!exists').get(), - [{ id: 3, name: 'sad guy has no friends' }], + await db.query2('user').include('name').filter('friends', '!exists').get(), + [{ id: id3, name: 'sad guy has no friends' }], '!exists refs', ) }) diff --git a/test/expire.ts b/test/expire.ts index 10ed4a8ea5..da34d931a4 100644 --- a/test/expire.ts +++ b/test/expire.ts @@ -1,4 +1,4 @@ -import { BasedDb } from '../src/index.js' +import { BasedDb, DbClient, getDefaultHooks } from '../src/index.js' import { equal } from './shared/assert.js' import { deepEqual } from '../src/utils/index.js' import test from './shared/test.js' @@ -9,9 +9,9 @@ await test('expire', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const schema = { types: { token: { name: 'string', @@ -28,46 +28,42 @@ await test('expire', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) - const user1 = await db.create('user') - const token1 = await db.create('token', { + const user1 = await client.create('user', {}) + const token1 = await client.create('token', { name: 'my token', user: user1, }) - db.expire('token', token1, 1) - await db.drain() - equal((await db.query('token').get().toObject()).length, 1) + client.expire('token', token1, 1) + await client.drain() + equal((await client.query2('token').get()).length, 1) await setTimeout(2e3) - equal((await db.query('token').get().toObject()).length, 0) + equal((await client.query2('token').get()).length, 0) - const token2 = await db.create('token', { + const token2 = await client.create('token', { name: 'my new token', user: user1, }) - await db.expire('token', token2, 1) - await db.drain() + await client.expire('token', token2, 1) + await client.drain() await db.save() - equal( - (await db.query('token').get().toObject()).length, - 1, - '1 token before save', - ) + equal((await client.query2('token').get()).length, 1, '1 token before save') const db2 = new BasedDb({ path: t.tmp, }) t.after(() => db2.destroy(), true) await db2.start() + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) - equal( - (await db2.query('token').get().toObject()).length, - 1, - '1 token after load', - ) + equal((await client2.query2('token').get()).length, 1, '1 token after load') await setTimeout(3e3) equal( - (await db2.query('token').get().toObject()).length, + (await client2.query2('token').get()).length, 0, '0 tokens after expiry', ) @@ -78,9 +74,9 @@ await test('refresh', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { user: { props: { @@ -93,10 +89,10 @@ await test('refresh', async (t) => { const id1 = await db.create('user', { name: 'dude', }) - await db.expire('user', id1, 1) - await db.drain() - await db.expire('user', id1, 3) + await client.expire('user', id1, 1) + await client.drain() + await client.expire('user', id1, 3) await db.drain() await setTimeout(1100) - deepEqual(await db.query('user', id1).get(), { id: 1, name: 'dude' }) + deepEqual(await client.query2('user', id1).get(), { id: 1, name: 'dude' }) }) diff --git a/test/exporter.ts b/test/exporter.ts index 8129e00e1a..a8eeab3404 100644 --- a/test/exporter.ts +++ b/test/exporter.ts @@ -51,8 +51,8 @@ await test('export to csv', async (t) => { await db.setSchema({ locales: { en: {}, - it: { fallback: 'en' }, - fi: { fallback: 'en' }, + it: { fallback: ['en'] }, + fi: { fallback: ['en'] }, }, types: { product: { diff --git a/test/filter/api.ts b/test/filter/api.ts index 9b633f647e..7d5b6d90cf 100644 --- a/test/filter/api.ts +++ b/test/filter/api.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual } from '../shared/assert.js' await test('filter api: object', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { bestFriend: { @@ -35,7 +29,7 @@ await test('filter api: object', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('friends') .filter('friends', 'includes', { id: 2 }) .get(), @@ -48,7 +42,7 @@ await test('filter api: object', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('friends') .filter('friends', 'includes', [{ id: 2 }, { id: 1 }]) .get(), @@ -61,7 +55,7 @@ await test('filter api: object', async (t) => { ) deepEqual( - await db.query('user').filter('bestFriend', '=', { id: 9 }).get(), + await db.query2('user').filter('bestFriend', '=', { id: 9 }).get(), [ { id: 10, @@ -72,7 +66,7 @@ await test('filter api: object', async (t) => { deepEqual( await db - .query('user') + .query2('user') .filter('bestFriend', '=', [{ id: 9 }, { id: 10 }]) .get(), [ diff --git a/test/filter/edges.ts b/test/filter/edges.ts index 33341ad038..42e49fc2e3 100644 --- a/test/filter/edges.ts +++ b/test/filter/edges.ts @@ -1,15 +1,10 @@ import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('filter edges', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { team: { props: { @@ -56,7 +51,7 @@ await test('filter edges', async (t) => { deepEqual( await db - .query('team', 1) + .query2('team', 1) .include((q) => q('files').filter('fileType', '=', 'document').include('id'), ) @@ -74,7 +69,7 @@ await test('filter edges', async (t) => { deepEqual( await db - .query('team') + .query2('team') .filter('files', 'exists') .include((s) => s('files').filter('fileType', '=', 'document').include('id'), @@ -93,13 +88,7 @@ await test('filter edges', async (t) => { }) await test('filter references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { team: { props: { @@ -145,11 +134,11 @@ await test('filter references', async (t) => { fileType: 'document', teams: [t3], }) - // await db.query('team').include('*', '**').get().inspect(100) - // await db.query('libraryFile').include('*', '**').get().inspect(100) + // await db.query2('team').include('*', '**').get().inspect(100) + // await db.query2('libraryFile').include('*', '**').get().inspect(100) //await db - // .query('team') + // .query2('team') // .include('files') // // need to make this // // still missing diff --git a/test/filter/filter.ts b/test/filter/filter.ts index 0ad56ee3c3..139bc027e3 100644 --- a/test/filter/filter.ts +++ b/test/filter/filter.ts @@ -1,17 +1,11 @@ import { BasedDb } from '../../src/index.js' -import test from '../shared/test.js' import { equal, deepEqual } from '../shared/assert.js' +import test from '../shared/test.js' +import { testDb } from '../shared/index.js' await test('single', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const status = ['error', 'danger', 'ok', '🦄'] - - await db.setSchema({ + const db = await testDb(t, { types: { org: { props: { @@ -47,7 +41,7 @@ await test('single', async (t) => { const x = [10, 20] - deepEqual((await db.query('org').filter('x', '=', x).get()).toObject(), [ + deepEqual(await db.query2('org').filter('x', '=', x).get(), [ { id: 1, status: 'ok', @@ -55,73 +49,49 @@ await test('single', async (t) => { name: 'hello', }, ]) - deepEqual( - (await db.query('org').filter('orgs', '=', [org, org2]).get()).toObject(), - [ - { - id: 3, - status: undefined, - x: 0, - name: 'hello ???????', - }, - ], - ) - deepEqual( - (await db.query('org').filter('status', '=', 'error').get()).toObject(), - [], - ) - deepEqual( - (await db.query('org').filter('status', '=', 'ok').get()).toObject(), - [ - { - id: 1, - status: 'ok', - x: 10, - name: 'hello', - }, - { - id: 2, - status: 'ok', - x: 0, - name: 'x', - }, - ], - ) - deepEqual( - (await db.query('org').filter('name', 'includes', '0').get()).toObject(), - [], - ) - deepEqual( - ( - await db.query('org').filter('name', 'includes', 'hello').get() - ).toObject(), - [ - { - id: 1, - status: 'ok', - x: 10, - name: 'hello', - }, - { - id: 3, - status: undefined, - x: 0, - name: 'hello ???????', - }, - ], - ) + deepEqual(await db.query2('org').filter('orgs', '=', [org, org2]).get(), [ + { + id: 3, + status: undefined, + x: 0, + name: 'hello ???????', + }, + ]) + deepEqual(await db.query2('org').filter('status', '=', 'error').get(), []) + deepEqual(await db.query2('org').filter('status', '=', 'ok').get(), [ + { + id: 1, + status: 'ok', + x: 10, + name: 'hello', + }, + { + id: 2, + status: 'ok', + x: 0, + name: 'x', + }, + ]) + deepEqual(await db.query2('org').filter('name', 'includes', '0').get(), []) + deepEqual(await db.query2('org').filter('name', 'includes', 'hello').get(), [ + { + id: 1, + status: 'ok', + x: 10, + name: 'hello', + }, + { + id: 3, + status: undefined, + x: 0, + name: 'hello ???????', + }, + ]) }) await test('simple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const status = ['error', 'danger', 'ok', '🦄'] - - await db.setSchema({ + const db = await testDb(t, { types: { org: { props: { @@ -214,7 +184,7 @@ await test('simple', async (t) => { const x = [300, 400, 10, 20, 1, 2, 99, 9999, 888, 6152] equal( - (await db.query('machine').filter('lastPing', '=', x).get()).length, + (await db.query2('machine').filter('lastPing', '=', x).get()).length, x.length, 'OR number', ) @@ -237,7 +207,7 @@ await test('simple', async (t) => { const rand = ~~(Math.random() * lastId) || 1 const derp = [make(), make(), make(), rand] return db - .query('env') + .query2('env') .include('*') .filter('machines', 'includes', derp) .get() @@ -245,7 +215,7 @@ await test('simple', async (t) => { ) for (const envs of res) { - mi += envs.toObject().length + mi += envs.length measure += envs.execTime } @@ -264,11 +234,11 @@ await test('simple', async (t) => { Array.from({ length: amount }).map(async () => { const rand = ~~(Math.random() * lastId) || 1 const envs = await db - .query('env') + .query2('env') .include('*') .filter('machines', 'includes', rand) .get() - mi += envs.toObject().length + mi += envs.length measure += envs.execTime }), ) @@ -283,33 +253,33 @@ await test('simple', async (t) => { equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('scheduled', '>', 'now + 694d + 10h') .get() - ).toObject().length, + ).length, 1, ) equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('scheduled', '<', 'now-694d-10h-15m') // Date, .get() - ).toObject().length, + ).length, 1, ) equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('scheduled', '<', '10/24/2000') // Date, .get() - ).toObject().length, + ).length, 0, 'parse date string', ) @@ -317,34 +287,34 @@ await test('simple', async (t) => { equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('requestsServed', '<', 1) .get() - ).toObject().length, + ).length, 1, ) equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('requestsServed', '<=', 1) .get() - ).toObject().length, + ).length, 2, ) equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('derp', '<=', 0) .filter('derp', '>', -5) .get() - ).toObject().length, + ).length, 5, 'Negative range', ) @@ -352,12 +322,12 @@ await test('simple', async (t) => { equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('temperature', '<=', 0) .filter('temperature', '>', -0.1) .get() - ).toObject().length < 500, + ).length < 500, true, 'Negative temperature (result amount)', ) @@ -365,25 +335,23 @@ await test('simple', async (t) => { equal( ( await db - .query('machine') + .query2('machine') .include('*') .filter('temperature', '<=', 0) .filter('temperature', '>', -0.1) .get() - ).toObject()[0].temperature < 0, + )[0].temperature < 0, true, 'Negative temperature (check value)', ) equal( - ( - await db - .query('machine') - .include('id') - .filter('env', '=', env) - .range(0, 10) - .get() - ).toObject(), + await db + .query2('machine') + .include('id') + .filter('env', '=', env) + .range(0, 10) + .get(), [ { id: 2 }, { id: 4 }, @@ -400,15 +368,13 @@ await test('simple', async (t) => { ) equal( - ( - await db - .query('machine') - .include('id') - .filter('lastPing', '>=', 1e5 - 1) // order optmization automaticly - .filter('env', '=', [emptyEnv, env]) - .range(0, 10) - .get() - ).toObject(), + await db + .query2('machine') + .include('id') + .filter('lastPing', '>=', 1e5 - 1) // order optmization automaticly + .filter('env', '=', [emptyEnv, env]) + .range(0, 10) + .get(), [{ id: 100000 }], 'Filter by reference (multiple)', ) @@ -440,7 +406,7 @@ await test('simple', async (t) => { ]) deepEqual( - await db.query('env').filter('machines', '<', 10).get(), + await db.query2('env').filter('machines', '<', 10).get(), [ { id: 2, @@ -459,7 +425,7 @@ await test('simple', async (t) => { ) deepEqual( - await db.query('env').filter('machines', '=', ids).get(), + await db.query2('env').filter('machines', '=', ids).get(), [ { id: 3, @@ -472,13 +438,11 @@ await test('simple', async (t) => { ) deepEqual( - ( - await db - .query('machine') - .include('env', '*') - .filter('env.status', '=', 5) - .get() - ).toObject(), + await db + .query2('machine') + .include('env', '*') + .filter('env.status', '=', 5) + .get(), [ { id: 100001, @@ -521,13 +485,11 @@ await test('simple', async (t) => { }) deepEqual( - ( - await db - .query('machine') - .filter('status', '=', '🦄') - .include('status') - .get() - ).toObject(), + await db + .query2('machine') + .filter('status', '=', '🦄') + .include('status') + .get(), [ { id: unicornMachine, @@ -536,18 +498,18 @@ await test('simple', async (t) => { ], ) - deepEqual((await db.query('env').filter('standby').get()).toObject(), []) + deepEqual(await db.query2('env').filter('standby').get(), []) await db.update('env', derpEnv, { standby: true, }) - deepEqual((await db.query('env').filter('standby').get()).toObject(), [ + deepEqual(await db.query2('env').filter('standby').get(), [ { id: 3, standby: true, status: 5, name: 'derp env' }, ]) let rangeResult = await db - .query('machine') + .query2('machine') .include('temperature') .filter('temperature', '..', [-0.1, 0]) .get() @@ -560,7 +522,7 @@ await test('simple', async (t) => { ) rangeResult = await db - .query('machine') + .query2('machine') .include('*') .range(0, 10) // .filter('temperature', '!..', [-0.1, 0]) @@ -589,15 +551,8 @@ await test('simple', async (t) => { }) await test('or', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const status = ['error', 'danger', 'ok', '🦄'] - - await db.setSchema({ + const db = await testDb(t, { types: { machine: { props: { @@ -629,14 +584,12 @@ await test('or', async (t) => { await db.drain() deepEqual( - ( - await db - .query('machine') - .include('id', 'lastPing') - .filter('scheduled', '>', '01/01/2100') - .or('lastPing', '>', 1e6 - 2) - .get() - ).toObject(), + await db + .query2('machine') + .include('id', 'lastPing') + .filter('scheduled', '>', '01/01/2100') + .or('lastPing', '>', 1e6 - 2) + .get(), [ { id: 999999, @@ -650,30 +603,26 @@ await test('or', async (t) => { ) deepEqual( - ( - await db - .query('machine') - .include('id', 'lastPing') - .filter('scheduled', '>', '01/01/2100') - .or((f) => { - f.filter('lastPing', '>', 1e6 - 2) - }) - .get() - ).toObject(), - ( - await db - .query('machine') - .include('id', 'lastPing') - .filter('scheduled', '>', '01/01/2100') - .or('lastPing', '>', 1e6 - 2) - .get() - ).toObject(), + await db + .query2('machine') + .include('id', 'lastPing') + .filter('scheduled', '>', '01/01/2100') + .or((f) => { + f.filter('lastPing', '>', 1e6 - 2) + }) + .get(), + await db + .query2('machine') + .include('id', 'lastPing') + .filter('scheduled', '>', '01/01/2100') + .or('lastPing', '>', 1e6 - 2) + .get(), ) equal( ( await db - .query('machine') + .query2('machine') .include('id', 'lastPing') .filter('scheduled', '>', '01/01/2100') .or((f) => { @@ -681,47 +630,41 @@ await test('or', async (t) => { f.or('temperature', '<', -30) }) .get() - ).toObject().length > 10, + ).length > 10, true, 'Branch or', ) deepEqual( - ( - await db - .query('machine') - .include('id', 'lastPing') - .filter('scheduled', '>', '01/01/2100') - .or((f) => { - f.filter('lastPing', '>', 1e6 - 2) - f.or((f) => { - f.filter('temperature', '<', -30) - }) - }) - .get() - ).toObject(), - ( - await db - .query('machine') - .include('id', 'lastPing') - .filter('scheduled', '>', '01/01/2100') - .or((f) => { - f.filter('lastPing', '>', 1e6 - 2) - f.or('temperature', '<', -30) + await db + .query2('machine') + .include('id', 'lastPing') + .filter('scheduled', '>', '01/01/2100') + .or((f) => { + f.filter('lastPing', '>', 1e6 - 2) + f.or((f) => { + f.filter('temperature', '<', -30) }) - .get() - ).toObject(), + }) + .get(), + await db + .query2('machine') + .include('id', 'lastPing') + .filter('scheduled', '>', '01/01/2100') + .or((f) => { + f.filter('lastPing', '>', 1e6 - 2) + f.or('temperature', '<', -30) + }) + .get(), ) - const r = ( - await db - .query('machine') - .include('temperature') - .range(0, 15) - .filter('temperature', '>', 0) - .or('temperature', '<', -0.1) - .get() - ).toObject() + const r = await db + .query2('machine') + .include('temperature') + .range(0, 15) + .filter('temperature', '>', 0) + .or('temperature', '<', -0.1) + .get() equal( r @@ -737,13 +680,7 @@ await test('or', async (t) => { }) await test('or numerical', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { machine: { props: { @@ -760,15 +697,13 @@ await test('or numerical', async (t) => { } await db.drain() - const r = ( - await db - .query('machine') - .include('temperature') - .range(0, 1000) - .filter('temperature', '>', 150) - .or('temperature', '<', 50) - .get() - ).toObject() + const r = await db + .query2('machine') + .include('temperature') + .range(0, 1000) + .filter('temperature', '>', 150) + .or('temperature', '<', 50) + .get() equal( r @@ -795,7 +730,7 @@ await test('or numerical', async (t) => { equal( ( await db - .query('machine') + .query2('machine') .include('id', 'temperature') .filter('temperature', '>', 201) .or((f) => { @@ -803,7 +738,7 @@ await test('or numerical', async (t) => { f.or('temperature', '<', 10) }) .get() - ).toObject().length > 10, + ).length > 10, true, 'Branch or', ) @@ -816,7 +751,7 @@ await test('or numerical', async (t) => { await db.drain() deepEqual( - (await db.query('machine').include('id').range(0, 3).get()).node(-1), + (await db.query2('machine').include('id').range(0, 3).get()).node(-1), { id: 3, }, @@ -825,7 +760,7 @@ await test('or numerical', async (t) => { deepEqual( ( await db - .query('machine') + .query2('machine') .include('temperature') .filter('id', '<=', 20000) .range(10000, 20000) @@ -836,13 +771,7 @@ await test('or numerical', async (t) => { }) await test.skip('includes', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -882,7 +811,7 @@ await test.skip('includes', async (t) => { // filtering refs await db - .query('user') + .query2('user') .include('*') .filter('bestBud.name', 'includes', 'Jose') .get() @@ -890,7 +819,7 @@ await test.skip('includes', async (t) => { // filtering multi refs await db - .query('user') + .query2('user') .include( (q) => q('buddies').include('*').filter('name', 'includes', 'Jose'), '*', @@ -902,14 +831,35 @@ await test.skip('includes', async (t) => { JSON.stringify( ( await db - .query('user') + .query2('user') .include( (q) => q('buddies').include('*').filter('name', 'includes', 'Jose'), '*', ) .get() - .toObject() ).filter((u) => u.buddies.length > 0), ), ) }) + +await test('lt x leq', async (t) => { + const db = await testDb(t, { + types: { + bucket: { + red: 'uint8', + blue: 'uint8', + }, + }, + }) + + db.create('bucket', { + red: 1, + blue: 3, + }) + db.create('bucket', { + red: 4, + blue: 6, + }) + const b = await db.query2('bucket').filter('red', '<', 4).get() + equal(b.length, 1, 'lt must be different than leq') +}) diff --git a/test/filter/or.ts b/test/filter/or.ts index 2351ec72df..2e628fa584 100644 --- a/test/filter/or.ts +++ b/test/filter/or.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual } from '../shared/assert.js' await test('filter or', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -24,9 +18,7 @@ await test('filter or', async (t) => { } deepEqual( - ( - await db.query('user').filter('nr', '>', 8).or('nr', '<', 1).get() - ).toObject(), + await db.query2('user').filter('nr', '>', 8).or('nr', '<', 1).get(), [ { id: 1, @@ -41,15 +33,13 @@ await test('filter or', async (t) => { ) deepEqual( - ( - await db - .query('user') - .filter('nr', '>', 8) - .or((t) => { - t.filter('nr', '<', 1).or('nr', '=', 5) - }) - .get() - ).toObject(), + await db + .query2('user') + .filter('nr', '>', 8) + .or((t) => { + t.filter('nr', '<', 1).or('nr', '=', 5) + }) + .get(), [ { id: 1, @@ -68,14 +58,12 @@ await test('filter or', async (t) => { ) deepEqual( - ( - await db - .query('user') - .filter('nr', '>', 8) - .or('nr', '<', 1) - .or('nr', '=', 5) - .get() - ).toObject(), + await db + .query2('user') + .filter('nr', '>', 8) + .or('nr', '<', 1) + .or('nr', '=', 5) + .get(), [ { id: 1, @@ -94,13 +82,11 @@ await test('filter or', async (t) => { ) deepEqual( - ( - await db - .query('user') - .filter('nr', '>', 8) - .or(() => {}) - .get() - ).toObject(), + await db + .query2('user') + .filter('nr', '>', 8) + .or(() => {}) + .get(), [ { id: 10, diff --git a/test/filter/references.ts b/test/filter/references.ts index 5377033abe..60849a64a0 100644 --- a/test/filter/references.ts +++ b/test/filter/references.ts @@ -1,15 +1,10 @@ import test from '../shared/test.js' import { BasedDb } from '../../src/index.js' import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('filter references drones', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { workspace: { props: { @@ -52,7 +47,7 @@ await test('filter references drones', async (t) => { const user = 1 const drones = await db - .query('user') + .query2('user') .include((s) => s('workspaces').include((s) => s('drones').include('*').filter('workspace.users', 'includes', user), @@ -62,7 +57,7 @@ await test('filter references drones', async (t) => { .get() const drones2 = await db - .query('drone') + .query2('drone') .filter('workspace.users', 'includes', user) .get() diff --git a/test/filter/referencesField.ts b/test/filter/referencesField.ts index 854e6cfcbf..1cf2738d8e 100644 --- a/test/filter/referencesField.ts +++ b/test/filter/referencesField.ts @@ -1,15 +1,9 @@ import test from '../shared/test.js' -import { BasedDb } from '../../src/index.js' import { deepEqual } from '../shared/assert.js' +import {testDb} from '../shared/index.js' await test('filter references shortcut', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db= await testDb(t, { types: { user: { props: { @@ -34,7 +28,7 @@ await test('filter references shortcut', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('name', 'age', 'friends') .filter('friends.age', '<', 40) .get(), @@ -51,7 +45,7 @@ await test('filter references shortcut', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('name', 'age', 'friends') .filter('friends.age', '>', 40) .get(), @@ -68,7 +62,7 @@ await test('filter references shortcut', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('name', 'age', 'friends') .filter('friends[*].age', '>', 40) .get(), @@ -85,7 +79,7 @@ await test('filter references shortcut', async (t) => { deepEqual( await db - .query('user') + .query2('user') .include('name', 'age', 'friends') .filter('friends[0].age', '>', 40) .get(), @@ -106,7 +100,7 @@ await test('filter references shortcut', async (t) => { deepEqual( await db - .query('user', mrA) + .query2('user', mrA) .include('name', 'age', 'friends') .filter('friends[-1].age', '>', 100) .get(), @@ -133,7 +127,7 @@ await test('filter references shortcut', async (t) => { deepEqual( await db - .query('user', mrA) + .query2('user', mrA) .include('name', 'age', 'friends') .filter('friends[2].age', '=', 93) .get(), diff --git a/test/filter/string.ts b/test/filter/string.ts index aa9f08ea64..9f354feff3 100644 --- a/test/filter/string.ts +++ b/test/filter/string.ts @@ -1,24 +1,35 @@ -import { BasedDb, stringCompress as compress } from '../../src/index.js' import { ENCODER } from '../../src/utils/uint8.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { equal, deepEqual } from '../shared/assert.js' import { italy, sentence, readBible } from '../shared/examples.js' import { decompress } from '../../src/protocol/index.js' - +import { AutoSizedUint8Array } from '../../src/utils/AutoSizedUint8Array.js' +import { defs } from '../../src/schema/defs/index.js' +import { Modify } from '../../src/zigTsExports.js' + +const buf = new AutoSizedUint8Array() +const s = new defs.string({ type: 'string' }, [], { + id: 0, + name: '', + main: [], + separate: [], + props: new Map(), + tree: new Map(), + schema: { props: {} }, +}) +const compress = (str: string) => { + buf.length = 0 + s.pushValue(buf, str, Modify.create) + return buf.view.slice() +} const bible = readBible() const capitals = 'AAAAAAAAAA AAAAAAAAAAA AAAAAAAAAAAAAAAAAAAA AAA A AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' await test('variable size (string/binary)', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -43,7 +54,6 @@ await test('variable size (string/binary)', async (t) => { equal(decompress(compressedSentence), sentence, 'compress / decompress api') const compressedItaly = compress(italy) equal(decompress(compressedItaly), italy, 'compress / decompress api (large)') - for (let i = 0; i < 1000; i++) { const str = 'en' db.create('article', { @@ -59,14 +69,12 @@ await test('variable size (string/binary)', async (t) => { await db.drain() deepEqual( - ( - await db - .query('article') - .filter('stuff', '=', ENCODER.encode('#' + 2)) - .include('name', 'stuff', 'derp', 'type') - .range(0, 10) - .get() - ).toObject(), + await db + .query2('article') + .filter('stuff', '=', ENCODER.encode('#' + 2)) + .include('name', 'stuff', 'derp', 'type') + .range(0, 10) + .get(), [ { id: 3, @@ -81,7 +89,7 @@ await test('variable size (string/binary)', async (t) => { const len = ( await db - .query('article') + .query2('article') .filter('stuff', 'includes', new Uint8Array([55, 57])) .range(0, 100) .get() @@ -112,7 +120,7 @@ await test('variable size (string/binary)', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('derp', 'includes', ENCODER.encode('vitorio')) .include('id') .get() @@ -124,7 +132,7 @@ await test('variable size (string/binary)', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('derp', 'includes', ENCODER.encode('xx')) .include('id') .get() @@ -136,7 +144,7 @@ await test('variable size (string/binary)', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('derp', 'includes', q) .include('id') .get() @@ -148,7 +156,7 @@ await test('variable size (string/binary)', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('derp', '=', largeDerp) .include('id') .range(0, 1e3) @@ -161,7 +169,7 @@ await test('variable size (string/binary)', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', '=', italy) .include('id') .range(0, 1e3) @@ -173,12 +181,7 @@ await test('variable size (string/binary)', async (t) => { }) await test('has compressed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -201,26 +204,18 @@ await test('has compressed', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', 'includes', n) .include('id') .range(0, 1e3) .get() - ).toObject().length, + ).length, 1, ) }) await test('has uncompressed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -252,35 +247,34 @@ await test('has uncompressed', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', 'includes', 'derp derp derp', { lowerCase: true }) .include('id') .range(0, 1e3) .get() - ).toObject().length, + ).length, 0, ) equal( ( await db - .query('italy') + .query2('italy') .filter('body', 'includes', 'derp derp derp') .include('id') .range(0, 1e3) .get() - ).toObject().length, + ).length, 0, ) deepEqual( await db - .query('italy') + .query2('italy') .filter('headline', 'includes', 'pager') .include('id', 'headline') .range(0, 1e3) - .get() - .then((v) => v.toObject()), + .get(), [ { id: 501, @@ -295,12 +289,11 @@ await test('has uncompressed', async (t) => { deepEqual( await db - .query('italy') + .query2('italy') .filter('headline', 'includes', 'Pager', { lowerCase: true }) .include('id', 'headline') .range(0, 1e3) - .get() - .then((v) => v.toObject()), + .get(), [ { id: 501, @@ -315,23 +308,21 @@ await test('has uncompressed', async (t) => { deepEqual( await db - .query('italy') + .query2('italy') .filter('headline', 'includes', 'refugee', { lowerCase: true }) .include('id', 'headline') .range(0, 1e3) - .get() - .then((v) => v.toObject()), + .get(), [], ) deepEqual( await db - .query('italy') + .query2('italy') .filter('headline', 'includes', 'gaza', { lowerCase: true }) .include('id', 'headline') .range(0, 1e3) - .get() - .then((v) => v.toObject()), + .get(), [ { id: 801, @@ -343,12 +334,7 @@ await test('has uncompressed', async (t) => { }) await test('main has (string/binary)', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -368,30 +354,21 @@ await test('main has (string/binary)', async (t) => { stuff, derp: new Uint8Array([1, 2, 3, 4]), } - deepEqual((await db.query('article').get()).toObject(), [derpResult]) + deepEqual(await db.query2('article').get(), [derpResult]) + deepEqual(await db.query2('article').filter('stuff', '=', stuff).get(), [ + derpResult, + ]) deepEqual( - (await db.query('article').filter('stuff', '=', stuff).get()).toObject(), - [derpResult], - ) - deepEqual( - ( - await db - .query('article') - .filter('derp', 'includes', new Uint8Array([4])) - .get() - ).toObject(), + await db + .query2('article') + .filter('derp', 'includes', new Uint8Array([4])) + .get(), [derpResult], ) }) await test('has normalized uncompressed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -412,7 +389,7 @@ await test('has normalized uncompressed', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', 'includes', 'aaaaaa', { lowerCase: true }) .include('id') .range(0, 1e5) @@ -423,13 +400,7 @@ await test('has normalized uncompressed', async (t) => { }) await test('has normalized compressed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -448,7 +419,7 @@ await test('has normalized compressed', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', 'includes', 'aaaaa', { lowerCase: true }) .include('id', 'body') .range(0, 1e3) @@ -459,15 +430,7 @@ await test('has normalized compressed', async (t) => { }) await test('has OR uncompressed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -493,7 +456,7 @@ await test('has OR uncompressed', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', 'includes', ['aaaaaaaaaaa', 'bbbbbb'], { lowerCase: true, }) // ['aaa', 'bbb', 'ccc', 'eee'] @@ -506,12 +469,11 @@ await test('has OR uncompressed', async (t) => { deepEqual( await db - .query('italy') + .query2('italy') .filter('title', 'includes', ['gaza', 'tubbies'], { lowerCase: true }) .include('id', 'title') .range(0, 1e3) - .get() - .then((v) => v.toObject()), + .get(), [ { id: 501, @@ -523,26 +485,17 @@ await test('has OR uncompressed', async (t) => { deepEqual( await db - .query('italy') + .query2('italy') .filter('title', 'includes', ['crisis', 'refugee'], { lowerCase: true }) .include('id', 'title') .range(0, 1e3) - .get() - .then((v) => v.toObject()), + .get(), [], ) }) await test('has OR compressed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -565,7 +518,7 @@ await test('has OR compressed', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', 'includes', ['aaaaaaaaaaa', 'bbbbbbbb'], { lowerCase: true, }) // ['aaa', 'bbb', 'ccc', 'eee'] @@ -578,15 +531,7 @@ await test('has OR compressed', async (t) => { }) await test('OR equal', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -610,7 +555,7 @@ await test('OR equal', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', '=', [derpItaly, 'derp', italy]) .range(0, 1e3) .get() @@ -620,15 +565,7 @@ await test('OR equal', async (t) => { }) await test('OR equal main', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { italy: { props: { @@ -647,7 +584,7 @@ await test('OR equal main', async (t) => { equal( ( await db - .query('italy') + .query2('italy') .filter('body', '=', ['xx', 'bb']) .range(0, 1e3) .get() @@ -657,12 +594,7 @@ await test('OR equal main', async (t) => { }) await test('includes and neq', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await db.setSchema({ + const db = await testDb(t, { types: { ent: { props: { @@ -704,7 +636,7 @@ await test('includes and neq', async (t) => { deepEqual( await db - .query('ent') + .query2('ent') .filter('country', 'includes', ['Italy', 'Germany']) .filter('city', '!=', 'Berlin') .get(), @@ -716,12 +648,7 @@ await test('includes and neq', async (t) => { }) await test('empty string', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -730,20 +657,20 @@ await test('empty string', async (t) => { }, }, }) - const user1 = db.create('user', { + await db.create('user', { potato: 'cool', }) - const user2 = db.create('user', {}) - const user3 = db.create('user', { potato: '' }) + const user2 = await db.create('user', {}) + const user3 = await db.create('user', { potato: '' }) deepEqual( - await db.query('user').filter('potato', '=', '').get(), + await db.query2('user').filter('potato', '=', '').get(), [ { - id: 2, + id: user2, potato: '', }, { - id: 3, + id: user3, potato: '', }, ], @@ -752,12 +679,7 @@ await test('empty string', async (t) => { }) await test('empty string fixed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -768,24 +690,24 @@ await test('empty string fixed', async (t) => { }, }, }) - const user1 = db.create('user', { + const user1 = await db.create('user', { potato: 'cool', region: 'AA', }) - const user2 = db.create('user', { region: 'XX', city: 'Amsterdam' }) - const user3 = db.create('user', { potato: 'flap', city: 'Rome' }) + const user2 = await db.create('user', { region: 'XX', city: 'Amsterdam' }) + const user3 = await db.create('user', { potato: 'flap', city: 'Rome' }) deepEqual( - await db.query('user').filter('potato', '!=', '').get(), + await db.query2('user').filter('potato', '!=', '').get(), [ - { id: 1, region: 'AA', potato: 'cool', city: '' }, - { id: 3, region: '', potato: 'flap', city: 'Rome' }, + { id: user1, region: 'AA', potato: 'cool', city: '' }, + { id: user3, region: '', potato: 'flap', city: 'Rome' }, ], '!Empty string filter', ) deepEqual( await db - .query('user') + .query2('user') .filter('potato', '!=', '') .filter('region', '!=', '') .get(), @@ -793,15 +715,15 @@ await test('empty string fixed', async (t) => { 'Empty string filter + region', ) - const user4 = db.create('user', { region: 'YY', city: 'Denver' }) + db.create('user', { region: 'YY', city: 'Denver' }) deepEqual( await db - .query('user') + .query2('user') .filter('potato', '=', '') .filter('region', '!=', '') .filter('city', '=', 'Amsterdam') .get(), - [{ id: 2, region: 'XX', potato: '', city: 'Amsterdam' }], + [{ id: user2, region: 'XX', potato: '', city: 'Amsterdam' }], 'Empty string filter + region + city', ) }) diff --git a/test/flush.ts b/test/flush.ts index 6069b197a4..3322e7d1d9 100644 --- a/test/flush.ts +++ b/test/flush.ts @@ -1,15 +1,8 @@ -import { BasedDb } from '../src/index.js' +import { testDb } from './shared/index.js' import test from './shared/test.js' await test('too large payload should throw, correct size should not', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 80, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { user: { props: { @@ -19,10 +12,12 @@ await test('too large payload should throw, correct size should not', async (t) }, }) - let error + let error: Error | null = null try { - db.create('user', { - name: 'cool string but too long for the max size unfortunately wow what the hell', + client.create('user', { + name: 'cool string but too long for the max size unfortunately wow what the hell'.repeat( + 4, + ), }) } catch (e) { error = e @@ -36,10 +31,10 @@ await test('too large payload should throw, correct size should not', async (t) let i = 10 while (i--) { - db.create('user', { + client.create('user', { name: 'user' + i, }) } - await db.drain() + await client.drain() }) diff --git a/test/hooks.ts b/test/hooks.ts index 9ee763108e..fbeb44cc07 100644 --- a/test/hooks.ts +++ b/test/hooks.ts @@ -1,18 +1,11 @@ import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual, equal, notEqual, throws } from './shared/assert.js' import { wait } from '../src/utils/index.js' await test('hooks - undefined values', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -67,7 +60,7 @@ await test('hooks - undefined values', async (t) => { age: 25, }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, name: 'youzi', @@ -80,7 +73,7 @@ await test('hooks - undefined values', async (t) => { }, ]) - deepEqual(await db.query('user').filter('age', '<', 50).get(), [ + deepEqual(await db.query2('user').filter('age', '<', 50).get(), [ { id: 2, name: 'james', @@ -92,7 +85,7 @@ await test('hooks - undefined values', async (t) => { age: 31, }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, name: 'youzi', @@ -105,7 +98,7 @@ await test('hooks - undefined values', async (t) => { }, ]) - deepEqual(await db.query('user').filter('age', '<', 50).get(), [ + deepEqual(await db.query2('user').filter('age', '<', 50).get(), [ { id: 1, name: 'youzi', @@ -120,15 +113,7 @@ await test('hooks - undefined values', async (t) => { }) await test('hooks - private nodes', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -163,7 +148,7 @@ await test('hooks - private nodes', async (t) => { age: 25, }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, private: true, @@ -181,7 +166,7 @@ await test('hooks - private nodes', async (t) => { private: false, }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, name: 'youzi', @@ -198,15 +183,7 @@ await test('hooks - private nodes', async (t) => { }) await test('hooks - as SQL CHECK constraints', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -233,19 +210,11 @@ await test('hooks - as SQL CHECK constraints', async (t) => { }), ) - deepEqual((await db.query('user').get()).length, 0) + deepEqual((await db.query2('user').get()).length, 0) }) await test('property modify hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -287,7 +256,7 @@ await test('property modify hooks', async (t) => { city: 'wut', }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, age: 21, name: 'youzi', city: 'Snurko' }, ]) @@ -295,7 +264,7 @@ await test('property modify hooks', async (t) => { city: 'Fail', }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, age: 21, name: 'youzi', city: 'Success' }, ]) @@ -303,21 +272,13 @@ await test('property modify hooks', async (t) => { city: 'ignore', }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, age: 21, name: 'youzi', city: 'Success' }, ]) }) await test('property read hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -355,7 +316,7 @@ await test('property read hooks', async (t) => { city: 'wut', }) - deepEqual(await db.query('user').get(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, age: 21 * 2, @@ -368,15 +329,7 @@ await test('property read hooks', async (t) => { }) await test('aggregate hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -414,19 +367,11 @@ await test('aggregate hooks', async (t) => { age: 100, }) - equal((await db.query('user').sum('age').get().toObject()).age.sum, 21) + equal((await db.query2('user').sum('age').get()).age.sum, 21) }) await test('search hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -464,19 +409,11 @@ await test('search hooks', async (t) => { age: 100, }) - equal((await db.query('user').search('youzi').get().toObject()).length, 1) + equal((await db.query2('user').search('youzi').get()).length, 1) }) await test('groupBy hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -514,21 +451,13 @@ await test('groupBy hooks', async (t) => { age: 100, }) - equal(await db.query('user').groupBy('name').sum('age').get().toObject(), { + equal(await db.query2('user').groupBy('name').sum('age').get(), { youzi: { age: { sum: 21 } }, }) }) await test('filter hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb({ types: { user: { hooks: { @@ -570,21 +499,13 @@ await test('filter hooks', async (t) => { age: 100, }) - equal(await db.query('user').filter('name', '=', 'youzi').get().toObject(), [ + equal(await db.query2('user').filter('name', '=', 'youzi').get(), [ { id: 1, age: 21, name: 'youzi' }, ]) }) await test('include hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -622,21 +543,13 @@ await test('include hooks', async (t) => { age: 100, }) - equal(await db.query('user').include('name', 'age').get().toObject(), [ + equal(await db.query2('user').include('name', 'age').get(), [ { id: 1, age: 21, name: 'youzi' }, ]) }) await test('upsert calls create and/or update hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { hooks: { @@ -657,12 +570,15 @@ await test('upsert calls create and/or update hooks', async (t) => { }, }) - await db.upsert('user', { - name: 'youzi', - age: 21, - }) + await db.upsert( + 'user', + { name: 'youzi' }, + { + age: 21, + }, + ) - const results1 = await db.query('user').get().toObject() + const results1 = await db.query2('user').get() equal(results1.length, 1) @@ -670,12 +586,15 @@ await test('upsert calls create and/or update hooks', async (t) => { equal(results1[0].updatedString != 0, true) await wait(1) - await db.upsert('user', { - name: 'youzi', - age: 45, - }) + await db.upsert( + 'user', + { name: 'youzi' }, + { + age: 45, + }, + ) - const results2 = await db.query('user').get().toObject() + const results2 = await db.query2('user').get() equal(results2.length, 1) equal(results2[0].createdString != 0, true) equal(results2[0].updatedString != 0, true) diff --git a/test/idOffset.ts b/test/idOffset.ts index bc4b53e266..d377420d22 100644 --- a/test/idOffset.ts +++ b/test/idOffset.ts @@ -1,15 +1,8 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' await test('idOffset', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 100, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -31,7 +24,7 @@ await test('idOffset', async (t) => { } await db.drain() - const allUsers1 = await db.query('user').get().toObject() + const allUsers1 = await db.query2('user').get() let id = 0 console.log(allUsers1.length) diff --git a/test/include/include.ts b/test/include/include.ts index b765928f9d..a82297ca37 100644 --- a/test/include/include.ts +++ b/test/include/include.ts @@ -1,15 +1,10 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual, equal } from '../shared/assert.js' +import { checksum } from '../../src/db-client/query2/index.js' await test('include ', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -29,7 +24,7 @@ await test('include ', async (t) => { db.create('user', { nr: 3 }) deepEqual( - await db.query('user').include([]).range(0, 5).get(), + await db.query2('user').include([]).range(0, 5).get(), [ { id: 1, @@ -44,20 +39,14 @@ await test('include ', async (t) => { 'empty array should return no fields', ) - equal((await db.query('user', 1).get()).id, 1) - //equal((await db.query('user', 1).get()).queryId, 3978712180) - equal((await db.query('user').get()).checksum, 2149520223) - equal((await db.query('user').get()).version, 4507870634704934) + equal((await db.query2('user', 1).get()).id, 1) + //equal((await db.query2('user', 1).get()).queryId, 3978712180) + equal(checksum(await db.query2('user').get()), 2149520223) + equal((await db.query2('user').get()).version, 4507870634704934) }) await test('main', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -78,7 +67,7 @@ await test('main', async (t) => { }) deepEqual( - await db.query('user').range(0, 5).get(), + await db.query2('user').range(0, 5).get(), [{ id: 1, c: 10, d: 32, a: 'Derp!', b: 250 }], 'should return correct fields', ) diff --git a/test/include/includeMeta.ts b/test/include/includeMeta.ts index ff9bc6478a..9bcd71fafb 100644 --- a/test/include/includeMeta.ts +++ b/test/include/includeMeta.ts @@ -1,16 +1,10 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual } from '../shared/assert.js' import { italy } from '../shared/examples.js' await test('meta for selva string', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -42,7 +36,7 @@ await test('meta for selva string', async (t) => { }, }) - deepEqual(await db.query('item').include('name', { meta: true }).get(), [ + deepEqual(await db.query2('item').include('name', { meta: true }).get(), [ { id: 1, name: { @@ -57,7 +51,7 @@ await test('meta for selva string', async (t) => { await db.create('item', {}) - deepEqual(await db.query('item').include('name', { meta: true }).get(), [ + deepEqual(await db.query2('item').include('name', { meta: true }).get(), [ { id: 1, name: { @@ -74,7 +68,7 @@ await test('meta for selva string', async (t) => { }, ]) - deepEqual(await db.query('item').include('name', { meta: 'only' }).get(), [ + deepEqual(await db.query2('item').include('name', { meta: 'only' }).get(), [ { id: 1, name: { @@ -100,7 +94,7 @@ await test('meta for selva string', async (t) => { }) deepEqual( - await db.query('item').include('items.$edgeName', { meta: 'only' }).get(), + await db.query2('item').include('items.$edgeName', { meta: 'only' }).get(), [ { id: 1, @@ -134,7 +128,7 @@ await test('meta for selva string', async (t) => { 'Edge meta', ) - deepEqual(await db.query('item').include('email', { meta: 'only' }).get(), [ + deepEqual(await db.query2('item').include('email', { meta: 'only' }).get(), [ { id: 1, email: { @@ -150,7 +144,7 @@ await test('meta for selva string', async (t) => { }, ]) - deepEqual(await db.query('item').include('email', { meta: true }).get(), [ + deepEqual(await db.query2('item').include('email', { meta: true }).get(), [ { id: 1, email: { @@ -169,7 +163,7 @@ await test('meta for selva string', async (t) => { await db.update('item', 1, { name: italy }) - deepEqual(await db.query('item').include('name', { meta: true }).get(), [ + deepEqual(await db.query2('item').include('name', { meta: true }).get(), [ { id: 1, name: { @@ -186,7 +180,7 @@ await test('meta for selva string', async (t) => { }, ]) - deepEqual(await db.query('item').include('body', { meta: true }).get(), [ + deepEqual(await db.query2('item').include('body', { meta: true }).get(), [ { id: 1, body: { @@ -222,7 +216,7 @@ await test('meta for selva string', async (t) => { }) deepEqual( - await db.query('item').include('body', { meta: true }).get(), + await db.query2('item').include('body', { meta: true }).get(), [ { id: 1, @@ -261,7 +255,7 @@ await test('meta for selva string', async (t) => { ) deepEqual( - await db.query('item').include('body', { meta: 'only' }).get(), + await db.query2('item').include('body', { meta: 'only' }).get(), [ { id: 1, @@ -297,7 +291,11 @@ await test('meta for selva string', async (t) => { ) deepEqual( - await db.query('item').include('body', { meta: 'only' }).locale('it').get(), + await db + .query2('item') + .include('body', { meta: 'only' }) + .locale('it') + .get(), [ { id: 1, @@ -316,7 +314,11 @@ await test('meta for selva string', async (t) => { ) deepEqual( - await db.query('item').include('body', { meta: 'only' }).locale('en').get(), + await db + .query2('item') + .include('body', { meta: 'only' }) + .locale('en') + .get(), [ { id: 1, diff --git a/test/include/includeNested.ts b/test/include/includeNested.ts index 7d1c5993e9..7b0654fa2e 100644 --- a/test/include/includeNested.ts +++ b/test/include/includeNested.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual } from '../shared/assert.js' await test('include */**', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -29,7 +23,7 @@ await test('include */**', async (t) => { db.create('user', { nr: 3 }) deepEqual( - await db.query('user').include('*', '**').range(0, 5).get(), + await db.query2('user').include('*', '**').range(0, 5).get(), [ { id: 1, @@ -63,7 +57,7 @@ await test('include */**', async (t) => { ) deepEqual( - await db.query('user').include('friends.*').range(0, 5).get(), + await db.query2('user').include('friends.*').range(0, 5).get(), [ { id: 1, diff --git a/test/include/includeSlice.ts b/test/include/includeSlice.ts index be86009e44..d93f21c29b 100644 --- a/test/include/includeSlice.ts +++ b/test/include/includeSlice.ts @@ -1,16 +1,10 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { italy } from '../shared/examples.js' import { deepEqual, equal } from '../shared/assert.js' await test('slice string / text', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -60,12 +54,12 @@ await test('slice string / text', async (t) => { }, }) - const q = await db.query('item', 1).get() + const q = await db.query2('item', 1).get() equal(q.id, 1) deepEqual( await db - .query('item', id1) + .query2('item', id1) .include('name', { end: 1, meta: true, @@ -124,14 +118,14 @@ await test('slice string / text', async (t) => { ) deepEqual( - await db.query('item', id1).include('body', { end: 3 }).get(), + await db.query2('item', id1).include('body', { end: 3 }).get(), { id: 1, body: { en: '🤪🇺🇸🇿🇼', fi: 'fin', it: 'abc' } }, 'Text all + chars', ) deepEqual( await db - .query('item', id1) + .query2('item', id1) .include('body.fi', { end: 3 }, 'body.en', { end: 3 }) .get(), { id: 1, body: { en: '🤪🇺🇸🇿🇼', fi: 'fin' } }, @@ -139,14 +133,14 @@ await test('slice string / text', async (t) => { ) deepEqual( - await db.query('item', id1).include('body', { end: 3 }).locale('en').get(), + await db.query2('item', id1).include('body', { end: 3 }).locale('en').get(), { id: 1, body: '🤪🇺🇸🇿🇼' }, 'Text specific locale', ) deepEqual( await db - .query('item', id1) + .query2('item', id1) .include('body', { end: 4, bytes: true }) .locale('en') .get(), @@ -155,7 +149,10 @@ await test('slice string / text', async (t) => { ) deepEqual( - await db.query('item', id1).include('body.en', { end: 3 }, 'body.fi').get(), + await db + .query2('item', id1) + .include('body.en', { end: 3 }, 'body.fi') + .get(), { id: 1, body: { en: '🤪🇺🇸🇿🇼', fi: 'finland 🇫🇮! this is finland!' } }, 'Different ends per language', ) diff --git a/test/include/referencesField.ts b/test/include/referencesField.ts index 4fa1a827f7..87dd1445b4 100644 --- a/test/include/referencesField.ts +++ b/test/include/referencesField.ts @@ -1,15 +1,9 @@ import test from '../shared/test.js' -import { BasedDb } from '../../src/index.js' +import {testDb} from '../shared/index.js' import { deepEqual } from '../shared/assert.js' await test('references shortcut', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -37,27 +31,27 @@ await test('references shortcut', async (t) => { } deepEqual( - await db.query('user', mrA).include('name', 'age', 'friends[0].age').get(), + await db.query2('user', mrA).include('name', 'age', 'friends[0].age').get(), { id: 2, age: 50, name: 'Mr a', friends: [{ id: 1, age: 25 }] }, '[0]', ) deepEqual( - await db.query('user').at(0).get(), + await db.query2('user').at(0).get(), { id: 1, age: 25, name: 'Mr b' }, '.at(0)', ) deepEqual( - await db.query('user').at(3).get(), + await db.query2('user').at(3).get(), { id: 4, age: 93, name: 'Mr 1' }, '.at(3)', ) - // await db.query('user').range(-10, -1).get().inspect() + // await db.query2('user').range(-10, -1).get().inspect() // deepEqual( - // await db.query('user').range(-10, -1).get(), + // await db.query2('user').range(-10, -1).get(), // { id: 4, age: 93, name: 'Mr 1' }, // '.at(3)', // ) diff --git a/test/include/thread.perf.ts b/test/include/thread.perf.ts index e7ed1ffc5f..1e0f9e33e4 100644 --- a/test/include/thread.perf.ts +++ b/test/include/thread.perf.ts @@ -13,12 +13,11 @@ await test('include', async (t) => { console.log('STOP SERVER') await db.stop(true) }) - //t.after(() => t.backup(db)) t.after(() => db.stop(true)) // single ref + edge - await db.setSchema({ + const client = await db.setSchema({ locales: { en: true, de: { fallback: ['en'] }, @@ -46,28 +45,28 @@ await test('include', async (t) => { let d = Date.now() for (let i = 0; i < 5e6; i++) { - db.create('simple', { + client.create('simple', { nr: 67, // name: i % 2 ? 'b' : 'a', // nr: rand(0, 10), }) } - await db.drain() + await client.drain() let time = Date.now() - d console.log('create 5M', time, 'ms', (1000 / time) * 5e6, 'OPS per second') d = Date.now() for (let i = 0; i < 5e6; i++) { - db.update('simple', i + 1, { + client.update('simple', i + 1, { nr: 67, // name: i % 2 ? 'b' : 'a', // nr: rand(0, 10), }) } - await db.drain() + await client.drain() time = Date.now() - d @@ -75,7 +74,7 @@ await test('include', async (t) => { let q: any = [] - const x = db.query('simple', 1) + const x = client.query2('simple', 1) registerQuery(x) @@ -95,8 +94,8 @@ await test('include', async (t) => { for (let i = 0; i < 9; i++) { //.range(0, 1) q.push( - db - .query('simple') + client + .query2('simple') .range(0, 5e6 + i) // .include('id') .count() @@ -117,27 +116,27 @@ await test('include', async (t) => { d = Date.now() for (let i = 0; i < 5e6; i++) { - db.delete('simple', i + 1) + client.delete('simple', i + 1) } - await db.drain() + await client.drain() time = Date.now() - d console.log('DEL 5M', time, 'ms', (1000 / time) * 5e6, 'OPS per second') - const simple = await db.create('simple', { + const simple = await client.create('simple', { nr: 1001, }) - const simple2 = await db.create('simple', { + const simple2 = await client.create('simple', { nr: 1002, }) d = Date.now() for (let i = 0; i < 10e6; i++) { - db.create('simple', { + client.create('simple', { nr: 67, start: d + i * 1e3, end: d + i * 1e3 + 10e3, @@ -163,8 +162,27 @@ await test('include', async (t) => { const q: any[] = [] for (let i = 0; i < 5; i++) { q.push( - db - .query('simple') + client + .query2('simple') + .include('id', 'nr') + // .range(0, 10e6 + i) + .filter('nr', '=', 100 + i) + .get(), + ) + } + await Promise.all(q) + }, + '1M Nodes include', + { repeat: 100 }, + ) + + await perf( + async () => { + const q: any[] = [] + for (let i = 0; i < 5; i++) { + q.push( + client + .query2('simple') .include('id', 'nr') // .range(0, 10e6 + i) .filter('nr', '=', 100 + i) @@ -177,17 +195,61 @@ await test('include', async (t) => { { repeat: 100 }, ) - db.create('simple', { + client.create('simple', { nr: 100, // name: i % 2 ? 'b' : 'a', // nr: rand(0, 10), }) - await db - .query('simple') + await client + .query2('simple') .include('nr') // 'start', 'end', 'target' .filter('target.nr', '>', 1001) // .or('nr', '=', 100) + // .or('nr', '=', 200) + // .or((p) => { + // p.filter('nr', 10).and('nr', 20) + // }) + // .and((p) => { + // p.filter('y', 20) + // p.or('x', 10) + // }) + + /* + { + filter: { + props: {nr" '>' 10001} + and: { + props: { y: [10]} + or: { props: { x: 10}} + } + } + + } + + { + filter: { + props: {nr" '>' 10001} + or: { + props: { nr: [10,20]} + } + } + + } + + { + filter: { + props: {nr" '>' 10001} + or: { + or: { + } + } + } + + } + + */ + .range(0, 10e6) .get() .inspect() @@ -235,7 +297,7 @@ await test.skip('default', async (t) => { special: { type: 'vector', size: 4, - baseType: 'number', + baseType: 'float64', // TODO //default: new Uint8Array([0, 0, 0, 0]), }, @@ -259,7 +321,7 @@ await test.skip('default', async (t) => { await perf( async () => { await db - .query('user') + .query2('user') .include('name', 'bio', 'hack', 'hack2', 'book') .get() .inspect() diff --git a/test/insertOnly.ts b/test/insertOnly.ts index 2cc9b897ec..d69b8bd636 100644 --- a/test/insertOnly.ts +++ b/test/insertOnly.ts @@ -1,16 +1,10 @@ import { BasedDb } from '../src/index.js' -import { throws } from './shared/assert.js' +import { deepEqual, throws } from './shared/assert.js' +import { testDb } from './shared/index.js' import test from './shared/test.js' await test('insert only => no delete', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(async () => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { audit: { insertOnly: true, @@ -21,9 +15,10 @@ await test('insert only => no delete', async (t) => { }, }) - const a = await db.create('audit', { v: 100 }) - await db.create('audit', { v: 100 }) - await throws(() => db.delete('audit', a)) + const a = await client.create('audit', { v: 100 }) + await client.create('audit', { v: 100 }) + await throws(() => client.delete('audit', a)) + deepEqual(await client.query2('audit', a).get(), { id: 1, v: 100 }) }) await test('colvec requires insertOnly', async (t) => { diff --git a/test/instantModify.ts b/test/instantModify.ts index b709f1f070..e76532117c 100644 --- a/test/instantModify.ts +++ b/test/instantModify.ts @@ -4,6 +4,7 @@ import test from './shared/test.js' import { dirname, join } from 'path' import { fileURLToPath } from 'url' import { Worker } from 'node:worker_threads' + await test.skip('instantModify', async (t) => { const db = new BasedDb({ path: t.tmp, @@ -16,8 +17,8 @@ await test.skip('instantModify', async (t) => { await db.setSchema({ locales: { en: {}, - it: { fallback: 'en' }, - fi: { fallback: 'en' }, + it: { fallback: ['en'] }, + fi: { fallback: ['en'] }, }, types: { country: { @@ -108,12 +109,12 @@ await test.skip('instantModify', async (t) => { let j = 1000 await db2.start() while (j--) { - // db2.query('country').get().toObject() + // db2.query2('country').get() for (const update of updates) { db2.server.modify(update) } } - // console.log('AFTER:', await db2.query('country').get().toObject()) + // console.log('AFTER:', await db2.query2('country').get()) await db2.destroy() }) diff --git a/test/isModified.perf.ts b/test/isModified.perf.ts index fabd443afb..ae2353e25d 100644 --- a/test/isModified.perf.ts +++ b/test/isModified.perf.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('isModified', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -26,13 +20,13 @@ await test('isModified', async (t) => { const q: any = [] for (let i = 0; i < 10; i++) { - q.push(db.query('user').range(0, 5).get()) + q.push(db.query2('user').range(0, 5).get()) } const r = await Promise.all(q) for (const result of r) { - deepEqual(result.toObject(), [ + deepEqual(result, [ { id: 1, nr: 0 }, { id: 2, nr: 1 }, { id: 3, nr: 2 }, diff --git a/test/json.ts b/test/json.ts index fd1b192099..e8787f32a2 100644 --- a/test/json.ts +++ b/test/json.ts @@ -1,16 +1,11 @@ import { notEqual } from 'node:assert' -import { BasedDb } from '../src/index.js' import { deepEqual } from './shared/assert.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' +import { checksum as q2checksum } from '../../src/db-client/query2/index.js' await test('json', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { jsonDerulo: { name: 'string', @@ -33,7 +28,7 @@ await test('json', async (t) => { await db.create('jsonDerulo', derulo) deepEqual( - await db.query('jsonDerulo').get(), + await db.query2('jsonDerulo').get(), [ { id: 1, @@ -48,7 +43,7 @@ await test('json', async (t) => { }) deepEqual( - await db.query('jsonDerulo').get(), + await db.query2('jsonDerulo').get(), [ { id: 1, ...derulo }, { id: 2, myJson: {}, name: '' }, @@ -56,13 +51,12 @@ await test('json', async (t) => { 'after empty object', ) - await db.update('jsonDerulo', { - id: jay, + await db.update('jsonDerulo', jay, { myJson: null, }) deepEqual( - await db.query('jsonDerulo').get(), + await db.query2('jsonDerulo').get(), [ { id: 1, ...derulo }, { id: 2, myJson: null, name: '' }, @@ -72,13 +66,7 @@ await test('json', async (t) => { }) await test('json and crc32', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { article: { @@ -92,13 +80,13 @@ await test('json and crc32', async (t) => { article: 'a', }) - const checksum = (await db.query('user', user1).get()).checksum + const checksum = q2checksum(await db.query2('user', user1).get()) await db.update('user', user1, { article: 'b', }) - const checksum2 = (await db.query('user', user1).get()).checksum + const checksum2 = q2checksum(await db.query2('user', user1).get()) notEqual(checksum, checksum2, 'Checksum is not the same') }) diff --git a/test/locales.ts b/test/locales.ts index aaabb96947..dd52b96f1f 100644 --- a/test/locales.ts +++ b/test/locales.ts @@ -1,23 +1,14 @@ -import assert from 'node:assert' -import { BasedDb } from '../src/index.js' -import native from '../src/native.js' import test from './shared/test.js' -import { langCodesMap, Schema } from '../src/schema/index.js' +import { testDb } from './shared/index.js' +import { LangCode } from '../src/zigTsExports.js' -await test('locales', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - const locales: Schema['locales'] = {} - const langs = [...langCodesMap.keys()].filter((val) => val !== 'none') - for (const key of langs) { - locales[key] = {} - } +const langs = [...Object.keys(LangCode)].filter((val) => val !== 'none') +const locales = Object.fromEntries( + langs.map((l: keyof typeof LangCode) => [l, {}]), +) - await db.setSchema({ +await test('locales', async (t) => { + const client = await testDb(t, { locales, types: { thing: { @@ -38,29 +29,23 @@ await test('locales', async (t) => { payload.text[key] = key } - db.create('thing', payload) + client.create('thing', payload) } - await db.drain() - - const things = await db.query('thing').get().toObject() + const things = await client.query2('thing').get() for (const thing of things) { - const payload: any = { + const payload: typeof thing = { string: null, - text: {}, + text: Object.fromEntries(Object.keys(thing.text).map((l) => [l, null])), } - for (const key of langs) { - payload.text[key] = null - } - - db.update('thing', thing.id, payload) + client.update('thing', thing.id, payload) } - await db.drain() + await client.drain() - const updatedThings = await db.query('thing').get().toObject() + const updatedThings = await client.query2('thing').get() for (const thing of updatedThings) { if (thing.string !== '') { @@ -73,16 +58,3 @@ await test('locales', async (t) => { } } }) - -await test('locales sanity check', async (t) => { - // prettier-ignore - const missingOnDarwin = new Set([ 'aa', 'ab', 'ak', 'sq', 'an', 'as', 'ae', 'ay', 'az', 'bn', 'bi', 'bs', 'br', 'my', 'km', 'ce', 'cv', 'kw', 'co', 'dv', 'dz', 'fo', 'ff', 'gd', 'gl', 'kl', 'gu', 'ht', 'ha', 'hi', 'ig', 'id', 'ia', 'iu', 'ik', 'ga', 'kn', 'ks', 'rw', 'ku', 'ky', 'lo', 'la', 'lv', 'lb', 'li', 'ln', 'mk', 'mg', 'ms', 'ml', 'mt', 'gv', 'mi', 'mn', 'ne', 'se', 'no', 'nb', 'nn', 'oc', 'or', 'om', 'os', 'pa', 'ps', 'fa', 'qu', 'rm', 'sm', 'sa', 'sc', 'sr', 'sd', 'si', 'so', 'st', 'nr', 'sw', 'ss', 'tl', 'tg', 'ta', 'tt', 'te', 'th', 'bo', 'ti', 'to', 'ts', 'tn', 'tk', 'ug', 'ur', 'uz', 've', 'vi', 'wa', 'cy', 'fy', 'wo', 'xh', 'yi', 'yo', 'zu', 'ka', 'cnr' ]) - const selvaLangs = new Set(native.selvaLangAll().split('\n')) - - langCodesMap.forEach((value, key) => { - if (value === 0) return - if (process.platform === 'darwin' && !missingOnDarwin.has(key)) { - assert(selvaLangs.has(key), `Lang '${key}' is found in selva`) - } - }) -}) diff --git a/test/mainAndEmptyStringFieldDelete.ts b/test/mainAndEmptyStringFieldDelete.ts index 7eae465843..b7aae22e02 100644 --- a/test/mainAndEmptyStringFieldDelete.ts +++ b/test/mainAndEmptyStringFieldDelete.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('main + empty', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { role: ['admin', 'translator', 'viewer'], @@ -26,7 +20,7 @@ await test('main + empty', async (t) => { location: '', }) - deepEqual(await db.query('user').get().toObject(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, role: 'translator', location: '' }, ]) }) diff --git a/test/mem.ts b/test/mem.ts index 37344a3630..3a07ce2181 100644 --- a/test/mem.ts +++ b/test/mem.ts @@ -1,18 +1,10 @@ -import { fastPrng, wait } from '../src/utils/index.js' -import { BasedDb } from '../src/index.js' +import { fastPrng } from '../src/utils/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { equal } from './shared/assert.js' await test('mem', async (t) => { - const db = new BasedDb({ - path: t.tmp, - // low amount to force many flushes - maxModifySize: 10000, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { data: { props: { @@ -27,12 +19,11 @@ await test('mem', async (t) => { const amount = 1e3 const repeat = 1e3 - // 2M inserts rmeoves const rnd = fastPrng() for (let j = 0; j < repeat; j++) { // To keep many different blocks - await db.create('data', { + await client.create('data', { age: 666, name: 'BASIC ' + j, }) @@ -45,7 +36,7 @@ await test('mem', async (t) => { cnt++ } ids.push( - db.create('data', { + client.create('data', { age: i, name: `Mr FLAP ${i}`, a: x ? { id: ids[rnd(0, ids.length - 1)], $derp: i } : null, @@ -53,16 +44,16 @@ await test('mem', async (t) => { ) } - await db.drain() - await db.create('data', { + await client.drain() + await client.create('data', { age: 667, name: 'BASIC2 ' + j, }) equal( ( - await db - .query('data') + await client + .query2('data') .include('b') .filter('b', 'exists') .range(0, amount) @@ -72,11 +63,14 @@ await test('mem', async (t) => { ) for (let i = 0; i < amount; i++) { - db.delete('data', ids[i]) + client.delete('data', ids[i]) } - await db.drain() + await client.drain() - equal((await db.query('data').range(0, 10e6).get()).length, (j + 1) * 2) + equal( + (await client.query2('data').range(0, 10e6).get()).length, + (j + 1) * 2, + ) } }) diff --git a/test/migration.ts b/test/migration.ts index 2e07059506..729bbae438 100644 --- a/test/migration.ts +++ b/test/migration.ts @@ -10,7 +10,7 @@ await test('migration', async (t) => { await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema({ version: '1.0.0', @@ -85,7 +85,7 @@ await test('migration', async (t) => { await db.create('person', payload) } - // console.dir(await db.query('person').include('*', '**').get().toObject(), { + // console.dir(await db.query2('person').include('*', '**').get(), { // depth: null, // }) @@ -224,8 +224,8 @@ await test('migration', async (t) => { await db.setSchema(schema) } - const users = await db.query('user').get().toObject() - const people = await db.query('person').include('*', '**').get().toObject() + const users = await db.query2('user').get() + const people = await db.query2('person').include('*', '**').get() equal(users.length, 10) equal(people.length, 10) diff --git a/test/mixed.ts b/test/mixed.ts deleted file mode 100644 index c816c4b90d..0000000000 --- a/test/mixed.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { setTimeout } from 'timers/promises' -import { BasedDb } from '../src/index.js' -import test from './shared/test.js' - -await test.skip('mixed', async (t) => { - try { - // const populate = await import('./shared/tmp/populate/index.js') - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - // await populate.default(db) - await setTimeout(1e3) - await db.update('phase', 1, { - scenarios: { - add: [ - { - id: 1, - $sequence: 1, - }, - ], - }, - }) - await setTimeout(1e3) - } catch (e) { - console.info('skipping mixed test') - } -}) diff --git a/test/modify/delete.ts b/test/modify/delete.ts new file mode 100644 index 0000000000..7c7018f941 --- /dev/null +++ b/test/modify/delete.ts @@ -0,0 +1,28 @@ +import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' +import test from '../shared/test.js' + +await test('delete', async (t) => { + const db = await testDb(t, { + types: { + user: { + name: 'string', + }, + }, + }) + + // 1. Create + const id = await db.create('user', { name: 'hello' }) + const res = await db.query2('user', id).get() + deepEqual(res?.name, 'hello') + + // 2. Delete + await db.delete('user', id) + + // 3. Verify + const res2 = await db.query2('user', id).get() + deepEqual(res2, null, 'Should be null after delete') + + // 4. Delete again (should not throw, maybe return false?) + await db.delete('user', id) +}) diff --git a/test/modify/hooks/create.ts b/test/modify/hooks/create.ts new file mode 100644 index 0000000000..112b6016c2 --- /dev/null +++ b/test/modify/hooks/create.ts @@ -0,0 +1,38 @@ +import { deepEqual, testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - hooks - create', async (t) => { + const db = await testDb(t, { + types: { + user: { + props: { + rating: { + type: 'number', + hooks: { + create(value) { + if (value < 5) { + return value + 1 + } + }, + }, + }, + }, + hooks: { + create(payload) { + if (!payload.rating) { + payload.rating = 5 + } + }, + }, + }, + }, + }) + + { + const id = await db.create('user', {}) + deepEqual(await db.query2('user', id).get(), { + id, + rating: 5, + }) + } +}) diff --git a/test/modify/hooks/type_test.ts b/test/modify/hooks/type_test.ts new file mode 100644 index 0000000000..e01d62cd3b --- /dev/null +++ b/test/modify/hooks/type_test.ts @@ -0,0 +1,113 @@ +import { parseSchema } from '../../../src/schema.js' +import type { InferPayload } from '../../../src/schema/schema/payload.js' +import { testDb } from '../../shared/index.js' + +async function check() { + const schemaOut = parseSchema({ + types: { + user: { + props: { + other: { + ref: 'other', + prop: 'user', + }, + }, + hooks: { + create(userPayload) { + const other = userPayload.other + }, + }, + }, + other: { + user: { + ref: 'user', + prop: 'other', + }, + }, + }, + }) + + type UserPayload = InferPayload + const userPayload: UserPayload = { + other: 1, + // @ts-expect-error + nothing: 'oops', + } + + // @ts-expect-error + schemaOut.types.foo = {} + + const db = await testDb(null as any, { + types: { + user: { + other: { + ref: 'other', + prop: 'user', + }, + }, + other: { + user: { + ref: 'user', + prop: 'other', + }, + }, + }, + }) + + db.create('user', { + other: 1, + // @ts-expect-error + nothing: 'oops', + }) +} + +// import type { InferPayload } from '../../../src/db-client/modify/types.js' +// import { parseSchema, type SchemaIn } from '../../../src/schema.js' + +// // Simple test case to verify strict typing of create hook payload +// const bla = parseSchema({ +// types: { +// user: { +// props: { +// isNice: 'boolean', +// }, +// // testPayload: { +// // // @ts-expect-error +// // isNice: number, +// // }, +// hooks: { +// create(payload) { +// const isNice: boolean | null | undefined = payload!.isNice +// // @ts-expect-error +// const isWrong: boolean | null | undefined = payload.isWrong + +// return payload +// }, +// }, +// // testhooks: { +// // create(payload) { +// // const isNice: boolean | null | undefined = payload.isNice +// // // @ts-expect-error +// // const isWrong: boolean | null | undefined = payload.isWrong +// // }, +// // }, +// }, +// }, +// }) + +// bla.types.haha + +// const schema = { +// types: { +// user: { +// props: { +// isNice: 'boolean', +// }, +// }, +// }, +// } as const + +// const payload: InferPayload = {} +// const isNice: boolean | null | undefined = payload.isNice +// // @ts-expect-error +// const isWrong: boolean | null | undefined = payload.isWrong diff --git a/test/modify/insert.ts b/test/modify/insert.ts new file mode 100644 index 0000000000..df5070f235 --- /dev/null +++ b/test/modify/insert.ts @@ -0,0 +1,75 @@ +import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' +import test from '../shared/test.js' + +await test('insert', async (t) => { + const db = await testDb(t, { + types: { + user: { + email: 'alias', + uuid: 'alias', + isNice: 'boolean', + }, + }, + }) + + // 1. Create via insert + const id1 = await db.insert( + 'user', + { uuid: '9dg786' }, // target by alias + { email: 'youri@saulx.com', isNice: true }, + ) + + const res1 = await db.query2('user', id1).get() + deepEqual(res1, { + id: id1, + uuid: '9dg786', + email: 'youri@saulx.com', + isNice: true, + }) + + // 2. Insert with same alias (should NOT update) + const id2 = await db.insert('user', { uuid: '9dg786' }, { isNice: false }) + + deepEqual(id1, id2, 'Ids should be the same') + + const res2 = await db.query2('user', id1).get() + deepEqual(res2, { + id: id1, + uuid: '9dg786', + email: 'youri@saulx.com', + isNice: true, // Should still be true + }) + + // 3. Create another one via different alias field + const id3 = await db.insert( + 'user', + { email: 'bla@bla.com' }, + { uuid: 'unique-id-2', isNice: true }, + ) + + const res3 = await db.query2('user', id3).get() + deepEqual(res3, { + id: id3, + uuid: 'unique-id-2', + email: 'bla@bla.com', + isNice: true, + }) + + // 4. Insert via different alias field (should NOT update) + const id4 = await db.insert( + 'user', + { email: 'bla@bla.com' }, + { isNice: false }, + ) + + deepEqual(id3, id4, 'Ids should be the same 2') + + const res4 = await db.query2('user', id3).get() + deepEqual(res4, { + id: id3, + uuid: 'unique-id-2', + email: 'bla@bla.com', + isNice: true, // Should still be true + }) +}) diff --git a/test/modify/props/alias.ts b/test/modify/props/alias.ts new file mode 100644 index 0000000000..36231e0d90 --- /dev/null +++ b/test/modify/props/alias.ts @@ -0,0 +1,72 @@ +import { parseSchema } from '../../../src/schema.js' +import { deepEqual, throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify alias', async (t) => { + const db = await testDb(t, { + types: { + thing: { + myAlias: 'alias', + }, + }, + }) + + // Basic alias + const id1 = await db.create('thing', { + myAlias: 'my-alias-value', + }) + const id2 = await db.create('thing', { + myAlias: 'b-alias', + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + myAlias: 'my-alias-value', + }) + // Update + await db.update('thing', id1, { + myAlias: 'another-alias', + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + myAlias: 'another-alias', + }) + + await db.update('thing', id2, { + myAlias: 'another-alias', + }) + deepEqual(await db.query2('thing', { myAlias: 'b-alias' }).get(), null) + deepEqual(await db.query2('thing', { myAlias: 'another-alias' }).get(), { + id: id2, + myAlias: 'another-alias', + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + myAlias: '', + }) + // Delete + await db.update('thing', id2, { + myAlias: null, + }) + deepEqual((await db.query2('thing', id2).get())!.myAlias, '') +}) + +await test('schema alias on edge not allowed', async (t) => { + throws(async () => + parseSchema({ + types: { + thing: { + myAlias: 'alias', + }, + holder: { + // @ts-expect-error + toThing: { + ref: 'thing', + prop: 'holders', + $edgeAlias: 'alias', + }, + }, + }, + }), + ) +}) diff --git a/test/modify/props/binary.ts b/test/modify/props/binary.ts new file mode 100644 index 0000000000..938b780efb --- /dev/null +++ b/test/modify/props/binary.ts @@ -0,0 +1,78 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify binary', async (t) => { + const db = await testDb(t, { + types: { + thing: { + blob: 'binary', + }, + }, + }) + + const b1 = new Uint8Array([1, 2, 3]) + const id1 = await db.create('thing', { + blob: b1, + }) + const res1 = await db.query2('thing', id1).get() + + deepEqual(res1?.blob, b1) + + const b2 = new Uint8Array([4, 5, 6, 7]) + await db.update('thing', id1, { + blob: b2, + }) + + const res2 = await db.query2('thing', id1).get() + deepEqual(res2?.blob, b2) + + // Delete + await db.update('thing', id1, { + blob: null, + }) + const res3 = await db.query2('thing', id1).get() + deepEqual(res3?.blob, new Uint8Array()) +}) + +await test('modify binary on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + blob: 'binary', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeBlob: 'binary', + }, + }, + }, + }) + + const b1 = new Uint8Array([1, 2, 3]) + const targetId = await db.create('thing', { blob: b1 }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeBlob: b1, + }, + }) + + const res1 = await db.query2('holder', id1).include('toThing.$edgeBlob').get() + + deepEqual(res1?.toThing?.$edgeBlob, b1) + + const b2 = new Uint8Array([4, 5, 6, 7]) + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeBlob: b2, + }, + }) + + const res2 = await db.query2('holder', id1).include('toThing.$edgeBlob').get() + + deepEqual(res2?.toThing?.$edgeBlob, b2) +}) diff --git a/test/modify/props/boolean.ts b/test/modify/props/boolean.ts new file mode 100644 index 0000000000..4520bd9062 --- /dev/null +++ b/test/modify/props/boolean.ts @@ -0,0 +1,152 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify basic boolean', async (t) => { + const db = await testDb(t, { + types: { + user: { + isNice: 'boolean', + }, + }, + }) + + const a = db.create('user', {}) + const b = db.create('user', { isNice: true }) + const c = db.create('user', { isNice: false }) + + deepEqual(await db.query2('user').get(), [ + { id: 1, isNice: false }, + { id: 2, isNice: true }, + { id: 3, isNice: false }, + ]) + + db.update('user', a, { isNice: true }) + db.update('user', b, { isNice: true }) + db.update('user', c, { isNice: true }) + + deepEqual(await db.query2('user').get(), [ + { id: 1, isNice: true }, + { id: 2, isNice: true }, + { id: 3, isNice: true }, + ]) + + db.update('user', a, { isNice: false }) + db.update('user', b, { isNice: false }) + + deepEqual(await db.query2('user').get(), [ + { id: 1, isNice: false }, + { id: 2, isNice: false }, + { id: 3, isNice: true }, + ]) + + // Delete + db.update('user', a, { isNice: null }) + db.update('user', b, { isNice: null }) + db.update('user', c, { isNice: null }) + + deepEqual(await db.query2('user').get(), [ + { id: 1, isNice: false }, + { id: 2, isNice: false }, + { id: 3, isNice: false }, + ]) +}) + +await test('modify boolean on edge', async (t) => { + const db = await testDb(t, { + types: { + user: { + isNice: 'boolean', + }, + holder: { + toUser: { + ref: 'user', + prop: 'holders', + $edgeBool: 'boolean', + }, + }, + }, + }) + + const u1 = await db.create('user', { isNice: true }) + const a = db.create('holder', { + toUser: { + id: u1, + }, + }) + const b = await db.create('holder', { + toUser: { + id: u1, + $edgeBool: true, + }, + }) + const c = db.create('holder', { + toUser: { + id: u1, + $edgeBool: false, + }, + }) + + // Basic creates + // Check a (default false?) + const resA = await db + .query2('holder', await a) + .include('toUser.$edgeBool') + .get() + deepEqual(resA?.toUser?.$edgeBool, false) + + // Check b (true) + const resB = await db.query2('holder', b).include('toUser.$edgeBool').get() + resB?.toUser?.$edgeBool + deepEqual(resB?.toUser?.$edgeBool, true) + + // Check c (false) + const resC = await db + .query2('holder', await c) + .include('toUser.$edgeBool') + .get() + + deepEqual(resC?.toUser?.$edgeBool, false) + + // Updates to true + db.update('holder', await a, { toUser: { id: u1, $edgeBool: true } }) + db.update('holder', b, { toUser: { id: u1, $edgeBool: true } }) + db.update('holder', await c, { toUser: { id: u1, $edgeBool: true } }) + + const resA2 = await db + .query2('holder', await a) + .include('toUser.$edgeBool') + .get() + + deepEqual(resA2?.toUser?.$edgeBool, true) + const resB2 = await db.query2('holder', b).include('toUser.$edgeBool').get() + + deepEqual(resB2?.toUser?.$edgeBool, true) + const resC2 = await db + .query2('holder', await c) + .include('toUser.$edgeBool') + .get() + + deepEqual(resC2?.toUser?.$edgeBool, true) + + // Updates to false + db.update('holder', await a, { toUser: { id: u1, $edgeBool: false } }) + db.update('holder', b, { toUser: { id: u1, $edgeBool: false } }) + db.update('holder', await c, { toUser: { id: u1, $edgeBool: false } }) + + const resA3 = await db + .query2('holder', await a) + .include('toUser.$edgeBool') + .get() + + deepEqual(resA3?.toUser?.$edgeBool, false) + const resB3 = await db.query2('holder', b).include('toUser.$edgeBool').get() + + deepEqual(resB3?.toUser?.$edgeBool, false) + const resC3 = await db + .query2('holder', await c) + .include('toUser.$edgeBool') + .get() + + deepEqual(resC3?.toUser?.$edgeBool, false) +}) diff --git a/test/modify/props/cardinality.ts b/test/modify/props/cardinality.ts new file mode 100644 index 0000000000..5f16be3252 --- /dev/null +++ b/test/modify/props/cardinality.ts @@ -0,0 +1,120 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify cardinality basic', async (t) => { + const db = await testDb(t, { + types: { + thing: { + counter: 'cardinality', + }, + }, + }) + + // Cardinality is a probabilistic counter. + // We usually "add" values to it. + const id1 = await db.create('thing', { + counter: 'item1', + }) + + // Assuming we can read the count? Or the approximation. + // The query might return the count. + const res1 = await db.query2('thing', id1).get() + deepEqual(res1?.counter, 1) + + // Add another unique item + await db.update('thing', id1, { + counter: 'item2', + }) + const res2 = await db.query2('thing', id1).get() + deepEqual(res2?.counter, 2) + + // Add duplicate item (count shouldn't change) + await db.update('thing', id1, { + counter: 'item1', + }) + const res3 = await db.query2('thing', id1).get() + deepEqual(res3?.counter, 2) + + // Delete + await db.update('thing', id1, { + counter: null, + }) + + const res4 = await db.query2('thing', id1).get() + deepEqual(res4?.counter, 0) +}) + +await test('modify cardinality on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + counter: 'cardinality', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeCounter: 'cardinality', + }, + }, + }, + }) + + const targetId = await db.create('thing', { counter: 'a' }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeCounter: 'item1', + }, + }) + + const res1 = await db + .query2('holder', id1) + .include('toThing.$edgeCounter') + .get() + + deepEqual(res1?.toThing?.$edgeCounter, 1) + + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeCounter: 'item2', + }, + }) + + const res2 = await db + .query2('holder', id1) + .include('toThing.$edgeCounter') + .get() + + deepEqual(res2?.toThing?.$edgeCounter, 2) +}) + +await test('modify cardinality array', async (t) => { + const db = await testDb(t, { + types: { + thing: { + counter: 'cardinality', + }, + }, + }) + + // Create with array + const id1 = await db.create('thing', { + counter: ['item1', 'item2'], + }) + + const res1 = await db.query2('thing', id1).get() + // Should have 2 unique items + deepEqual(res1?.counter, 2) + + // Update with array (one new, one duplicate) + await db.update('thing', id1, { + counter: ['item2', 'item3'], + }) + + const res2 = await db.query2('thing', id1).get() + // item1, item2, item3 -> 3 unique items + deepEqual(res2?.counter, 3) +}) diff --git a/test/modify/props/default.ts b/test/modify/props/default.ts new file mode 100644 index 0000000000..9da5f96955 --- /dev/null +++ b/test/modify/props/default.ts @@ -0,0 +1,157 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - default values basic', async (t) => { + const db = await testDb(t, { + locales: { en: true }, + types: { + thing: { + name: { type: 'string', default: 'Untitled' }, + score: { type: 'number', default: 100 }, + isActive: { type: 'boolean', default: true }, + myEnum: { enum: ['a', 'b'], default: 'a' }, + myJson: { type: 'json', default: { foo: 'bar' } }, + myText: { type: 'text', default: { en: 'Hello' } }, + myTs: { type: 'timestamp', default: 1000 }, + }, + }, + }) + + // 1. Create with no values provided + const a = await db.create('thing', {}) + const resA: any = await db.query2('thing', a).get() + deepEqual(resA, { + id: a, + name: 'Untitled', + score: 100, + isActive: true, + myEnum: 'a', + myJson: { foo: 'bar' }, + myText: { en: 'Hello' }, + myTs: 1000, + }) + + // 2. Create with specific values (override default) + const b = await db.create('thing', { + name: 'Specific', + score: 10, + isActive: false, + myEnum: 'b', + myJson: { foo: 'baz' }, + myText: { en: 'Hi' }, + myTs: 2000, + }) + const resB = await db.query2('thing', b).get() + deepEqual(resB, { + id: b, + name: 'Specific', + score: 10, + isActive: false, + myEnum: 'b', + myJson: { foo: 'baz' }, + myText: { en: 'Hi' }, + myTs: 2000, + }) + + // 3. Create with mixed values + const c = await db.create('thing', { score: 50, myEnum: 'b' }) + const resC = await db.query2('thing', c).get() + deepEqual(resC, { + id: c, + name: 'Untitled', + score: 50, + isActive: true, + myEnum: 'b', + myJson: { foo: 'bar' }, + myText: { en: 'Hello' }, + myTs: 1000, + }) +}) + +await test('modify - default values on edge', async (t) => { + const db = await testDb(t, { + locales: { en: true }, + types: { + user: { + name: 'string', + }, + group: { + member: { + ref: 'user', + prop: 'groups', + $role: { type: 'string', default: 'member' }, + $level: { type: 'number', default: 1 }, + $edgeEnum: { enum: ['a', 'b'], default: 'a' }, + $edgeJson: { type: 'json', default: { foo: 'bar' } }, + $edgeText: { type: 'text', default: { en: 'Hello' } }, + }, + }, + }, + }) + + const u1 = await db.create('user', { name: 'u1' }) + + // 1. Create edge without edge props + const g1 = await db.create('group', { + member: { id: u1 }, + }) + + const resG1 = await db + .query2('group', g1) + .include('member.$role') + .include('member.$level') + .include('member.id') + .get() + + deepEqual(resG1?.member?.$role, 'member') + deepEqual(resG1?.member?.$level, 1) + + // 2. Create edge with edge props + const g2 = await db.create('group', { + member: { + id: u1, + $role: 'admin', + $level: 99, + $edgeEnum: 'b', + $edgeJson: { foo: 'baz' }, + $edgeText: { en: 'Hi' }, + }, + }) + + const resG2: any = await db + .query2('group', g2) + .include('member.$role') + .include('member.$level') + .include('member.id') + .include('member.$edgeEnum') + .include('member.$edgeJson') + .include('member.$edgeText') + .get() + + deepEqual(resG2.member?.$role, 'admin') + deepEqual(resG2.member?.$level, 99) + deepEqual(resG2.member?.$edgeEnum, 'b') + deepEqual(resG2.member?.$edgeJson, { foo: 'baz' }) + deepEqual(resG2.member?.$edgeText, { en: 'Hi' }) + + // 3. Check defaults on edge + const g3 = await db.create('group', { + member: { id: u1 }, + }) + + const resG3: any = await db + .query2('group', g3) + .include('member.$role') + .include('member.$level') + .include('member.$edgeEnum') + .include('member.$edgeJson') + .include('member.$edgeText') + .get() + + deepEqual(resG3.member?.$role, 'member') + deepEqual(resG3.member?.$level, 1) + deepEqual(resG3.member?.$edgeEnum, 'a') + deepEqual(resG3.member?.$edgeJson, { foo: 'bar' }) + deepEqual(resG3.member?.$edgeText, { en: 'Hello' }) +}) diff --git a/test/modify/props/enum.ts b/test/modify/props/enum.ts new file mode 100644 index 0000000000..7153d394aa --- /dev/null +++ b/test/modify/props/enum.ts @@ -0,0 +1,82 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify enum', async (t) => { + const db = await testDb(t, { + types: { + thing: { + option: { enum: ['first', 'second', 'third'] }, + }, + }, + }) + + const id1 = await db.create('thing', { + option: 'first', + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + option: 'first', + }) + + await db.update('thing', id1, { + option: 'second', + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + option: 'second', + }) + + // Delete + await db.update('thing', id1, { + option: null, + }) + deepEqual((await db.query2('thing', id1).get())?.option, undefined) +}) + +await test('modify enum on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + option: { enum: ['a', 'b'] }, + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeOption: { enum: ['first', 'second'] }, + }, + }, + }, + }) + + const targetId = await db.create('thing', { option: 'a' }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeOption: 'first', + }, + }) + + const res1 = await db + .query2('holder', id1) + .include('toThing.$edgeOption') + .get() + + deepEqual(res1?.toThing?.$edgeOption, 'first') + + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeOption: 'second', + }, + }) + + const res2 = await db + .query2('holder', id1) + .include('toThing.$edgeOption') + .get() + deepEqual(res2?.toThing?.$edgeOption, 'second') +}) diff --git a/test/modify/props/json.ts b/test/modify/props/json.ts new file mode 100644 index 0000000000..4d78e2b1c2 --- /dev/null +++ b/test/modify/props/json.ts @@ -0,0 +1,80 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify json', async (t) => { + const db = await testDb(t, { + types: { + thing: { + data: 'json', + }, + }, + }) + + const obj = { foo: 'bar', baz: 123, list: [1, 2] } + const id1 = await db.create('thing', { + data: obj, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + data: obj, + }) + + const arr = ['a', 'b', 'c'] + await db.update('thing', id1, { + data: arr, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + data: arr, + }) + + // Delete + await db.update('thing', id1, { + data: null, + }) + deepEqual((await db.query2('thing', id1).get())?.data, null) +}) + +await test('modify json on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + data: 'json', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeData: 'json', + }, + }, + }, + }) + + const obj = { foo: 'bar' } + const targetId = await db.create('thing', { data: {} }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeData: obj, + }, + }) + + const res1 = await db.query2('holder', id1).include('toThing.$edgeData').get() + + deepEqual(res1?.toThing?.$edgeData, obj) + + const obj2 = { baz: 'qux' } + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeData: obj2, + }, + }) + + const res2 = await db.query2('holder', id1).include('toThing.$edgeData').get() + deepEqual(res2?.toThing?.$edgeData, obj2) +}) diff --git a/test/modify/props/mixed.ts b/test/modify/props/mixed.ts new file mode 100644 index 0000000000..d0e038a0ab --- /dev/null +++ b/test/modify/props/mixed.ts @@ -0,0 +1,70 @@ +import { ENCODER } from '../../../src/utils/index.js' +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('mixed props', async (t) => { + const db = await testDb(t, { + types: { + user: { + props: { + name: { type: 'string' }, + email: { type: 'string' }, + age: { type: 'uint32' }, + story: { type: 'string' }, + alias: { type: 'alias' }, + }, + }, + typeTest: { + props: { + q: { type: 'reference', ref: 'user', prop: 'test' }, + r: { type: 'enum', enum: ['a', 'b', 'c'] }, + }, + }, + }, + }) + + const u = await db.create('user', { + name: 'T', + email: 't@t.com', + age: 33, + story: 'hello', + alias: 't', + }) + + const t1 = await db.create('typeTest', { + q: u, + r: 'a', + }) + + console.log('-------a') + const typeTest = await db.query2('typeTest').include('*', '**').get() + deepEqual(typeTest, [ + { + id: 1, + r: 'a', + q: { + id: 1, + name: 'T', + email: 't@t.com', + age: 33, + story: 'hello', + alias: 't', + }, + }, + ]) + console.log('-------b') + const user = await db.query2('user').include('*', '**').get() + + deepEqual(user, [ + { + id: 1, + name: 'T', + email: 't@t.com', + age: 33, + story: 'hello', + alias: 't', + test: [{ id: 1, r: 'a' }], + }, + ]) +}) diff --git a/test/modify/props/numbers.ts b/test/modify/props/numbers.ts new file mode 100644 index 0000000000..f31c83ff72 --- /dev/null +++ b/test/modify/props/numbers.ts @@ -0,0 +1,370 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify numbers', async (t) => { + const db = await testDb(t, { + types: { + thing: { + n: 'number', + u8: 'uint8', + i8: 'int8', + u16: 'uint16', + i16: 'int16', + u32: 'uint32', + i32: 'int32', + }, + }, + }) + + const id1 = await db.create('thing', { + n: 1.5, + u8: 10, + i8: -10, + u16: 1000, + i16: -1000, + u32: 100000, + i32: -100000, + }) + + // Border values + const id2 = await db.create('thing', { + n: 100.5, + u8: 255, + i8: 127, + u16: 65535, + i16: 32767, + u32: 4294967295, + i32: 2147483647, + }) + + // Min values (for signed) + const id3 = await db.create('thing', { + n: -100.5, + u8: 0, + i8: -128, + u16: 0, + i16: -32768, + u32: 0, + i32: -2147483648, + }) + + deepEqual(await db.query2('thing').get(), [ + { + id: id1, + n: 1.5, + u8: 10, + i8: -10, + u16: 1000, + i16: -1000, + u32: 100000, + i32: -100000, + }, + { + id: id2, + n: 100.5, + u8: 255, + i8: 127, + u16: 65535, + i16: 32767, + u32: 4294967295, + i32: 2147483647, + }, + { + id: id3, + n: -100.5, + u8: 0, + i8: -128, + u16: 0, + i16: -32768, + u32: 0, + i32: -2147483648, + }, + ]) + + db.update('thing', id1, { + n: 2.5, + u8: 11, + i8: -11, + u16: 1001, + i16: -1001, + u32: 100001, + i32: -100001, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + n: 2.5, + u8: 11, + i8: -11, + u16: 1001, + i16: -1001, + u32: 100001, + i32: -100001, + }) + + db.update('thing', id1, { + n: { increment: 2.5 }, + u8: { increment: 1 }, + i8: { increment: 1 }, + u16: { increment: 1 }, + i16: { increment: 1 }, + u32: { increment: 1 }, + i32: { increment: 1 }, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + n: 5, + u8: 12, + i8: -10, + u16: 1002, + i16: -1000, + u32: 100002, + i32: -100000, + }) + + db.update('thing', id1, { + n: { increment: -2.5 }, + u8: { increment: -1 }, + i8: { increment: -1 }, + u16: { increment: -1 }, + i16: { increment: -1 }, + u32: { increment: -1 }, + i32: { increment: -1 }, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + n: 2.5, + u8: 11, + i8: -11, + u16: 1001, + i16: -1001, + u32: 100001, + i32: -100001, + }) + + // Delete + await db.update('thing', id1, { + n: null, + u8: null, + i8: null, + u16: null, + i16: null, + u32: null, + i32: null, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: 1, + n: 0, + i32: 0, + u32: 0, + i16: 0, + u16: 0, + u8: 0, + i8: 0, + }) +}) + +await test('modify numbers on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + n: 'number', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeN: 'number', + $edgeU8: 'uint8', + $edgeI8: 'int8', + $edgeU16: 'uint16', + $edgeI16: 'int16', + $edgeU32: 'uint32', + $edgeI32: 'int32', + }, + }, + }, + }) + + const targetId = await db.create('thing', { n: 1 }) + + // 1. Initial values + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeN: 1.5, + $edgeU8: 10, + $edgeI8: -10, + $edgeU16: 1000, + $edgeI16: -1000, + $edgeU32: 100000, + $edgeI32: -100000, + }, + }) + + // Border values + const id2 = await db.create('holder', { + toThing: { + id: targetId, + $edgeN: 100.5, + $edgeU8: 255, + $edgeI8: 127, + $edgeU16: 65535, + $edgeI16: 32767, + $edgeU32: 4294967295, + $edgeI32: 2147483647, + }, + }) + + // Min values + const id3 = await db.create('holder', { + toThing: { + id: targetId, + $edgeN: -100.5, + $edgeU8: 0, + $edgeI8: -128, + $edgeU16: 0, + $edgeI16: -32768, + $edgeU32: 0, + $edgeI32: -2147483648, + }, + }) + + // Helper to get edge props + const getEdgeProps = async (id: number) => { + const res = await db + .query2('holder', id) + .include( + 'toThing.$edgeN', + 'toThing.$edgeU8', + 'toThing.$edgeI8', + 'toThing.$edgeU16', + 'toThing.$edgeI16', + 'toThing.$edgeU32', + 'toThing.$edgeI32', + ) + .get() + + const toThing = res?.toThing + + if (!toThing || Array.isArray(toThing)) { + return {} + } + + return { + edgeN: toThing.$edgeN, + edgeU8: toThing.$edgeU8, + edgeI8: toThing.$edgeI8, + edgeU16: toThing.$edgeU16, + edgeI16: toThing.$edgeI16, + edgeU32: toThing.$edgeU32, + edgeI32: toThing.$edgeI32, + } + } + + deepEqual(await getEdgeProps(id1), { + edgeN: 1.5, + edgeU8: 10, + edgeI8: -10, + edgeU16: 1000, + edgeI16: -1000, + edgeU32: 100000, + edgeI32: -100000, + }) + + deepEqual(await getEdgeProps(id2), { + edgeN: 100.5, + edgeU8: 255, + edgeI8: 127, + edgeU16: 65535, + edgeI16: 32767, + edgeU32: 4294967295, + edgeI32: 2147483647, + }) + + deepEqual(await getEdgeProps(id3), { + edgeN: -100.5, + edgeU8: 0, + edgeI8: -128, + edgeU16: 0, + edgeI16: -32768, + edgeU32: 0, + edgeI32: -2147483648, + }) + + // Update + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeN: 2.5, + $edgeU8: 11, + $edgeI8: -11, + $edgeU16: 1001, + $edgeI16: -1001, + $edgeU32: 100001, + $edgeI32: -100001, + }, + }) + + deepEqual(await getEdgeProps(id1), { + edgeN: 2.5, + edgeU8: 11, + edgeI8: -11, + edgeU16: 1001, + edgeI16: -1001, + edgeU32: 100001, + edgeI32: -100001, + }) + + // Increment + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeN: { increment: 2.5 }, + $edgeU8: { increment: 1 }, + $edgeI8: { increment: 1 }, + $edgeU16: { increment: 1 }, + $edgeI16: { increment: 1 }, + $edgeU32: { increment: 1 }, + $edgeI32: { increment: 1 }, + }, + }) + + deepEqual(await getEdgeProps(id1), { + edgeN: 5, + edgeU8: 12, + edgeI8: -10, + edgeU16: 1002, + edgeI16: -1000, + edgeU32: 100002, + edgeI32: -100000, + }) + + // Decrement + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeN: { increment: -2.5 }, + $edgeU8: { increment: -1 }, + $edgeI8: { increment: -1 }, + $edgeU16: { increment: -1 }, + $edgeI16: { increment: -1 }, + $edgeU32: { increment: -1 }, + $edgeI32: { increment: -1 }, + }, + }) + + deepEqual(await getEdgeProps(id1), { + edgeN: 2.5, + edgeU8: 11, + edgeI8: -11, + edgeU16: 1001, + edgeI16: -1001, + edgeU32: 100001, + edgeI32: -100001, + }) +}) diff --git a/test/modify/props/object.ts b/test/modify/props/object.ts new file mode 100644 index 0000000000..e571295a5c --- /dev/null +++ b/test/modify/props/object.ts @@ -0,0 +1,127 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify object', async (t) => { + const db = await testDb(t, { + types: { + thing: { + info: { + type: 'object', + props: { + title: 'string', + count: 'number', + }, + }, + }, + }, + }) + + const id1 = await db.create('thing', { + info: { + title: 'my title', + count: 10, + }, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + info: { + title: 'my title', + count: 10, + }, + }) + + // Partial update of object + await db.update('thing', id1, { + info: { + count: 20, + }, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + info: { + title: 'my title', + count: 20, + }, + }) + + // Delete nested prop + await db.update('thing', id1, { + info: { + title: null, + }, + }) + deepEqual((await db.query2('thing', id1).get()).info, { + count: 20, + title: '', + }) + + // Delete whole object + await db.update('thing', id1, { + info: null, + }) + deepEqual((await db.query2('thing', id1).get()).info, { + count: 0, + title: '', + }) +}) + +await test('modify object on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + info: { type: 'object', props: { a: 'string' } }, + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeInfo: { + type: 'object', + props: { + title: 'string', + count: 'number', + }, + }, + }, + }, + }, + }) + + const targetId = await db.create('thing', { info: { a: 'a' } }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeInfo: { + title: 'edge title', + count: 5, + }, + }, + }) + + const res1 = await db.query2('holder', id1).include('toThing.$edgeInfo').get() + + deepEqual(res1.toThing?.$edgeInfo, { + title: 'edge title', + count: 5, + }) + + // Partial update + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeInfo: { + count: 15, + }, + }, + }) + + const res2 = await db.query2('holder', id1).include('toThing.$edgeInfo').get() + + deepEqual(res2.toThing?.$edgeInfo, { + title: 'edge title', + count: 15, + }) +}) diff --git a/test/modify/props/references.ts b/test/modify/props/references.ts new file mode 100644 index 0000000000..283081b4ea --- /dev/null +++ b/test/modify/props/references.ts @@ -0,0 +1,309 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify single reference', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + holder: { + dest: { type: 'reference', ref: 'thing', prop: 'refHolders' }, + }, + }, + }) + + // Test passing BasedModify (promise) directly + const t1 = db.create('thing', { name: 't1' }) + const t2 = db.create('thing', { name: 't2' }) + const h1 = await db.create('holder', { dest: t1 }) + + const realT1 = await t1 + const realT2 = await t2 + + { + const res = await db.query2('holder', h1).include('dest.id').get() + + deepEqual(res, { + id: h1, + dest: { id: realT1 }, + }) + } + + // Update with promise + await db.update('holder', h1, { dest: t2 }) + + { + const res = await db.query2('holder', h1).include('dest.id').get() + + deepEqual(res, { + id: h1, + dest: { id: realT2 }, + }) + } + + // Update with object format containing promise + await db.update('holder', h1, { dest: { id: t1 } }) + + { + const res = await db.query2('holder', h1).include('dest.id').get() + + deepEqual(res, { + id: h1, + dest: { id: realT1 }, + }) + } + + // Delete + await db.update('holder', h1, { dest: null }) + deepEqual(await db.query2('holder', h1).include('dest').get(), { + id: h1, + dest: null, + }) +}) + +await test('modify references', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + holder: { + dests: { + type: 'references', + items: { + ref: 'thing', + prop: 'refsHolders', + }, + }, + }, + }, + }) + + // Mixed awaited and not awaited + const t1 = await db.create('thing', { name: 't1' }) + const t2Promise = db.create('thing', { name: 't2' }) + const t2 = await t2Promise + const t3Promise = db.create('thing', { name: 't3' }) + const t3 = await t3Promise + + // Test set (create) with mixed + const h1 = await db.create('holder', { dests: [t1, t2Promise] }) + + const check = async (ids: number[], msg) => { + const res = await db.query2('holder', h1).include('dests').get() + const currentIds = res?.dests?.map((v: any) => v.id) || [] + currentIds.sort() + ids.sort() + deepEqual(currentIds, ids, msg) + } + + await check([t1, t2], 'simple') + + // Test add with promise + await db.update('holder', h1, { dests: { add: [t3Promise] } }) + await check([t1, t2, t3], 'add') + + // Test delete with promise + await db.update('holder', h1, { dests: { delete: [t2Promise] } }) + await check([t1, t3], 'delete') + + // Test replace (array) with promise + await db.update('holder', h1, { dests: [t2Promise] }) + await check([t2], 'replace') + + // Test update (acts as add/upsert for references list) with promise + await db.update('holder', h1, { dests: { update: [t3Promise] } }) + await check([t2, t3], 'update') + + // Delete all + await db.update('holder', h1, { dests: null }) + await check([], 'delete all') +}) + +await test('modify references no await', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + holder: { + dests: { + type: 'references', + items: { + ref: 'thing', + prop: 'refsHolders', + }, + }, + }, + }, + }) + + // No await on creates + const t1 = db.create('thing', { name: 't1' }) + const t2 = db.create('thing', { name: 't2' }) + const t3 = db.create('thing', { name: 't3' }) + + // Use unawaited t1, t2 in create + const h1 = db.create('holder', { dests: [t1, t2] }) + + // Use unawaited t3 in update, on unawaited h1 + const updateP = db.update('holder', h1, { dests: { add: [t3] } }) + + // Also delete t2 (unawaited) from unawaited h1 + const deleteP = db.update('holder', h1, { dests: { delete: [t2] } }) + + // Now we wait for the final state to settle. + await Promise.all([t1, t2, t3, h1, updateP, deleteP]) + // Get real IDs for assertion + const id1 = await t1 + const id3 = await t3 // t2 was deleted + + // Verify + const res = await db + .query2('holder', await h1) + .include('dests.id') + .get() + + const currentIds = res?.dests?.map((v: any) => v.id) || [] + currentIds.sort() + const expected = [id1, id3] + expected.sort() + + deepEqual(currentIds, expected, 'no await sequence') +}) + +await test('modify single reference on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeRef: { + type: 'reference', + ref: 'thing', + }, + }, + }, + }, + }) + + const t1 = await db.create('thing', { name: 't1' }) + const t2 = await db.create('thing', { name: 't2' }) + const target = await db.create('thing', { name: 'target' }) + + const h1 = await db.create('holder', { + toThing: { + id: target, + $edgeRef: t1, + }, + }) + + // Verify + const getEdgeRef = async (id: number) => { + const res = await db + .query2('holder', id) + .include('toThing.$edgeRef.id') + .get() + + return res?.toThing && !Array.isArray(res?.toThing) + ? res?.toThing.$edgeRef + : undefined + } + + deepEqual((await getEdgeRef(h1))?.id, t1) + + // Update + await db.update('holder', h1, { + toThing: { + id: target, + $edgeRef: t2, + }, + }) + deepEqual((await getEdgeRef(h1))?.id, t2) + + // Update with object format + await db.update('holder', h1, { + toThing: { + id: target, + $edgeRef: { id: t1 }, + }, + }) + deepEqual((await getEdgeRef(h1))?.id, t1) +}) + +await test('modify references on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeRefs: { + type: 'references', + items: { + ref: 'thing', + }, + }, + }, + }, + }, + }) + + // Mixed awaited and not awaited + const t1 = await db.create('thing', { name: 't1' }) + const t2Promise = db.create('thing', { name: 't2' }) + const t2 = await t2Promise + const t3Promise = db.create('thing', { name: 't3' }) + const t3 = await t3Promise + const target = await db.create('thing', { name: 'target' }) + + const h1 = await db.create('holder', { + toThing: { + id: target, + $edgeRefs: [t1, t2Promise], + }, + }) + + const check = async (ids: number[], msg) => { + const res = await db + .query2('holder', h1) + .include('toThing.$edgeRefs.id') + .get() + + const edge: any = + res?.toThing && !Array.isArray(res.toThing) ? res.toThing : {} + const currentIds = edge.$edgeRefs?.map((v: any) => v.id) || [] + currentIds.sort() + ids.sort() + deepEqual(currentIds, ids, msg) + } + + await check([t1, t2], 'simple') + + // Test add with promise + await db.update('holder', h1, { + toThing: { id: target, $edgeRefs: { add: [t3Promise] } }, + }) + await check([t1, t2, t3], 'add') + + // Test delete with promise + await db.update('holder', h1, { + toThing: { id: target, $edgeRefs: { delete: [t2Promise] } }, + }) + await check([t1, t3], 'delete') + + // Test replace (array) with promise + await db.update('holder', h1, { + toThing: { id: target, $edgeRefs: [t2Promise] }, + }) + await check([t2], 'replace') +}) diff --git a/test/modify/props/string.ts b/test/modify/props/string.ts new file mode 100644 index 0000000000..d00026cc3b --- /dev/null +++ b/test/modify/props/string.ts @@ -0,0 +1,275 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify string', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + }, + }) + + // Basic string + const s1 = 'hello' + const id1 = await db.create('thing', { + name: s1, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s1, + }) + + // Update to another string + const s2 = 'world' + await db.update('thing', id1, { + name: s2, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s2, + }) + + // String with spaces + const s3 = 'foo bar' + await db.update('thing', id1, { + name: s3, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s3, + }) + + // Empty string + const s4 = '' + await db.update('thing', id1, { + name: s4, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s4, + }) + + // Unicode / Special characters + const s5 = 'ñàéïô SPECIAL !@#$%^&*()_+ 123' + await db.update('thing', id1, { + name: s5, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s5.normalize('NFD'), + }) + + // Long string + const s6 = 'a'.repeat(1000) + await db.update('thing', id1, { + name: s6, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s6, + }) + + // Delete + await db.update('thing', id1, { + name: null, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: '', // Should probably be empty string for string props? + }) +}) + +await test('modify string on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeName: 'string', + }, + }, + }, + }) + + // Basic string + const s1 = 'hello' + const targetId = await db.create('thing', { name: 'target' }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeName: s1, + }, + }) + + const res1 = await db.query2('holder', id1).include('toThing.$edgeName').get() + + deepEqual(res1?.toThing?.$edgeName, s1) + + // Update to another string + const s2 = 'world' + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeName: s2, + }, + }) + const res2 = await db.query2('holder', id1).include('toThing.$edgeName').get() + deepEqual(res2?.toThing?.$edgeName, s2) + + // String with spaces + const s3 = 'foo bar' + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeName: s3, + }, + }) + const res3 = await db.query2('holder', id1).include('toThing.$edgeName').get() + deepEqual(res3?.toThing?.$edgeName, s3) + + // Empty string + const s4 = '' + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeName: s4, + }, + }) + const res4 = await db.query2('holder', id1).include('toThing.$edgeName').get() + deepEqual(res4?.toThing?.$edgeName, s4) + + // Unicode / Special characters + const s5 = 'ñàéïô SPECIAL !@#$%^&*()_+ 123' + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeName: s5, + }, + }) + const res5 = await db.query2('holder', id1).include('toThing.$edgeName').get() + deepEqual(res5?.toThing?.$edgeName, s5.normalize('NFD')) + + // Long string + const s6 = 'a'.repeat(1000) + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeName: s6, + }, + }) + const res6 = await db.query2('holder', id1).include('toThing.$edgeName').get() + deepEqual(res6?.toThing?.$edgeName, s6) +}) + +await test('modify fixed string', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: { + type: 'string', + max: 16, + }, + }, + }, + }) + + // Basic string + const s1 = 'hello' + const id1 = await db.create('thing', { + name: s1, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s1, + }) + + // Update to another string + const s2 = 'world' + await db.update('thing', id1, { + name: s2, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s2, + }) + + // String with spaces + const s3 = 'foo bar' + await db.update('thing', id1, { + name: s3, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s3, + }) + + // Max length string + const s4 = 'a'.repeat(16) + await db.update('thing', id1, { + name: s4, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s4, + }) + + // Delete + await db.update('thing', id1, { + name: null, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: '', + }) +}) + +await test('modify long string', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: 'string', + }, + longCompressedString: { + name: { + type: 'string', + }, + }, + }, + }) + + // String > 200 chars (triggers compression if enabled) + const s1 = 'a'.repeat(250) + const id1 = await db.create('thing', { + name: s1, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s1, + }) + + // Mixed content string > 200 chars + const s2 = 'abcdefghijklmnopqrstuvwxyz'.repeat(10) // 260 chars + await db.update('thing', id1, { + name: s2, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + name: s2, + }) + + // Test no compression option + const id2 = await db.create('longCompressedString', { + name: s1, + }) + deepEqual(await db.query2('longCompressedString', id2).get(), { + id: id2, + name: s1, + }) +}) diff --git a/test/modify/props/text.ts b/test/modify/props/text.ts new file mode 100644 index 0000000000..efea478928 --- /dev/null +++ b/test/modify/props/text.ts @@ -0,0 +1,119 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify text', async (t) => { + const db = await testDb(t, { + locales: { + en: true, + de: true, + nl: true, + }, + types: { + thing: { + content: 'text', + }, + }, + }) + + const id1 = await db.create('thing', { + content: { + en: 'Hello', + de: 'Hallo', + }, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + content: { + en: 'Hello', + de: 'Hallo', + nl: '', + }, + }) + + // Update specific locale + await db.update('thing', id1, { + content: { + nl: 'Hallo', + }, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + content: { + en: 'Hello', + de: 'Hallo', + nl: 'Hallo', + }, + }) + + // Overwrite + await db.update('thing', id1, { + content: { + en: 'Hi', + }, + }) + + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + content: { + en: 'Hi', + de: 'Hallo', + nl: 'Hallo', + }, + }) + + // Delete + await db.update('thing', id1, { + content: null, + }) + deepEqual((await db.query2('thing', id1).get()).content, { + nl: '', + en: '', + de: '', + }) +}) + +await test('modify text on edge', async (t) => { + const db = await testDb(t, { + locales: { + en: true, + }, + types: { + thing: { + content: 'text', + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeText: 'text', + }, + }, + }, + }) + + const targetId = await db.create('thing', { content: { en: 'a' } as any }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeText: { en: 'edge hello' } as any, + }, + }) + + const res1 = await db.query2('holder', id1).include('toThing.$edgeText').get() + + deepEqual(res1.toThing?.$edgeText, { en: 'edge hello' }) + + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeText: { en: 'edge hi' } as any, + }, + }) + + const res2 = await db.query2('holder', id1).include('toThing.$edgeText').get() + + deepEqual(res2.toThing?.$edgeText, { en: 'edge hi' }) +}) diff --git a/test/modify/props/timestamp.ts b/test/modify/props/timestamp.ts new file mode 100644 index 0000000000..68539a2b24 --- /dev/null +++ b/test/modify/props/timestamp.ts @@ -0,0 +1,217 @@ +import { deepEqual } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify timestamp', async (t) => { + const db = await testDb(t, { + types: { + event: { + ts: 'timestamp', + }, + }, + }) + + const t1 = Date.now() + const t2 = t1 + 1000 + const t3 = t1 + 2000 + + const id1 = await db.create('event', { + ts: t1, + }) + + deepEqual(await db.query2('event', id1).get(), { + id: id1, + ts: t1, + }) + + await db.update('event', id1, { + ts: t2, + }) + + deepEqual(await db.query2('event', id1).get(), { + id: id1, + ts: t2, + }) + + await db.update('event', id1, { + ts: t3, + }) + + deepEqual(await db.query2('event', id1).get(), { + id: id1, + ts: t3, + }) + + // Edge cases + await db.update('event', id1, { ts: 0 }) + deepEqual(await db.query2('event', id1).get(), { id: id1, ts: 0 }) + + const farFuture = 8640000000000000 // Max JS Date timestamp + await db.update('event', id1, { ts: farFuture }) + deepEqual(await db.query2('event', id1).get(), { id: id1, ts: farFuture }) + + // Increment + await db.update('event', id1, { ts: 1000 }) + await db.update('event', id1, { + ts: { increment: 1000 }, + }) + deepEqual(await db.query2('event', id1).get(), { id: id1, ts: 2000 }) + + await db.update('event', id1, { + ts: { increment: -500 }, + }) + deepEqual(await db.query2('event', id1).get(), { id: id1, ts: 1500 }) + + // String formats + const now = Date.now() + await db.update('event', id1, { ts: 'now' }) + const r1: any = await db.query2('event', id1).get() + if (Math.abs(r1.ts - now) > 200) { + throw new Error(`Timestamp 'now' is too far off: ${r1.ts} vs ${now}`) + } + + await db.update('event', id1, { ts: 'now + 1h' }) + const r2: any = await db.query2('event', id1).get() + const t2Expr = now + 1000 * 60 * 60 + if (Math.abs(r2.ts - t2Expr) > 200) { + throw new Error( + `Timestamp 'now + 1h' is too far off: ${r2.ts} vs ${t2Expr}`, + ) + } + + await db.update('event', id1, { ts: 'now - 1d' }) + const r3: any = await db.query2('event', id1).get() + const t3Expr = now - 1000 * 60 * 60 * 24 + if (Math.abs(r3.ts - t3Expr) > 200) { + throw new Error( + `Timestamp 'now - 1d' is too far off: ${r3.ts} vs ${t3Expr}`, + ) + } + + // Explicit date string + const dateStr = '2025-01-01T00:00:00.000Z' + const dateTs = new Date(dateStr).valueOf() + await db.update('event', id1, { ts: dateStr }) + const r4: any = await db.query2('event', id1).get() + deepEqual(r4, { id: id1, ts: dateTs }) + + // Delete + await db.update('event', id1, { ts: null }) + deepEqual((await db.query2('event', id1).get()).ts, 0) +}) + +await test('modify timestamp on edge', async (t) => { + const db = await testDb(t, { + types: { + event: { + ts: 'timestamp', + }, + holder: { + toEvent: { + ref: 'event', + prop: 'holders', + $edgeTs: 'timestamp', + }, + }, + }, + }) + + const eventId = await db.create('event', { ts: Date.now() }) + + const t1 = Date.now() + const t2 = t1 + 1000 + const t3 = t1 + 2000 + + const id1 = await db.create('holder', { + toEvent: { + id: eventId, + $edgeTs: t1, + }, + }) + + // Helper + const getEdgeTs = async (id: number) => { + const res = await db.query2('holder', id).include('toEvent.$edgeTs').get() + + return res.toEvent?.$edgeTs || 0 + } + + deepEqual(await getEdgeTs(id1), t1) + + // Update + await db.update('holder', id1, { + toEvent: { + id: eventId, + $edgeTs: t2, + }, + }) + deepEqual(await getEdgeTs(id1), t2) + + await db.update('holder', id1, { + toEvent: { + id: eventId, + $edgeTs: t3, + }, + }) + deepEqual(await getEdgeTs(id1), t3) + + // Edge cases + await db.update('holder', id1, { toEvent: { id: eventId, $edgeTs: 0 } }) + deepEqual(await getEdgeTs(id1), 0) + + const farFuture = 8640000000000000 + await db.update('holder', id1, { + toEvent: { id: eventId, $edgeTs: farFuture }, + }) + deepEqual(await getEdgeTs(id1), farFuture) + + // Increment + await db.update('holder', id1, { toEvent: { id: eventId, $edgeTs: 1000 } }) + await db.update('holder', id1, { + toEvent: { + id: eventId, + $edgeTs: { increment: 1000 }, + }, + }) + deepEqual(await getEdgeTs(id1), 2000) + + await db.update('holder', id1, { + toEvent: { + id: eventId, + $edgeTs: { increment: -500 }, + }, + }) + deepEqual(await getEdgeTs(id1), 1500) + + // String formats + const now = Date.now() + await db.update('holder', id1, { toEvent: { id: eventId, $edgeTs: 'now' } }) + const r1 = await getEdgeTs(id1) + if (Math.abs(r1 - now) > 200) { + throw new Error(`Timestamp 'now' is too far off: ${r1} vs ${now}`) + } + + await db.update('holder', id1, { + toEvent: { id: eventId, $edgeTs: 'now + 1h' }, + }) + const r2 = await getEdgeTs(id1) + const t2Expr = now + 1000 * 60 * 60 + if (Math.abs(r2 - t2Expr) > 200) { + throw new Error(`Timestamp 'now + 1h' is too far off: ${r2} vs ${t2Expr}`) + } + + await db.update('holder', id1, { + toEvent: { id: eventId, $edgeTs: 'now - 1d' }, + }) + const r3 = await getEdgeTs(id1) + const t3Expr = now - 1000 * 60 * 60 * 24 + if (Math.abs(r3 - t3Expr) > 200) { + throw new Error(`Timestamp 'now - 1d' is too far off: ${r3} vs ${t3Expr}`) + } + + // Explicit date string + const dateStr = '2025-01-01T00:00:00.000Z' + const dateTs = new Date(dateStr).valueOf() + await db.update('holder', id1, { toEvent: { id: eventId, $edgeTs: dateStr } }) + deepEqual(await getEdgeTs(id1), dateTs) +}) diff --git a/test/modify/props/vector.ts b/test/modify/props/vector.ts new file mode 100644 index 0000000000..bb0ee55301 --- /dev/null +++ b/test/modify/props/vector.ts @@ -0,0 +1,244 @@ +import { deepEqual, testDb, throws } from '../../shared/index.js' +import test from '../../shared/test.js' +import assert from 'node:assert' + +await test('incorrect values', async (t) => { + const db = await testDb(t, { + types: { + thing: { + vec: { type: 'vector', size: 3, baseType: 'float32' }, + }, + }, + }) + + throws(() => + db.create('thing', { + vec: new Float64Array([1.1, 2.2, 3.3]), + }), + ) + + throws(() => + db.create('thing', { + vec: new Float32Array([1.1, 2.2, 3.3, 4, 2]), + }), + ) +}) + +await test('default', async (t) => { + const db = await testDb(t, { + types: { + thing: { + vec: { + type: 'vector', + size: 3, + baseType: 'float32', + default: new Float32Array([1, 2.5, 3]), + }, + }, + }, + }) + + const id1 = await db.create('thing', {}) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + vec: new Float32Array([1, 2.5, 3]), + }) +}) + +await test('default colvec', async (t) => { + const db = await testDb(t, { + types: { + thing: { + insertOnly: true, + props: { + vec: { + type: 'colvec', + size: 3, + baseType: 'float32', + default: new Float32Array([1, 2.5, 3]), + }, + }, + }, + }, + }) + + const id1 = await db.create('thing', {}) + deepEqual(await db.query2('thing', id1).get(), { + id: id1, + vec: new Float32Array([1, 2.5, 3]), + }) +}) + +await test.skip('incorrect default', async (t) => { + throws(() => + testDb(t, { + types: { + thing: { + vec: { + type: 'vector', + size: 3, + baseType: 'float32', + default: new Float32Array([1, 2.5, 3, 4.3]), + }, + }, + }, + }), + ) +}) + +await test('modify vector', async (t) => { + const db = await testDb(t, { + types: { + thing: { + vec: { type: 'vector', size: 3, baseType: 'float32' }, + }, + }, + }) + + const v1 = new Float32Array([1.1, 2.2, 3.3]) + const id1 = await db.create('thing', { + vec: v1, + }) + + // Float precision might require approximate equality or strict check if implementation preserves bits + // For now assuming deepEqual works or we might need a tolerance check + const res = await db.query2('thing', id1).get() + + // Convert result back to array if it is returned as TypedArray + const vecArr = Array.from(res.vec) as number[] + + // Check approximate values + assert(Math.abs(vecArr[0] - v1[0]) < 0.0001) + assert(Math.abs(vecArr[1] - v1[1]) < 0.0001) + assert(Math.abs(vecArr[2] - v1[2]) < 0.0001) + + const v2 = new Float32Array([4.4, 5.5, 6.6]) + await db.update('thing', id1, { + vec: v2, + }) + + const res2 = await db.query2('thing', id1).get() + const vecArr2 = Array.from(res2.vec) as number[] + + assert(Math.abs(vecArr2[0] - v2[0]) < 0.0001) + assert(Math.abs(vecArr2[1] - v2[1]) < 0.0001) + assert(Math.abs(vecArr2[2] - v2[2]) < 0.0001) + + // Delete + await db.update('thing', id1, { + vec: null, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: 1, + vec: new Float32Array([0, 0, 0]), + }) + + // Undefined + const id3 = await db.create('thing', {}) + deepEqual(await db.query2('thing', id1).get(), { + id: 1, + vec: new Float32Array([0, 0, 0]), + }) +}) + +await test('modify colvec', async (t) => { + const db = await testDb(t, { + types: { + thing: { + insertOnly: true, + props: { + vec: { type: 'colvec', size: 3, baseType: 'float64' }, + }, + }, + }, + }) + + // colvec behaves similarly to vector in terms of setting/getting from client perspective + // but internally stored differently (columnar). + const v1 = new Float64Array([1.1, 2.2, 3.3]) + const id1 = await db.create('thing', { + vec: v1, + }) + + const res = await db.query2('thing', id1).get() + const vecArr = Array.from(res.vec) as number[] + + assert(Math.abs(vecArr[0] - v1[0]) < 0.0001) + assert(Math.abs(vecArr[1] - v1[1]) < 0.0001) + assert(Math.abs(vecArr[2] - v1[2]) < 0.0001) + + const v2 = new Float64Array([4.4, 5.5, 6.6]) + await db.update('thing', id1, { + vec: v2, + }) + + const res2 = await db.query2('thing', id1).get() + const vecArr2 = Array.from(res2.vec) as number[] + + assert(Math.abs(vecArr2[0] - v2[0]) < 0.0001) + assert(Math.abs(vecArr2[1] - v2[1]) < 0.0001) + assert(Math.abs(vecArr2[2] - v2[2]) < 0.0001) + + // Delete + await db.update('thing', id1, { + vec: null, + }) + deepEqual(await db.query2('thing', id1).get(), { + id: 1, + vec: new Float64Array([0, 0, 0]), + }) +}) + +await test('modify vector on edge', async (t) => { + const db = await testDb(t, { + types: { + thing: { + vec: { type: 'vector', size: 3, baseType: 'float32' }, + }, + holder: { + toThing: { + ref: 'thing', + prop: 'holders', + $edgeVec: { type: 'vector', size: 3, baseType: 'float32' }, + }, + }, + }, + }) + + const v1 = new Float32Array([1.1, 2.2, 3.3]) + const targetId = await db.create('thing', { vec: v1 }) + const id1 = await db.create('holder', { + toThing: { + id: targetId, + $edgeVec: v1, + }, + }) + + const res = await db.query2('holder', id1).include('toThing.$edgeVec').get() + + if (res.toThing) { + const vecArr = Array.from(res.toThing.$edgeVec) as number[] + assert(Math.abs(vecArr[0] - v1[0]) < 0.0001) + assert(Math.abs(vecArr[1] - v1[1]) < 0.0001) + assert(Math.abs(vecArr[2] - v1[2]) < 0.0001) + } else { + assert.fail('toThing not found') + } + + const v2 = new Float32Array([4.4, 5.5, 6.6]) + await db.update('holder', id1, { + toThing: { + id: targetId, + $edgeVec: v2, + }, + }) + + const res2 = await db.query2('holder', id1).include('toThing.$edgeVec').get() + + if (res2.toThing) { + const vecArr2 = Array.from(res2.toThing.$edgeVec) as number[] + assert(Math.abs(vecArr2[0] - v2[0]) < 0.0001) + assert(Math.abs(vecArr2[1] - v2[1]) < 0.0001) + assert(Math.abs(vecArr2[2] - v2[2]) < 0.0001) + } +}) diff --git a/test/modify/upsert.ts b/test/modify/upsert.ts new file mode 100644 index 0000000000..31ed5a9716 --- /dev/null +++ b/test/modify/upsert.ts @@ -0,0 +1,88 @@ +import { deepEqual } from '../shared/assert.js' +import { testDb } from '../shared/index.js' +import test from '../shared/test.js' + +await test('upsert', async (t) => { + const db = await testDb(t, { + types: { + user: { + email: 'alias', + uuid: 'alias', + isNice: 'boolean', + }, + }, + }) + + // 1. Create via upsert + const id1 = await db.upsert( + 'user', + { uuid: '9dg786' }, // target by alias + { email: 'youri@saulx.com', isNice: true }, + ) + + const res1 = await db.query2('user', id1).get() + deepEqual(res1, { + id: id1, + uuid: '9dg786', + email: 'youri@saulx.com', + isNice: true, + }) + + // 2. Update via upsert (same alias target) + const id2 = await db.upsert('user', { uuid: '9dg786' }, { isNice: false }) + + deepEqual(id1, id2, 'Ids should be the same') + + const res2 = await db.query2('user', id1).get() + deepEqual(res2, { + id: id1, + uuid: '9dg786', + email: 'youri@saulx.com', // Should be preserved + isNice: false, // Should be updated + }) + + // 3. Create another one via different alias field + const id3 = await db.upsert( + 'user', + { email: 'bla@bla.com' }, + { uuid: 'unique-id-2', isNice: true }, + ) + + const res3 = await db.query2('user', id3).get() + deepEqual(res3, { + id: id3, + uuid: 'unique-id-2', + email: 'bla@bla.com', + isNice: true, + }) + + // 4. Update via different alias field + const id4 = await db.upsert( + 'user', + { email: 'bla@bla.com' }, + { isNice: false }, + ) + + deepEqual(id3, id4, 'Ids should be the same 2') + + const res4 = await db.query2('user', id3).get() + deepEqual(res4, { + id: id3, + uuid: 'unique-id-2', + email: 'bla@bla.com', + isNice: false, + }) + + const id5 = await db.create('user', { + email: 'newkid@block.com', + isNice: true, + }) + + const res5 = await db.upsert( + 'user', + { email: 'newkid@block.com' }, + { isNice: false }, + ) + + deepEqual(id5, res5) +}) diff --git a/test/modify/validation/alias.ts b/test/modify/validation/alias.ts new file mode 100644 index 0000000000..2e6a37811f --- /dev/null +++ b/test/modify/validation/alias.ts @@ -0,0 +1,40 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - alias', async (t) => { + const db = await testDb(t, { + types: { + thing: { + myAlias: { type: 'alias' }, + }, + }, + }) + + // Alias + await throws( + // @ts-expect-error + () => db.create('thing', { myAlias: 123 }), + 'alias fail with number', + ) + + // Extended validation + await throws( + () => db.create('thing', { myAlias: '' }), + 'alias fail with empty string', + ) + await throws( + () => db.create('thing', { myAlias: ' ' }), + 'alias fail with spaces string', // often desirable to prevent + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myAlias: ['a'] }), + 'alias fail with array', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myAlias: {} }), + 'alias fail with object', + ) +}) diff --git a/test/modify/validation/binary.ts b/test/modify/validation/binary.ts new file mode 100644 index 0000000000..7a5e503a23 --- /dev/null +++ b/test/modify/validation/binary.ts @@ -0,0 +1,49 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - binary', async (t) => { + const db = await testDb(t, { + types: { + thing: { + myBlob: { type: 'binary', maxBytes: 10 }, + }, + }, + }) + + // Binary + await throws( + // @ts-expect-error + () => db.create('thing', { myBlob: 'not a buffer' }), + 'binary fail with string', + ) + await throws( + () => db.create('thing', { myBlob: new Uint8Array(20) }), + 'binary maxBytes', + ) + await db.create('thing', { myBlob: new Uint8Array(5) }) + + // MaxBytes validation (configured maxBytes: 10) + await throws( + () => db.create('thing', { myBlob: new Uint8Array(11) }), + 'binary fail with maxBytes exceeded', + ) + await db.create('thing', { myBlob: new Uint8Array(10) }) + + // Extended validation + await throws( + // @ts-expect-error + () => db.create('thing', { myBlob: 123 }), + 'binary fail with number', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myBlob: [1, 2, 3] }), + 'binary fail with array of numbers', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myBlob: {} }), + 'binary fail with object', + ) +}) diff --git a/test/modify/validation/boolean.ts b/test/modify/validation/boolean.ts new file mode 100644 index 0000000000..be97eebfd5 --- /dev/null +++ b/test/modify/validation/boolean.ts @@ -0,0 +1,44 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - boolean', async (t) => { + const db = await testDb(t, { + types: { + thing: { + isActive: { type: 'boolean' }, + }, + }, + }) + + // Boolean + await throws( + // @ts-expect-error + () => db.create('thing', { isActive: 'true' }), + 'boolean should fail with string', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { isActive: 1 }), + 'boolean should fail with number', + ) + await db.create('thing', { isActive: true }) + + // Extended validation + + await throws( + // @ts-expect-error + () => db.create('thing', { isActive: {} }), + 'boolean should fail with object', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { isActive: [] }), + 'boolean should fail with array', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { isActive: 'false' }), + 'boolean should fail with "false" string', + ) +}) diff --git a/test/modify/validation/enum.ts b/test/modify/validation/enum.ts new file mode 100644 index 0000000000..5d7d536e7a --- /dev/null +++ b/test/modify/validation/enum.ts @@ -0,0 +1,67 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - enum', async (t) => { + const db = await testDb(t, { + types: { + thing: { + myEnum: { enum: ['a', 'b'] }, + numEnum: { enum: [1, 2, 3] }, + mixedEnum: { enum: ['a', 1] }, + }, + }, + }) + + // Enum (string) + await throws( + // @ts-expect-error + () => db.create('thing', { myEnum: 'c' }), + 'enum should fail with invalid value', + ) + await db.create('thing', { myEnum: 'b' }) + + // Enum (number) + await throws( + // @ts-expect-error + () => db.create('thing', { numEnum: 4 }), + 'numEnum should fail with invalid value', + ) + await db.create('thing', { numEnum: 2 }) + + // Enum (mixed) + await throws( + // @ts-expect-error + () => db.create('thing', { mixedEnum: 'b' }), + 'mixedEnum should fail with invalid string value', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { mixedEnum: 2 }), + 'mixedEnum should fail with invalid number value', + ) + await db.create('thing', { mixedEnum: 'a' }) + await db.create('thing', { mixedEnum: 1 }) + + // Extended validation + await throws( + // @ts-expect-error + () => db.create('thing', { myEnum: 'A' }), + 'enum should be case sensitive', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myEnum: 1 }), + 'enum should fail with number', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myEnum: true }), + 'enum should fail with boolean', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myEnum: [] }), + 'enum should fail with array', + ) +}) diff --git a/test/modify/validation/integers.ts b/test/modify/validation/integers.ts new file mode 100644 index 0000000000..ff169bf1a0 --- /dev/null +++ b/test/modify/validation/integers.ts @@ -0,0 +1,92 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - integers', async (t) => { + const db = await testDb(t, { + types: { + thing: { + myInt8: { type: 'int8', min: -10, max: 10 }, + myUint8: { type: 'uint8', min: 10, max: 20 }, + myInt16: { type: 'int16', min: 1000, max: 2000 }, + myUint16: { type: 'uint16', min: 1000, max: 2000 }, + myInt32: { type: 'int32', min: 100000, max: 200000 }, + myUint32: { type: 'uint32', min: 100000, max: 200000 }, + }, + }, + }) + + // Int8 (-128 to 127) + await throws( + // @ts-expect-error + () => db.create('thing', { myInt8: '1' }), + 'int8 fail string', + ) + await throws(() => db.create('thing', { myInt8: 1.5 }), 'int8 fail float') + await throws(() => db.create('thing', { myInt8: 128 }), 'int8 overflow') + await throws(() => db.create('thing', { myInt8: -129 }), 'int8 underflow') + await db.create('thing', { myInt8: 10 }) + // This check fails because -11 is < -10, so it should throw + await throws(() => db.create('thing', { myInt8: -11 }), 'int8 below min') + await throws(() => db.create('thing', { myInt8: 11 }), 'int8 above max') + + // Uint8 (0 to 255) + await throws(() => db.create('thing', { myUint8: -1 }), 'uint8 negative') + // Uint8 (min 10 max 20) + await db.create('thing', { myUint8: 10 }) + await throws(() => db.create('thing', { myUint8: 9 }), 'uint8 below min') + await throws(() => db.create('thing', { myUint8: 21 }), 'uint8 above max') + + // Int16 (-32768 to 32767) + // Int16 (min 1000 max 2000) + await db.create('thing', { myInt16: 1000 }) + await throws(() => db.create('thing', { myInt16: 999 }), 'int16 below min') + await throws(() => db.create('thing', { myInt16: 2001 }), 'int16 above max') + + // Uint16 (0 to 65535) + // Uint16 (min 1000 max 2000) + await db.create('thing', { myUint16: 1000 }) + await throws(() => db.create('thing', { myUint16: 999 }), 'uint16 below min') + await throws(() => db.create('thing', { myUint16: 2001 }), 'uint16 above max') + + // Int32 + // Int32 (min 100000 max 200000) + await db.create('thing', { myInt32: 100000 }) + await throws(() => db.create('thing', { myInt32: 99999 }), 'int32 below min') + await throws(() => db.create('thing', { myInt32: 200001 }), 'int32 above max') + + // Uint32 (min 100000 max 200000) + await db.create('thing', { myUint32: 100000 }) + await throws( + () => db.create('thing', { myUint32: 99999 }), + 'uint32 below min', + ) + await throws( + () => db.create('thing', { myUint32: 200001 }), + 'uint32 above max', + ) + + // Extended validation (General invalid types for integers) + await throws(() => db.create('thing', { myInt8: NaN }), 'int8 fail NaN') + await throws( + () => db.create('thing', { myInt8: Infinity }), + 'int8 fail Infinity', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myInt8: true }), + 'int8 fail boolean', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myInt8: [] }), + 'int8 fail array', + ) + + // Test float fail for others + await throws(() => db.create('thing', { myUint8: 1.5 }), 'uint8 fail float') + await throws(() => db.create('thing', { myInt16: 1.5 }), 'int16 fail float') + await throws(() => db.create('thing', { myUint16: 1.5 }), 'uint16 fail float') + await throws(() => db.create('thing', { myInt32: 1.5 }), 'int32 fail float') + await throws(() => db.create('thing', { myUint32: 1.5 }), 'uint32 fail float') +}) diff --git a/test/modify/validation/json.ts b/test/modify/validation/json.ts new file mode 100644 index 0000000000..acda8e2be8 --- /dev/null +++ b/test/modify/validation/json.ts @@ -0,0 +1,29 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - json', async (t) => { + const db = await testDb(t, { + types: { + thing: { + myJson: { type: 'json' }, + }, + }, + }) + + // Json Valid Values + await db.create('thing', { myJson: { a: 1 } }) + await db.create('thing', { myJson: [1, 2] }) + await db.create('thing', { myJson: 'string' }) + await db.create('thing', { myJson: 123 }) + await db.create('thing', { myJson: true }) + await db.create('thing', { myJson: null }) + + // Json Invalid structure (Circular) + const circular: any = { a: 1 } + circular.b = circular + await throws( + () => db.create('thing', { myJson: circular }), + 'json should fail with circular structure', + ) +}) diff --git a/test/modify/validation/number.ts b/test/modify/validation/number.ts new file mode 100644 index 0000000000..27681adcc7 --- /dev/null +++ b/test/modify/validation/number.ts @@ -0,0 +1,67 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - number', async (t) => { + const db = await testDb(t, { + types: { + thing: { + score: { type: 'number', min: 10, max: 20 }, + myNumber: { type: 'number' }, + }, + }, + }) + + // Number + await throws( + // @ts-expect-error + () => db.create('thing', { score: '123' }), + 'number should fail with string', + ) + await throws( + () => db.create('thing', { score: 9 }), + 'number should fail if too small', + ) + await throws( + () => db.create('thing', { score: 21 }), + 'number should fail if too large', + ) + await db.create('thing', { score: 15 }) + + // Range validation + await throws( + () => db.create('thing', { score: 9 }), + 'number should fail if below min (10)', + ) + await throws( + () => db.create('thing', { score: 21 }), + 'number should fail if above max (20)', + ) + await db.create('thing', { score: 10 }) + await db.create('thing', { score: 20 }) + + // Extended validation + await throws( + () => db.create('thing', { score: NaN }), + 'number should fail with NaN', + ) + await throws( + () => db.create('thing', { score: Infinity }), + 'number should fail with Infinity', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { score: true }), + 'number should fail with boolean', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { score: [] }), + 'number should fail with array', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { score: {} }), + 'number should fail with object', + ) +}) diff --git a/test/modify/validation/required.ts b/test/modify/validation/required.ts new file mode 100644 index 0000000000..2a6c969eda --- /dev/null +++ b/test/modify/validation/required.ts @@ -0,0 +1,99 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - required', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: { type: 'string', required: true }, + description: { type: 'string' }, + nested: { + type: 'object', + required: true, + props: { + reqInNested: { type: 'string', required: true }, + }, + }, + optionalNested: { + type: 'object', + props: { + reqInOptional: { type: 'string', required: true }, + }, + }, + }, + }, + }) + // 1. Top-level required + const throwsMatch = async (fn: () => Promise, re: RegExp) => { + try { + await fn() + } catch (err: any) { + if (re.test(err.message)) return + throw new Error(`Error message "${err.message}" does not match ${re}`) + } + throw new Error('Function should have thrown') + } + + // 1. Top-level required + await throwsMatch( + () => + // @ts-expect-error + db.create('thing', { + description: 'stuff', + nested: { reqInNested: 'yes' }, + }), + /Field name is required/, + ) + + // 2. Required object itself missing + await throwsMatch( + () => + // @ts-expect-error + db.create('thing', { + name: 'cool', + // nested is missing + }), + /Field nested is required/, + ) + + // 3. Required nested field missing (in required object) + await throwsMatch( + () => + db.create('thing', { + name: 'cool', + // @ts-expect-error + nested: {}, // reqInNested missing + optionalNested: { + reqInOptional: 'xx', + }, + }), + /Field nested\.reqInNested is required/, + ) + + // 4. Required nested field missing (in optional object, if object is provided) + await throwsMatch( + () => + db.create('thing', { + name: 'cool', + nested: { reqInNested: 'yes' }, + // @ts-expect-error + optionalNested: {}, // reqInOptional missing + }), + /Field optionalNested\.reqInOptional is required/, + ) + + const id1 = await db.create('thing', { + name: 'cool', + nested: { reqInNested: 'yes' }, + optionalNested: { reqInOptional: 'also yes' }, + }) + + // 6. Update should not trigger required validation + // @ts-ignore + await db.update('thing', id1, { description: 'updated' }) + + // Update nested field without others + // @ts-ignore + await db.update('thing', id1, { nested: { reqInNested: 'updated' } }) +}) diff --git a/test/modify/validation/string.ts b/test/modify/validation/string.ts new file mode 100644 index 0000000000..dfe585f420 --- /dev/null +++ b/test/modify/validation/string.ts @@ -0,0 +1,59 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - string', async (t) => { + const db = await testDb(t, { + types: { + thing: { + name: { type: 'string', min: 2, max: 5 }, + myString: { type: 'string' }, + }, + }, + }) + + // String + await throws( + // @ts-expect-error + () => db.create('thing', { name: 123 }), + 'string should fail with number', + ) + await throws( + () => db.create('thing', { name: 'a' }), + 'string should fail if too short', + ) + await throws( + () => db.create('thing', { name: 'aaaaaa' }), + 'string should fail if too long', + ) + await db.create('thing', { name: 'abc' }) + + // Length validation (min: 2, max: 5) + await throws( + () => db.create('thing', { name: 'a' }), + 'string should fail if too short (1 char)', + ) + await throws( + () => db.create('thing', { name: 'aaaaaa' }), + 'string should fail if too long (6 chars)', + ) + await db.create('thing', { name: 'ab' }) + await db.create('thing', { name: 'abcde' }) + + // Extended validation + await throws( + // @ts-expect-error + () => db.create('thing', { name: ['a'] }), + 'string should fail with array', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { name: {} }), + 'string should fail with object', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { name: true }), + 'string should fail with boolean', + ) +}) diff --git a/test/modify/validation/text.ts b/test/modify/validation/text.ts new file mode 100644 index 0000000000..986ff178d1 --- /dev/null +++ b/test/modify/validation/text.ts @@ -0,0 +1,56 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - text', async (t) => { + const db = await testDb(t, { + locales: { + en: true, + de: true, + }, + types: { + thing: { + myText: 'text', + }, + }, + }) + + // Text + await throws( + // @ts-expect-error + () => db.create('thing', { myText: 123 }), + 'text should fail with number', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myText: { en: 123 } }), + 'text value should fail with number', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myText: { xx: 'hello' } }), + 'text should fail with invalid locale', + ) + await db.create('thing', { myText: { en: 'works' } }) + + // Extended validation + await throws( + () => db.create('thing', { myText: 'hello' }), + 'text should fail if passed as string not object', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myText: { en: {} } }), + 'text value should fail with object', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myText: { en: [] } }), + 'text value should fail with array', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myText: [] }), + 'text should fail with array', + ) +}) diff --git a/test/modify/validation/timestamp.ts b/test/modify/validation/timestamp.ts new file mode 100644 index 0000000000..b98f2e4f93 --- /dev/null +++ b/test/modify/validation/timestamp.ts @@ -0,0 +1,54 @@ +import { throws } from '../../shared/assert.js' +import { testDb } from '../../shared/index.js' +import test from '../../shared/test.js' + +await test('modify - validation - timestamp', async (t) => { + const db = await testDb(t, { + types: { + thing: { + myTs: { type: 'timestamp', min: 1000, max: 2000 }, + }, + }, + }) + + // Timestamp + await throws(() => db.create('thing', { myTs: 500 }), 'timestamp too small') + await throws(() => db.create('thing', { myTs: 3000 }), 'timestamp too large') + await db.create('thing', { myTs: 1500 }) + + // Range validation (min: 1000, max: 2000) + await throws( + () => db.create('thing', { myTs: 999 }), + 'timestamp should fail below min', + ) + await throws( + () => db.create('thing', { myTs: 2001 }), + 'timestamp should fail above max', + ) + await db.create('thing', { myTs: 1000 }) + await db.create('thing', { myTs: 2000 }) + + // Extended validation + await throws( + () => db.create('thing', { myTs: '2022-01-01' }), + 'timestamp should fail with date string', + ) + await throws( + () => db.create('thing', { myTs: NaN }), + 'timestamp should fail with NaN', + ) + await throws( + () => db.create('thing', { myTs: Infinity }), + 'timestamp should fail with Infinity', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myTs: true }), + 'timestamp should fail with boolean', + ) + await throws( + // @ts-expect-error + () => db.create('thing', { myTs: {} }), + 'timestamp should fail with object', + ) +}) diff --git a/test/number.perf.ts b/test/number.perf.ts index 1451359166..64944f67c8 100644 --- a/test/number.perf.ts +++ b/test/number.perf.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { perf } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('create 1m uint32', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { uint32: 'uint32', diff --git a/test/number.ts b/test/number.ts index b807adc8f1..b323d9897e 100644 --- a/test/number.ts +++ b/test/number.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' +import { testDb } from './shared/index.js' import test from './shared/test.js' import { deepEqual } from './shared/assert.js' -await test('kkk', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +await test('basic', async (t) => { + const db = await testDb(t, { types: { user: { props: { @@ -70,7 +64,7 @@ await test('kkk', async (t) => { await db.drain() // will become async deepEqual( - (await db.query('user').get()).toObject(), + await db.query2('user').get(), payloads.map((payload, index) => { return { id: index + 1, @@ -78,7 +72,6 @@ await test('kkk', async (t) => { } }), ) - console.log('==========================') const newThing = await db.create('user', { number: { increment: 12, @@ -103,7 +96,7 @@ await test('kkk', async (t) => { }, }) - deepEqual((await db.query('user', newThing).get()).toObject(), { + deepEqual(await db.query2('user', newThing).get(), { id: newThing, number: 12, int8: 12, @@ -138,7 +131,7 @@ await test('kkk', async (t) => { }, }) - deepEqual((await db.query('user', newThing).get()).toObject(), { + deepEqual(await db.query2('user', newThing).get(), { id: newThing, number: 13, int8: 14, @@ -155,7 +148,7 @@ await test('kkk', async (t) => { }, }) - deepEqual((await db.query('user', newThing).get()).toObject(), { + deepEqual(await db.query2('user', newThing).get(), { id: newThing, number: 13, int8: 14, @@ -172,7 +165,7 @@ await test('kkk', async (t) => { }, }) - deepEqual((await db.query('user', newThing).get()).toObject(), { + deepEqual(await db.query2('user', newThing).get(), { id: newThing, number: 13, int8: 14, @@ -187,7 +180,7 @@ await test('kkk', async (t) => { uint16: 100, }) - deepEqual((await db.query('user', newThing).get()).toObject(), { + deepEqual(await db.query2('user', newThing).get(), { id: newThing, number: 13, int8: 14, diff --git a/test/query-ast/aggregates.ts b/test/query-ast/aggregates.ts new file mode 100644 index 0000000000..39803067a9 --- /dev/null +++ b/test/query-ast/aggregates.ts @@ -0,0 +1,299 @@ +import { QueryAst } from '../../src/db-query/ast/ast.js' +import { astToQueryCtx } from '../../src/db-query/ast/toCtx.js' +import { AutoSizedUint8Array } from '../../src/utils/AutoSizedUint8Array.js' +import { BasedDb, debugBuffer } from '../../src/sdk.js' +import { + resultToObject, + serializeReaderSchema, +} from '../../src/protocol/index.js' +import { deepEqual } from 'assert' + +import test from '../shared/test.js' + +await test('basic', async (t) => { + const db = new BasedDb({ path: t.tmp }) + await db.start({ clean: true }) + t.after(() => db.destroy()) + + const client = await db.setSchema({ + types: { + user: { + age: 'uint8', + balance: 'number', + }, + }, + }) + + const a = client.create('user', { + age: 18, + balance: -130.2, + }) + const b = client.create('user', { + age: 30, + balance: 0, + }) + const c = client.create('user', { + age: 41, + balance: 1500.5, + }) + + await db.drain() + const ast: QueryAst = { + type: 'user', + sum: { + props: ['age', 'balance'], + }, + stddev: { + props: ['age'], + samplingMode: 'population', + }, + variance: { + props: ['age'], + }, + // count: { props: 'age' }, // not implementd yet + count: {}, + } + const ctx = astToQueryCtx(client.schema!, ast, new AutoSizedUint8Array(1000)) + const result = await db.server.getQueryBuf(ctx.query) + // debugBuffer(result) + + const readSchemaBuf = serializeReaderSchema(ctx.readSchema) + + const obj = resultToObject(ctx.readSchema, result, result.byteLength - 4) + // console.dir(obj, { depth: 10 }) + + deepEqual( + obj, + { + age: { sum: 89, stddev: 9.392668535736911, variance: 88.22222222222217 }, + balance: { sum: 1370.3 }, + count: 3, + }, + 'basic accum, no groupby, no refs', + ) + + // console.log(JSON.stringify(obj), readSchemaBuf.byteLength, result.byteLength) +}) + +await test('group by', async (t) => { + const db = new BasedDb({ + path: t.tmp, + }) + await db.start({ clean: true }) + t.after(() => db.stop()) + + const tripClass = ['Cupper', 'Silver', 'Gold'] + + const client = await db.setSchema({ + types: { + trip: { + pickup: 'timestamp', + dropoff: 'timestamp', + distance: 'number', + vendorIduint8: 'uint8', + vendorIdint8: 'int8', + vendorIduint16: 'uint16', + vendorIdint16: 'int16', + vendorIduint32: 'int32', + vendorIdint32: 'int32', + vendorIdnumber: 'number', + vendorName: 'string', + class: tripClass, + }, + }, + }) + + db.create('trip', { + vendorIduint8: 13, + vendorIdint8: 13, + vendorIduint16: 813, + vendorIdint16: 813, + vendorIduint32: 813, + vendorIdint32: 813, + vendorIdnumber: 813.813, + vendorName: 'Derp taxis', + pickup: new Date('2024-12-11T11:00-03:00'), + dropoff: new Date('2024-12-11T11:10-03:00'), + distance: 513.44, + class: 'Cupper', + }) + + db.create('trip', { + vendorIduint8: 13, + vendorIdint8: 13, + vendorIduint16: 813, + vendorIdint16: 813, + vendorIduint32: 813, + vendorIdint32: 813, + vendorIdnumber: 813.813, + vendorName: 'Derp taxis', + pickup: new Date('2024-12-11T13:00-03:00'), + dropoff: new Date('2024-12-11T13:30-03:00'), + distance: 100.1, + class: 'Gold', + }) + + await db.drain() + + let ast: any + let ctx: any + let result: any + let readSchemaBuf: any + let obj: any + + // --------------- Group By string key --------------- // + + ast = { + type: 'trip', + sum: { + props: ['distance'], + }, + groupBy: { + prop: 'vendorName', + }, + } as QueryAst + ctx = astToQueryCtx(client.schema!, ast, new AutoSizedUint8Array(1000)) + result = await db.server.getQueryBuf(ctx.query) + // debugBuffer(result) + + readSchemaBuf = await serializeReaderSchema(ctx.readSchema) + + obj = resultToObject(ctx.readSchema, result, result.byteLength - 4) + + // console.dir(obj, { depth: 10 }) + deepEqual( + obj, + { 'Derp taxis': { distance: { sum: 613.5400000000001 } } }, + 'Group By string key', + ) // TODO: rounding check + + // --------------- Group By numeric key --------------- // + ast = { + type: 'trip', + sum: { + props: ['distance'], + }, + count: {}, + groupBy: { + prop: 'vendorIduint32', + }, + } + ctx = astToQueryCtx(client.schema!, ast, new AutoSizedUint8Array(1000)) + result = await db.server.getQueryBuf(ctx.query) + + readSchemaBuf = await serializeReaderSchema(ctx.readSchema) + + obj = resultToObject(ctx.readSchema, result, result.byteLength - 4) + + deepEqual( + obj, + { '813': { count: 2, distance: { sum: 613.5400000000001 } } }, + 'Group By numeric key', + ) + + // --------------- Group By named interval --------------- // + + const dtFormat = new Intl.DateTimeFormat('pt-BR', { + dateStyle: 'short', + timeStyle: 'short', + timeZone: 'America/Sao_Paulo', + }) + + ast = { + type: 'trip', + sum: { + props: ['distance'], + }, + count: {}, + groupBy: { + prop: 'pickup', + step: 'day', + timeFormat: dtFormat, + timeZone: 'America/Sao_Paulo', + }, + } as QueryAst + ctx = astToQueryCtx(client.schema!, ast, new AutoSizedUint8Array(1000)) + result = await db.server.getQueryBuf(ctx.query) + + readSchemaBuf = await serializeReaderSchema(ctx.readSchema) + + obj = resultToObject(ctx.readSchema, result, result.byteLength - 4) + + deepEqual( + obj, + { '11': { count: 2, distance: { sum: 613.5400000000001 } } }, + 'Group By named interval', + ) + + // --------------- Group By range interval with output format --------------- // + ast = { + type: 'trip', + sum: { + props: ['distance'], + }, + count: {}, + groupBy: { + prop: 'pickup', + step: 2.5 * 60 * 60, // 2:30h = 2.5 * 3600s + display: dtFormat, + timeZone: 'America/Sao_Paulo', + }, + } as QueryAst + ctx = astToQueryCtx(client.schema!, ast, new AutoSizedUint8Array(1000)) + result = await db.server.getQueryBuf(ctx.query) + + readSchemaBuf = await serializeReaderSchema(ctx.readSchema) + + obj = resultToObject(ctx.readSchema, result, result.byteLength - 4) + + deepEqual( + obj, + { + '11/12/2024 11:00 – 13:30': { + count: 2, + distance: { sum: 613.5400000000001 }, + }, + }, + 'Group By range interval with output format', + ) + + // --------------- Group By enum keys --------------- // + + ast = { + type: 'trip', + sum: { + props: ['distance'], + }, + count: {}, + groupBy: { + prop: 'class', + }, + } as QueryAst + ctx = astToQueryCtx(client.schema!, ast, new AutoSizedUint8Array(1000)) + result = await db.server.getQueryBuf(ctx.query) + + readSchemaBuf = await serializeReaderSchema(ctx.readSchema) + + obj = resultToObject(ctx.readSchema, result, result.byteLength - 4) + + deepEqual( + obj, + { + Cupper: { count: 1, distance: { sum: 513.44 } }, + Gold: { count: 1, distance: { sum: 100.1 } }, + }, + 'Group By enum keys', + ) + + // console.log('🙈🙈🙈 ------------------------------- 🙈🙈🙈') + + // const r = await db + // .query2('trip') + // // .count() + // .sum('distance') + // .groupBy('class', {}) + // .get() + + // r.debug() + // console.dir(r, { depth: 10 }) +}) diff --git a/test/query-ast/include.ts b/test/query-ast/include.ts new file mode 100644 index 0000000000..198cdd3376 --- /dev/null +++ b/test/query-ast/include.ts @@ -0,0 +1,238 @@ +import { deflate } from 'fflate' +import { EdgeStrategy, QueryAst } from '../../src/db-query/ast/ast.js' +import { astToQueryCtx } from '../../src/db-query/ast/toCtx.js' +import { + resultToObject, + serializeReaderSchema, +} from '../../src/protocol/index.js' +import { BasedDb, debugBuffer } from '../../src/sdk.js' +import { AutoSizedUint8Array } from '../../src/utils/AutoSizedUint8Array.js' +import { writeUint16, writeUint32 } from '../../src/utils/uint8.js' +import wait from '../../src/utils/wait.js' +import { perf } from '../shared/perf.js' +import test from '../shared/test.js' +import { deflateSync } from 'zlib' +import { fastPrng } from '../../src/utils/fastPrng.js' + +await test('include', async (t) => { + const db = new BasedDb({ path: t.tmp }) + await db.start({ clean: true }) + t.after(() => db.destroy()) + const client = await db.setSchema({ + types: { + friend: { + y: 'uint32', + }, + user: { + name: 'string', + x: 'boolean', + flap: 'uint32', + y: 'uint32', + cook: { + type: 'object', + props: { + cookie: 'number', + }, + }, + mrFriend: { + ref: 'user', + prop: 'mrFriend', + $level: 'uint32', + }, + friends: { + items: { + ref: 'user', + prop: 'friends', + $level: 'number', + }, + }, + }, + }, + }) + + const a = client.create('user', { + name: 'mr jim', + y: 4, + x: true, + flap: 9999, + cook: { + cookie: 1234, + }, + }) + + const b = await client.create('user', { + name: 'mr snurf b', + y: 15, + x: true, + flap: 9999, + cook: { + cookie: 1234, + }, + mrFriend: { id: a, $level: 67 }, + }) + + let d = Date.now() + + const rand = fastPrng() + + for (let i = 0; i < 1e6; i++) { + client.create('user', { + name: `mr snurf ${i}`, + y: i, + x: true, + flap: 9999, + cook: { + cookie: 1234, + }, + friends: [ + { id: a, $level: rand(0, 200) }, + { id: b, $level: rand(0, 200) }, + ], + }) + } + + await db.drain() + + console.log(Date.now() - d, 'ms') + + // filter: RE-ADD REFERENCE + // filter: REFERENCES + + // GET REFERENCEs + // SORT REFERENCES + // FILTER REFENRRENS + // FILTER REFS BY EDGE + // ALIAS + + const ast: QueryAst = { + type: 'user', + range: { start: 0, end: 1e6 }, + // target: b, + // order: 'desc', + // sort: { prop: 'y' }, + + // filter: { + // props: { + // flap: { ops: [{ op: '=', val: 9999 }] }, + // }, + // and: { + // props: { + // y: { ops: [{ op: '=', val: 100 }] }, + // }, + // or: { + // props: { + // y: { ops: [{ op: '=', val: 3 }] }, + // }, + // or: { + // props: { + // y: { ops: [{ op: '=', val: 4 }] }, + // }, + // }, + // }, + // }, + // or: { + // props: { + // y: { ops: [{ op: '=', val: 670 }] }, + // }, + // or: { + // props: { + // y: { ops: [{ op: '=', val: 15 }] }, + // }, + // }, + // }, + // }, + + props: { + y: { include: {} }, + name: { include: {} }, + friends: { + // order: 'desc', + // sort: { prop: '$level' }, // can just be the prop? + props: { + name: { include: {} }, + y: { include: {} }, + }, + // edges: { + // props: { + // $level: { include: {} }, + // }, + // }, + filter: { + // wrong include (if no edges provided) + edgeStrategy: EdgeStrategy.edgeAndProps, + props: { + y: { + ops: [{ op: '>', val: 5 }], + }, + }, + edges: { + props: { + $level: { + ops: [{ op: '>', val: 100 }], + }, + }, + }, + }, + }, + // mrFriend: { + // props: { + // y: { include: {} }, + // }, + // edges: { + // props: { + // $level: { include: {} }, + // }, + // }, + // }, + }, + } + + console.dir(ast, { depth: 10 }) + + const ctx = astToQueryCtx(client.schema!, ast, new AutoSizedUint8Array(1000)) + + debugBuffer(ctx.query) + + console.log(deflateSync(ctx.query).byteLength) + + debugBuffer(deflateSync(ctx.query).toString('hex')) + + const queries: any = [] + for (let i = 0; i < 10; i++) { + const x = ctx.query.slice(0) + writeUint32(x, i + 1, 0) + queries.push(x) + } + + await perf( + async () => { + const q: any = [] + for (let i = 0; i < 10; i++) { + q.push(db.server.getQueryBuf(queries[i])) + } + const x = await Promise.all(q) + // console.log(x) + }, + 'filter speed', + { + repeat: 10, + }, + ) + + // const readSchemaBuf = serializeReaderSchema(ctx.readSchema) + // console.log(result.byteLength) + const result = await db.server.getQueryBuf(ctx.query) + + const obj = resultToObject(ctx.readSchema, result, result.byteLength - 4) + + // console.dir(obj, { depth: 10 }) + + await wait(1000) + + // RETURN NULL FOR UNDEFINED + + console.log( + // JSON.stringify(obj).length, + result.byteLength, + ) +}) diff --git a/test/query-ast/validate.perf.ts b/test/query-ast/validate.perf.ts new file mode 100644 index 0000000000..178da5ae1e --- /dev/null +++ b/test/query-ast/validate.perf.ts @@ -0,0 +1,189 @@ +import * as v from 'valibot' +import { type } from 'arktype' +import test from '../shared/test.js' + +// ========================================== +// 1. DATA GENERATION +// ========================================== +const validData = { + captchaToken: 'valid_token_123', + metadata: { + votes: { ddi_1: 100, ddi_2: 200, ddi_3: 300, ddi_4: 400 }, + editionId: 999, + }, +} + +// ========================================== +// 2. RAW JS VALIDATOR +// ========================================== +function manualValidator(data: any): boolean { + if (!data || typeof data !== 'object') return false + + if (typeof data.captchaToken !== 'string' && data.captchaToken !== null) + return false + + const metadata = data.metadata + if (!metadata || typeof metadata !== 'object') return false + + if ( + typeof metadata.editionId !== 'number' || + metadata.editionId < 0 || + metadata.editionId > 4294967295 || + !Number.isInteger(metadata.editionId) + ) + return false + + const votes = metadata.votes + if (!votes || typeof votes !== 'object') return false + + for (const key in votes) { + const val = votes[key] + if ( + typeof val !== 'number' || + val < 0 || + val > 4294967295 || + !Number.isInteger(val) + ) + return false + } + + return true +} + +// ========================================== +// 3. SCHEMA FACTORIES +// ========================================== + +const createValibotSchema = () => { + return v.object({ + captchaToken: v.nullable(v.string()), + metadata: v.object({ + editionId: v.pipe( + v.number(), + v.integer(), + v.minValue(0), + v.maxValue(4294967295), + ), + votes: v.record( + v.string(), + v.pipe(v.number(), v.integer(), v.minValue(0), v.maxValue(4294967295)), + ), + }), + }) +} + +const createArkTypeSchema = () => { + return type({ + captchaToken: 'string|null', + metadata: { + editionId: '0<=number<=4294967295%1', + votes: { + '[string]': '0<=number<=4294967295%1', + }, + }, + }) +} + +// ========================================== +// 4. BENCH RUNNERS +// ========================================== + +function measureCreation(name: string, factory: () => any, iterations: number) { + if (global.gc) global.gc() + const startMem = process.memoryUsage().heapUsed + const start = process.hrtime.bigint() + + const schemas: any[] = [] + for (let i = 0; i < iterations; i++) { + schemas.push(factory()) + } + + const end = process.hrtime.bigint() + const endMem = process.memoryUsage().heapUsed + const totalTimeNs = Number(end - start) + const totalMemDiff = endMem - startMem + const memPerOp = totalMemDiff / iterations // bytes + + const opsPerSec = (iterations / totalTimeNs) * 1e9 + const formatter = new Intl.NumberFormat('en-US', { maximumFractionDigits: 0 }) + + // Keep schemas alive to prevent GC during measurement + // console.log(schemas.length) + + console.log( + `${name.padEnd(10)} | Creation: ${formatter.format( + opsPerSec, + )} ops/sec | Mem: ${memPerOp.toFixed(2)} bytes/inst`, + ) +} + +function runValidationBenchmark( + name: string, + fn: () => void, + iterations: number = 500_000, +) { + process.stdout.write(`Running validation ${name}... `) + + // 1. Warmup (Trigger JIT optimization) + for (let i = 0; i < 1e5; i++) { + fn() + } + + // 2. Garbage Collection (Optional: helps stability if using standard node flags) + if (global.gc) global.gc() + + // 3. Measure + const start = Date.now() + for (let i = 0; i < iterations; i++) { + fn() + } + const end = Date.now() + + const totalTimeMs = end - start + const opsPerSec = (iterations / totalTimeMs) * 1000 + + // Format Output + const formatter = new Intl.NumberFormat('en-US', { + maximumFractionDigits: 0, + }) + console.log(`${formatter.format(opsPerSec)} ops/sec (${totalTimeMs}ms)`) +} + +test('bench validator', async () => { + const ValibotSchema = createValibotSchema() + const ArkTypeSchema = createArkTypeSchema() + + console.log('--- CREATION BENCHMARK ---\n') + const CREATION_ITERATIONS = 10_000 + measureCreation('ArkType', createArkTypeSchema, CREATION_ITERATIONS) + measureCreation('Valibot', createValibotSchema, CREATION_ITERATIONS) + + console.log('\n--- VALIDATION BENCHMARK ---\n') + + // Sanity Checks + if (!manualValidator(validData)) throw new Error('Manual Failed') + if (!v.safeParse(ValibotSchema, validData).success) + throw new Error('Valibot Failed') + if (ArkTypeSchema(validData) instanceof type.errors) + throw new Error('ArkType Failed') + + const ITERATIONS = 10_000_000 + + runValidationBenchmark( + 'Raw JS ', + () => manualValidator(validData), + ITERATIONS, + ) + runValidationBenchmark( + 'ArkType ', + () => ArkTypeSchema(validData), + ITERATIONS, + ) + runValidationBenchmark( + 'Valibot ', + () => v.parse(ValibotSchema, validData), + ITERATIONS, + ) + + console.log('\n--- DONE ---') +}) diff --git a/test/query/ast.ts b/test/query/ast.ts new file mode 100644 index 0000000000..02a69ef75e --- /dev/null +++ b/test/query/ast.ts @@ -0,0 +1,329 @@ +import { query } from '../../src/db-client/query2/index.js' +import { deepEqual } from '../shared/index.js' +import test from '../shared/test.js' + +await test('query ast creation', async (t) => { + type Schema = { + locales: { + en: true + nl: true + } + types: { + user: { + friend: { + ref: 'user' + prop: 'friend' + $rating: 'uint32' + } + friends: { + items: { + ref: 'user' + prop: 'friend' + $rating: 'uint32' + } + } + name: 'string' + isNice: 'boolean' + age: 'number' + } + } + } + + { + const q = query('user') + .filter('isNice', '=', false) + .and('name', '=', 'youzi') + + deepEqual(q.ast, { + type: 'user', + filter: { + props: { + isNice: { ops: [{ op: '=', val: false }] }, + name: { ops: [{ op: '=', val: 'youzi' }] }, + }, + }, + }) + } + + { + const q = query('user') + .filter('isNice', '=', false) + .and('name', '=', 'youzi') + .or('name', '=', 'james') + + deepEqual(q.ast, { + type: 'user', + filter: { + props: { + isNice: { ops: [{ op: '=', val: false }] }, + name: { ops: [{ op: '=', val: 'youzi' }] }, + }, + or: { + props: { + name: { ops: [{ op: '=', val: 'james' }] }, + }, + }, + }, + }) + } + + { + const q = query('user') + .filter('isNice', '=', false) + .and('name', '=', 'youzi') + .or('name', '=', 'james') + .and('isNice', '=', false) + + deepEqual(q.ast, { + type: 'user', + filter: { + props: { + isNice: { ops: [{ op: '=', val: false }] }, + name: { ops: [{ op: '=', val: 'youzi' }] }, + }, + or: { + props: { + name: { ops: [{ op: '=', val: 'james' }] }, + isNice: { ops: [{ op: '=', val: false }] }, + }, + }, + }, + }) + } + + { + const q = query('user') + .filter((filter) => filter('name', '=', 'youzi').or('isNice', '=', true)) + .or((filter) => filter('name', '=', 'james').or('isNice', '=', false)) + + deepEqual(q.ast, { + type: 'user', + filter: { + and: { + props: { + name: { ops: [{ op: '=', val: 'youzi' }] }, + }, + or: { + props: { + isNice: { ops: [{ op: '=', val: true }] }, + }, + }, + }, + or: { + props: { + name: { ops: [{ op: '=', val: 'james' }] }, + }, + or: { + props: { + isNice: { ops: [{ op: '=', val: false }] }, + }, + }, + }, + }, + }) + } + + { + const q = query('user') + .filter((filter) => filter('name', '=', 'youzi').or('isNice', '=', true)) + + .or((filter) => + filter((filter) => + filter('name', '=', 'james').or('isNice', '!=', true), + ) + .or('isNice', '=', false) + .and('name', '!=', 'olli'), + ) + + deepEqual(q.ast, { + type: 'user', + filter: { + and: { + props: { + name: { ops: [{ op: '=', val: 'youzi' }] }, + }, + or: { + props: { + isNice: { ops: [{ op: '=', val: true }] }, + }, + }, + }, + or: { + and: { + props: { + name: { ops: [{ op: '=', val: 'james' }] }, + }, + or: { + props: { + isNice: { ops: [{ op: '!=', val: true }] }, + }, + }, + }, + or: { + props: { + isNice: { ops: [{ op: '=', val: false }] }, + name: { ops: [{ op: '!=', val: 'olli' }] }, + }, + }, + }, + }, + }) + } + + { + const q = query('user').sum('age') + deepEqual(q.ast, { + type: 'user', + sum: { props: ['age'] }, + }) + } + + { + const q = query('user') + .count() + .cardinality('name') + .avg('age') + .hmean('age') + .max('age') + .min('age') + .stddev('age', { mode: 'population' }) + .var('age', { mode: 'sample' }) + .groupBy('name') + + deepEqual(q.ast, { + type: 'user', + count: {}, + cardinality: { props: ['name'] }, + avg: { props: ['age'] }, + hmean: { props: ['age'] }, + max: { props: ['age'] }, + min: { props: ['age'] }, + stddev: { props: ['age'], samplingMode: 'population' }, + variance: { props: ['age'], samplingMode: 'sample' }, + groupBy: { prop: 'name' }, + }) + } + + { + const q = query('user').groupBy('age', 10) + deepEqual(q.ast, { + type: 'user', + groupBy: { prop: 'age', step: 10 }, + }) + } + { + const q1 = query('user').sort('age') + deepEqual(q1.ast, { + type: 'user', + order: 'asc', + sort: { prop: 'age' }, + }) + + const q2 = query('user').sort('age', 'desc') + deepEqual(q2.ast, { + type: 'user', + order: 'desc', + sort: { prop: 'age' }, + }) + + const q3 = query('user').order('desc') + deepEqual(q3.ast, { + type: 'user', + order: 'desc', + }) + + const q4 = query('user').sort('age').order('desc') + deepEqual(q4.ast, { + type: 'user', + order: 'desc', + sort: { prop: 'age' }, + }) + } + + { + const res = query('user').include((select) => + select('friend').sum('age'), + ) + deepEqual(res.ast, { + type: 'user', + props: { + friend: { sum: { props: ['age'] } }, + }, + }) + } + + { + const res = query('user').sum((select) => + select('friends').sum('age'), + ) + console.dir(res.ast, { depth: null }) + // deepEqual(res.ast, { + // type: 'user', + // sum: { + // props: { + // friends: { sum: { props: ['age'] } }, + // }, + // }, + // }) + } + + { + const res = query('user') + .include('friends.$rating') + .filter('friends.$rating', '>', 5) + deepEqual(res.ast, { + type: 'user', + props: { + friends: { + edges: { props: { $rating: { include: {} } } }, + }, + }, + filter: { + props: { + friends: { + edges: { + props: { + $rating: { ops: [{ op: '>', val: 5 }] }, + }, + }, + }, + }, + }, + }) + } + + { + const res = query('user').include((select) => + select('friends').include('$rating'), + ) + deepEqual(res.ast, { + type: 'user', + props: { + friends: { + edges: { props: { $rating: { include: {} } } }, + }, + }, + }) + } + + { + const res = query('user').include((select) => + select('friends').include('$rating').filter('$rating', '>', 5), + ) + deepEqual(res.ast, { + type: 'user', + props: { + friends: { + edges: { props: { $rating: { include: {} } } }, + filter: { + edges: { + props: { + $rating: { ops: [{ op: '>', val: 5 }] }, + }, + }, + }, + }, + }, + }) + } +}) diff --git a/test/query/db.ts b/test/query/db.ts new file mode 100644 index 0000000000..98eb2629b0 --- /dev/null +++ b/test/query/db.ts @@ -0,0 +1,228 @@ +import { deepEqual, testDb } from '../shared/index.js' +import test from '../shared/test.js' + +await test('query db', async (t) => { + const db = await testDb(t, { + locales: { + en: true, + nl: true, + }, + types: { + user: { + name: 'string', + isNice: 'boolean', + age: 'number', + address: { + props: { + street: 'string', + }, + }, + story: 'text', + friend: { + ref: 'user', + prop: 'friend', + }, + friends: { + items: { + ref: 'user', + prop: 'friends', + $rating: 'uint32', + $rank: 'number', + $friendRef: { + ref: 'user', + }, + }, + }, + }, + }, + }) + + const john = db.create('user', { + name: 'john', + isNice: false, + age: 21, + address: { + street: 'Cool street', + }, + }) + + db.create('user', { + name: 'billy', + isNice: true, + age: 49, + friend: john, + friends: [ + { + id: john, + $friendRef: john, + }, + ], + address: { + street: 'Mega street', + }, + }) + + { + const res = await db.query2('user').include('address.street').get() + + deepEqual(res, [ + { id: 1, address: { street: 'Cool street' } }, + { id: 2, address: { street: 'Mega street' } }, + ]) + } + + { + const res = await db + .query2('user') + .include('name') + .filter('isNice', '=', false) + .get() + + deepEqual(res, [{ id: 1, name: 'john' }]) + } + + { + const res = await db + .query2('user') + .include('name') + .filter('isNice', '=', true) + .or('age', '=', 21) + .get() + deepEqual(res, [ + { id: 1, name: 'john' }, + { id: 2, name: 'billy' }, + ]) + } + + // { + // const res = await db + // .query2('user') + // .include('name') + // .filter('isNice', '=', true) + // .or('age', '=', 21) + // .sort('name') + // .get() + // deepEqual(res, [ + // { id: 2, name: 'billy' }, + // { id: 1, name: 'john' }, + // ]) + // } + + { + const res = await db + .query2('user') + .include('name') + .filter('isNice', '=', true) + .or('age', '=', 21) + .range(0, 1) + .get() + + deepEqual(res, [{ id: 1, name: 'john' }]) + } + + { + const res = await db.query2('user').sum('age').get() + deepEqual(res, { age: { sum: 70 } } as any) + } + + // TODO wait for marco to check these + // { + // const res = await db.query2('user').sum('friend.age').get() + // deepEqual(res, { friend: { age: { sum: 70 } } }) + // } + + // { + // const res = await db.query2('user').sum('friends.age').get() + // deepEqual(res, { friends: { age: { sum: 70 } } }) + // } + + // { + // const res = await db + // .query2('user') + // .sum((select) => select('friends').sum('age')) + // .get() + // deepEqual(res, { friends: { age: { sum: 21 } } }) + // } + + // // { + // // const res = await db + // // .query2('user') + // // .include((select) => select('friend').sum('age')) + // // .get() + + // // deepEqual(res, [{ id: 1, friend: { age: { sum: 70 } } }]) + // // } + + // { + // const res = await db.query2('user').sum('age').groupBy('name').get() + // deepEqual(res, { john: { age: { sum: 21 } }, billy: { age: { sum: 49 } } }) + // } + + // { + // const res = await db + // .query2('user') + // .filter('isNice', '=', true) + // .sum('age') + // .groupBy('name') + // .get() + // deepEqual(res, { billy: { age: { sum: 49 } } }) + // } + + { + const q = db.query2('user').include('friends.$rank', 'friends.$rating') + const res = await q.get() + deepEqual(res, [ + { id: 1, friends: [{ id: 2, $rank: 0, $rating: 0 }] }, + { id: 2, friends: [{ id: 1, $rank: 0, $rating: 0 }] }, + ]) + } + + { + const q = db.query2('user').include('friends.$friendRef') + const res = await q.get() + deepEqual(res, [ + { + id: 1, + friends: [ + { + id: 2, + $friendRef: { + id: 1, + name: 'john', + isNice: false, + age: 21, + address: { + street: 'Cool street', + }, + story: { + en: '', + nl: '', + }, + }, + }, + ], + }, + { + id: 2, + friends: [ + { + id: 1, + $friendRef: { + id: 1, + name: 'john', + isNice: false, + age: 21, + address: { + street: 'Cool street', + }, + story: { + en: '', + nl: '', + }, + }, + }, + ], + }, + ]) + } +}) diff --git a/test/query/expire.ts b/test/query/expire.ts new file mode 100644 index 0000000000..af827a3831 --- /dev/null +++ b/test/query/expire.ts @@ -0,0 +1,35 @@ +import wait from '../../src/utils/wait.js' +import { deepEqual, testDb } from '../shared/index.js' +import test from '../shared/test.js' + +await test('query db', async (t) => { + const db = await testDb(t, { + locales: { + en: true, + nl: true, + }, + types: { + user: { + name: 'string', + expiresAt: { + type: 'timestamp', + expire: true, + }, + }, + }, + }) + + const expiresAt = Date.now() + 1e3 + const john = await db.create('user', { + name: 'john', + expiresAt, + }) + + deepEqual(await db.query2('user', john).get(), { + id: 1, + name: 'john', + expiresAt, + }) + await wait(1e3) + deepEqual(await db.query2('user', john).get(), null) +}) diff --git a/test/query/types.ts b/test/query/types.ts new file mode 100644 index 0000000000..f6959850fe --- /dev/null +++ b/test/query/types.ts @@ -0,0 +1,328 @@ +import { testDb } from '../shared/index.js' +import test from '../shared/test.js' + +await test.skip('query types', async (t) => { + const db = await testDb(t, { + locales: { en: true }, + types: { + user: { + isNice: 'boolean', + }, + everything: { + s: 'string', + n: 'number', + i8: 'int8', + u8: 'uint8', + i16: 'int16', + u16: 'uint16', + i32: 'int32', + u32: 'uint32', + b: 'boolean', + txt: 'text', + js: 'json', + ts: 'timestamp', + bin: 'binary', + als: 'alias', + // vec: 'vector', + // col: 'colvec', + card: 'cardinality', + myEnum: ['a', 'b'], + nested: { + props: { + a: 'string', + }, + }, + myRef: { type: 'reference', ref: 'user', prop: 'backRef' }, + myRefs: { + type: 'references', + items: { ref: 'user', prop: 'backRefs' }, + }, + }, + }, + }) + + const id1 = await db.create('everything', {}) + + const id = await db.create('user', { + isNice: true, + }) + + const everythingId = await db.create('everything', { + s: 'some string', + n: 123, + i8: 1, + u8: 1, + i16: 1, + u16: 1, + i32: 1, + u32: 1, + b: true, + txt: { en: 'some text' }, + js: { a: 1 }, + ts: Date.now(), + bin: new Uint8Array(1), + als: 'alias', + myEnum: 'a', + nested: { + a: 'nested string', + }, + myRef: id, + myRefs: [id], + }) + + // Wait for consistency + await new Promise((resolve) => setTimeout(resolve, 100)) + + const query = db.query2('user') + const data = await query.get() + + if (data.length > 0) { + const user = data[0] + // Should be strictly boolean, not boolean | null | undefined + const isNice: boolean = user.isNice + const id: number = user.id + // @ts-expect-error + const wrong: string = user.isNice + // @ts-expect-error + const unknown = user.something + } + + const query2 = db.query2('everything') + const res = await query2.get() + const everything = res[0] + + if (res.length > 0) { + const s: string = everything.s + const n: number = everything.n + const i8: number = everything.i8 + const u8: number = everything.u8 + const i16: number = everything.i16 + const u16: number = everything.u16 + const i32: number = everything.i32 + const u32: number = everything.u32 + const b: boolean = everything.b + // const txt: string = everything.txt + const js: any = everything.js + const ts: number = everything.ts + const bin: Uint8Array = everything.bin + const als: string = everything.als + const card: number = everything.card + const myEnum: 'a' | 'b' = everything.myEnum + const nestedA: string = everything.nested.a + const id: number = everything.id + + // @ts-expect-error + const wrongEnum: 'c' = everything.myEnum + // @ts-expect-error + const wrongRef: string = everything.myRef + // @ts-expect-error + const wrongRefs: number = everything.myRefs + } + + { + const query = db.query2('everything').include('myEnum') + const data = await query.get() + if (data.length > 0) { + const res = data[0] + const myEnum: 'a' | 'b' = res.myEnum + const id: number = res.id + // @ts-expect-error + const n: number = res.n + } + } + + { + const query = db.query2('everything').include('*') + const data = await query.get() + if (data.length > 0) { + const res = data[0] + const n: number = res.n + const s: string = res.s + const myEnum: 'a' | 'b' = res.myEnum + // @ts-expect-error + const myRef = res.myRef + } + } + { + const query = db.query2('everything').include('**') + const data = await query.get() + if (data.length > 0) { + const res = data[0] + + // references + // const myRef: { id: number } = res.myRef + const myRefs: { id: number }[] = res.myRefs + const id: number = res.id + + // Scalars should be missing + // @ts-expect-error + const n: number = res.n + // @ts-expect-error + const s: string = res.s + // @ts-expect-error + const myEnum: 'a' | 'b' = res.myEnum + } + } + + { + // Combine explicit field + wildcard + const query = db.query2('everything').include('myEnum', '**') + const data = await query.get() + if (data.length > 0) { + const res = data[0] + + const myEnum: 'a' | 'b' = res.myEnum + // const myRef: { id: number } = res.myRef + const myRefs: { id: number }[] = res.myRefs + + // Other scalars missing + // @ts-expect-error + const n: number = res.n + } + } + + { + // Multiple explicit fields + const query = db.query2('everything').include('n', 's', 'nested') + const data = await query.get() + if (data.length > 0) { + const res = data[0] + + const n: number = res.n + const s: string = res.s + const nestedA: string = res.nested.a + + // Missing + // @ts-expect-error + const myEnum: 'a' | 'b' = res.myEnum + } + } + + { + // Scalar wildcard + explicit ref + const query = db.query2('everything').include('*', 'myRefs') + const data = await query.get() + if (data.length > 0) { + const res = data[0] + + const n: number = res.n + const myRefs: { id: number }[] = res.myRefs + + // Excluded ref + // @ts-expect-error + const myRef: number = res.myRef + } + } + + { + // Target specific id + const query = db.query2('everything', 1).include('*', 'myRefs') + const data = await query.get() + + // if ('n' in data) { + // // Check it's a single item (not array) + // const n: number = data.n + // const myRefs: { id: number }[] = data.myRefs + + // // @ts-expect-error + // data.map + + // // @ts-expect-error + // const myRef: number = data.myRef + // } + } + + { + const query = db + .query2('everything', 1) + .include((select) => select('myRefs').include('isNice')) + const data = await query.get() + // if ('myRefs' in data) { + // for (const item of data.myRefs) { + // const isNice: boolean = item.isNice + // } + // } + } + + { + const query = db + .query2('user', 1) + .include((select) => select('backRefs').include('myEnum')) + const data = await query.get() + + // for (const { id, myEnum, nonExistent } of data.backRefs) { + // console.log({ id, myEnum, nonExistent }) + // } + } + + { + // Sum aggregates + const query = db.query2('everything').sum('n', 'i8').sum('card') + const queryInvalid = db.query2('everything') + // @ts-expect-error + queryInvalid.sum('s') + // @ts-expect-error + queryInvalid.sum('b') + + query.avg('n').hmean('n').max('n').min('n').stddev('n').var('n') + + query.cardinality('s') + query.groupBy('s') + + // @ts-expect-error + queryInvalid.avg('s') + // @ts-expect-error + queryInvalid.hmean('s') + // @ts-expect-error + queryInvalid.max('s') + // @ts-expect-error + queryInvalid.min('s') + // @ts-expect-error + queryInvalid.stddev('s') + // @ts-expect-error + queryInvalid.var('s') + } + + { + // Aggregate return type check + const query = db.query2('everything').sum('n') + const res = await query.get() + + if (res) { + const n: number = res.n.sum + // @ts-expect-error + const s = res.s + } + + const queryGroup = db.query2('everything').groupBy('s').sum('n') + const resGroup = await queryGroup.get() + + // resGroup should be Record + if (resGroup) { + const group = resGroup['some-group'] + if (group) { + const n: number = group.n.sum + // @ts-expect-error + const s = group.s + } + } + } +}) + +await test('query types', async (t) => { + const db = await testDb(t, { + types: { + user: { + isNice: 'boolean', + name: { + type: 'string', + required: true, + }, + }, + }, + }) + + // const id = await db.create('user', { + // isNice: true, + // }) +}) diff --git a/test/queryResponse.ts b/test/queryResponse.ts index 123f45b9fe..6f249dc7a8 100644 --- a/test/queryResponse.ts +++ b/test/queryResponse.ts @@ -1,17 +1,12 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { equal } from './shared/assert.js' import { notEqual } from 'assert' import { extractNumber } from '../src/utils/index.js' +import { checksum } from '../src/db-client/query2/index.js' await test('correct version', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -25,11 +20,11 @@ await test('correct version', async (t) => { status: 'a', }) - const response = await db.query('user', user1).get() + const response = await db.query2('user', user1).get() equal( extractNumber(response.version), - response.checksum, + checksum(response), 'Checksum is recoverable from the 53 bit js version number', ) @@ -37,7 +32,7 @@ await test('correct version', async (t) => { status: 'b', }) - const response2 = await db.query('user', user1).get() + const response2 = await db.query2('user', user1).get() notEqual(response.version, response2.version) }) diff --git a/test/range.ts b/test/range.ts index 51216f4b3d..1c81a6d6bf 100644 --- a/test/range.ts +++ b/test/range.ts @@ -1,16 +1,10 @@ import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual, equal } from './shared/assert.js' await test('range', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - // schema - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -62,30 +56,17 @@ await test('range', async (t) => { await db.drain() - const result = await db.query('user').include('nr').range(1, 2).get() - - deepEqual(result.toObject(), [{ id: 2, nr: 2 }]) - - const result2 = await db - .query('user') - .include('nr') - .sort('email') - .range(1, 2) - .get() - - deepEqual(result2.toObject(), [{ id: 2, nr: 2 }]) + deepEqual(await db.query2('user').include('nr').range(1, 2).get(), [ + { id: 2, nr: 2 }, + ]) + deepEqual( + await db.query2('user').include('nr').sort('email').range(1, 2).get(), + [{ id: 2, nr: 2 }], + ) }) await test('default range: 1000', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - // schema - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -102,6 +83,6 @@ await test('default range: 1000', async (t) => { }) } await db.drain() - const res = await db.query('user').get().toObject() + const res = await db.query2('user').get() equal(res.length, 1_000) }) diff --git a/test/raw.ts b/test/raw.ts index 6dfa5898cb..8ef867e275 100644 --- a/test/raw.ts +++ b/test/raw.ts @@ -1,17 +1,11 @@ import { BasedDb } from '../src/index.js' import { deepEqual } from './shared/assert.js' +import { italy } from './shared/examples.js' +import { testDb } from './shared/index.js' import test from './shared/test.js' -await test('raw', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ +await test.skip('cardinality', async (t) => { + const db = await testDb(t, { types: { user: { uniqueSkills: 'cardinality', @@ -23,15 +17,47 @@ await test('raw', async (t) => { uniqueSkills: ['juggling', 'cabaret'], }) const { uniqueSkills } = await db - .query('user', one) + .query2('user', one) .include('uniqueSkills', { raw: true }) .get() - .toObject() await db.create('user', { uniqueSkills, }) - const [a, b] = await db.query('user').get().toObject() + const [a, b] = await db.query2('user').get() deepEqual(a.uniqueSkills, b.uniqueSkills) }) + +await test('string', async (t) => { + const db = await testDb(t, { + types: { + user: { + name: 'string', + role: { type: 'string', maxBytes: 4 }, + resume: { type: 'string', compression: 'deflate' }, + }, + }, + }) + + const one = await db.create('user', { + name: 'user', + role: 'root', + resume: italy, + }) + const { name, role, resume } = await db + .query2('user', one) + .include(['name', 'role', 'resume'], { raw: true }) + .get() + + await db.create('user', { + name, + role, + resume, + }) + + const [a, b] = await db.query2('user').get() + deepEqual(a.name, b.name) + deepEqual(a.role, b.role) + deepEqual(a.resume, b.resume) +}) diff --git a/test/references/create.perf.ts b/test/references/create.perf.ts index 9b274da2c0..e20421a905 100644 --- a/test/references/create.perf.ts +++ b/test/references/create.perf.ts @@ -1,15 +1,10 @@ import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' -import { deepEqual, equal, throws, perf } from '../shared/assert.js' +import { testDb } from '../shared/index.js' +import { perf } from '../shared/assert.js' await test('create 1m items with 1 reference(s)', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { refs: { @@ -37,13 +32,7 @@ await test('create 1m items with 1 reference(s)', async (t) => { }) await test('create 1m items with 100 reference(s)', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { refs: { diff --git a/test/references/reference.perf.ts b/test/references/reference.perf.ts index df84e4ca06..ad7f427104 100644 --- a/test/references/reference.perf.ts +++ b/test/references/reference.perf.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' import { perf } from '../shared/assert.js' +import {testDb} from '../shared/index.js' await test('create 1m single refs', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { ref: { diff --git a/test/references/references.ts b/test/references/references.ts index 9ce87c9d74..fb7bd06659 100644 --- a/test/references/references.ts +++ b/test/references/references.ts @@ -1,16 +1,12 @@ -import { BasedDb } from '../../src/index.js' +import { BasedDb, getDefaultHooks } from '../../src/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' import { wait } from '../../src/utils/index.js' +import {testDb} from '../shared/index.js' +import {DbClientClass} from '../../src/db-client/index.js' await test('references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - t.after(() => t.backup(db)) - await db.start({ clean: true }) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -62,46 +58,34 @@ await test('references', async (t) => { await db.drain() - deepEqual( - (await db.query('article').include('contributors.name').get()).toObject(), - [ - { - id: await strudelArticle, - contributors: [{ id: await mrSnurp, name: 'Mr snurp' }], - }, - { - id: await piArticle, - contributors: [ - { id: await mrSnurp, name: 'Mr snurp' }, - { id: await flippie, name: 'Flippie' }, - ], - }, - ], - ) + deepEqual(await db.query2('article').include('contributors.name').get(), [ + { + id: await strudelArticle, + contributors: [{ id: await mrSnurp, name: 'Mr snurp' }], + }, + { + id: await piArticle, + contributors: [ + { id: await mrSnurp, name: 'Mr snurp' }, + { id: await flippie, name: 'Flippie' }, + ], + }, + ]) - deepEqual( - (await db.query('user').include('articles.name').get()).toObject(), - [ - { - id: 1, - articles: [ - { id: 1, name: 'The wonders of Strudel' }, - { id: 2, name: 'Apple Pie is a Lie' }, - ], - }, - { id: 2, articles: [{ id: 2, name: 'Apple Pie is a Lie' }] }, - ], - ) + deepEqual(await db.query2('user').include('articles.name').get(), [ + { + id: 1, + articles: [ + { id: 1, name: 'The wonders of Strudel' }, + { id: 2, name: 'Apple Pie is a Lie' }, + ], + }, + { id: 2, articles: [{ id: 2, name: 'Apple Pie is a Lie' }] }, + ]) }) await test('one to many', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -143,7 +127,7 @@ await test('one to many', async (t) => { } await db.drain() - deepEqual((await db.query('user').include('resources').get()).toObject(), [ + deepEqual(await db.query2('user').include('resources').get(), [ { id: 1, resources: [ @@ -171,42 +155,33 @@ await test('one to many', async (t) => { }, ]) - deepEqual( - (await db.query('user').include('resources.name').get()).toObject(), - [ - { - id: 1, - resources: [ - { - id: 1, - name: 'thing 0', - }, - { - id: 2, - name: 'thing 1', - }, - { - id: 3, - name: 'thing 2', - }, - { - id: 4, - name: 'thing 3', - }, - ], - }, - ], - ) + deepEqual(await db.query2('user').include('resources.name').get(), [ + { + id: 1, + resources: [ + { + id: 1, + name: 'thing 0', + }, + { + id: 2, + name: 'thing 1', + }, + { + id: 3, + name: 'thing 2', + }, + { + id: 4, + name: 'thing 3', + }, + ], + }, + ]) }) await test('one to many really', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -240,166 +215,148 @@ await test('one to many really', async (t) => { resources: [cpu, kbd, mouse, fd], }) await db.drain() - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 1, - name: 'cpu', - }, - { - id: 2, - name: 'keyboard', - }, - { - id: 3, - name: 'mouse', - }, - { - id: 4, - name: 'floppy', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 1, + name: 'cpu', + }, + { + id: 2, + name: 'keyboard', + }, + { + id: 3, + name: 'mouse', + }, + { + id: 4, + name: 'floppy', + }, + ], + }) await db.update('user', user, { resources: [cpu, kbd, mouse], }) - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 1, - name: 'cpu', - }, - { - id: 2, - name: 'keyboard', - }, - { - id: 3, - name: 'mouse', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 1, + name: 'cpu', + }, + { + id: 2, + name: 'keyboard', + }, + { + id: 3, + name: 'mouse', + }, + ], + }) await db.update('user', user, { resources: [cpu, kbd, mouse], }) - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 1, - name: 'cpu', - }, - { - id: 2, - name: 'keyboard', - }, - { - id: 3, - name: 'mouse', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 1, + name: 'cpu', + }, + { + id: 2, + name: 'keyboard', + }, + { + id: 3, + name: 'mouse', + }, + ], + }) await db.update('user', user, { resources: [cpu, kbd, mouse, fd], }) - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 1, - name: 'cpu', - }, - { - id: 2, - name: 'keyboard', - }, - { - id: 3, - name: 'mouse', - }, - { - id: 4, - name: 'floppy', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 1, + name: 'cpu', + }, + { + id: 2, + name: 'keyboard', + }, + { + id: 3, + name: 'mouse', + }, + { + id: 4, + name: 'floppy', + }, + ], + }) await db.update('user', user, { resources: [kbd, cpu, fd, mouse], }) - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 2, - name: 'keyboard', - }, - { - id: 1, - name: 'cpu', - }, - { - id: 4, - name: 'floppy', - }, - { - id: 3, - name: 'mouse', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 2, + name: 'keyboard', + }, + { + id: 1, + name: 'cpu', + }, + { + id: 4, + name: 'floppy', + }, + { + id: 3, + name: 'mouse', + }, + ], + }) const joy = await db.create('resource', { name: 'joystick', owner: user }) await db.update('resource', joy, { owner: user }) await db.update('resource', joy, { owner: user }) await db.update('resource', joy, { owner: user }) - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 2, - name: 'keyboard', - }, - { - id: 1, - name: 'cpu', - }, - { - id: 4, - name: 'floppy', - }, - { - id: 3, - name: 'mouse', - }, - { - id: 5, - name: 'joystick', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 2, + name: 'keyboard', + }, + { + id: 1, + name: 'cpu', + }, + { + id: 4, + name: 'floppy', + }, + { + id: 3, + name: 'mouse', + }, + { + id: 5, + name: 'joystick', + }, + ], + }) await db.update('user', user, { resources: [kbd, cpu, fd, mouse], @@ -409,78 +366,66 @@ await test('one to many really', async (t) => { update: [joy], }, }) - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 2, - name: 'keyboard', - }, - { - id: 1, - name: 'cpu', - }, - { - id: 4, - name: 'floppy', - }, - { - id: 3, - name: 'mouse', - }, - { - id: 5, - name: 'joystick', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 2, + name: 'keyboard', + }, + { + id: 1, + name: 'cpu', + }, + { + id: 4, + name: 'floppy', + }, + { + id: 3, + name: 'mouse', + }, + { + id: 5, + name: 'joystick', + }, + ], + }) await db.update('user', user, { resources: { update: [joy, kbd, cpu, fd, mouse], }, }) - deepEqual( - await db.query('user', user).include('resources').get().toObject(), - { - id: 1, - resources: [ - { - id: 2, - name: 'keyboard', - }, - { - id: 1, - name: 'cpu', - }, - { - id: 4, - name: 'floppy', - }, - { - id: 3, - name: 'mouse', - }, - { - id: 5, - name: 'joystick', - }, - ], - }, - ) + deepEqual(await db.query2('user', user).include('resources').get(), { + id: 1, + resources: [ + { + id: 2, + name: 'keyboard', + }, + { + id: 1, + name: 'cpu', + }, + { + id: 4, + name: 'floppy', + }, + { + id: 3, + name: 'mouse', + }, + { + id: 5, + name: 'joystick', + }, + ], + }) }) await test('update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -529,31 +474,22 @@ await test('update', async (t) => { contributors: [flippie], }) await db.drain() - deepEqual( - (await db.query('article').include('contributors.name').get()).toObject(), - [ - { - id: 1, - contributors: [ - { - name: 'Flippie', - id: await flippie, - }, - ], - }, - ], - ) + deepEqual(await db.query2('article').include('contributors.name').get(), [ + { + id: 1, + contributors: [ + { + name: 'Flippie', + id: await flippie, + }, + ], + }, + ]) await wait(1000) }) await test('filter', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -611,9 +547,7 @@ await test('filter', async (t) => { await db.drain() deepEqual( - ( - await db.query('article', strudelArticle).include('contributors').get() - ).toObject(), + await db.query2('article', strudelArticle).include('contributors').get(), { id: 1, contributors: [ @@ -627,14 +561,10 @@ await test('filter', async (t) => { ) deepEqual( - ( - await db - .query('article', strudelArticle) - .include((q) => - q('contributors').include('name').filter('flap', '>', 25), - ) - .get() - ).toObject(), + await db + .query2('article', strudelArticle) + .include((q) => q('contributors').include('name').filter('flap', '>', 25)) + .get(), { id: 1, contributors: [ @@ -646,17 +576,15 @@ await test('filter', async (t) => { ) deepEqual( - ( - await db - .query('article', strudelArticle) - .include((q) => { - q('contributors').include('flap') - q('contributors').include('name') - q('contributors').filter('flap', '>', 25) - q('contributors').filter('flap', '<', 35) - }) - .get() - ).toObject(), + await db + .query2('article', strudelArticle) + .include((q) => { + q('contributors').include('flap') + q('contributors').include('name') + q('contributors').filter('flap', '>', 25) + q('contributors').filter('flap', '<', 35) + }) + .get(), { id: 1, contributors: [{ id: 3, name: 'Derpie', flap: 30 }], @@ -665,18 +593,16 @@ await test('filter', async (t) => { ) deepEqual( - ( - await db - .query('article', strudelArticle) - .include((select) => { - select('contributors') - .include('name') - .include('flap') - .filter('flap', '>', 25) - .sort('flap', 'desc') - }) - .get() - ).toObject(), + await db + .query2('article', strudelArticle) + .include((select) => { + select('contributors') + .include('name') + .include('flap') + .filter('flap', '>', 25) + .sort('flap', 'desc') + }) + .get(), { id: 1, contributors: [ @@ -689,15 +615,7 @@ await test('filter', async (t) => { }) // await test('cross reference', async (t) => { -// const db = new BasedDb({ -// path: t.tmp, -// }) - -// await db.start({ clean: true }) - -// t.after(() =>db.destroy()) - -// await db.setSchema({ +// const db = await testDb(t, { // locales: { // en: { required: true }, // fr: { required: true }, @@ -784,7 +702,7 @@ await test('filter', async (t) => { // }) // console.dir( -// await db.query('contestant').include('*', '**').get().toObject(), +// await db.query2('contestant').include('*', '**').get(), // { // depth: null, // }, @@ -793,7 +711,7 @@ await test('filter', async (t) => { // const contestant1 = await db.create('contestant') // console.dir( -// await db.query('contestant').include('*', '**').get().toObject(), +// await db.query2('contestant').include('*', '**').get(), // { // depth: null, // }, @@ -802,7 +720,7 @@ await test('filter', async (t) => { // const country1 = await db.create('country', { name: 'xxx' }) // console.dir( -// await db.query('contestant').include('*', '**').get().toObject(), +// await db.query2('contestant').include('*', '**').get(), // { // depth: null, // }, @@ -811,10 +729,10 @@ await test('filter', async (t) => { // console.log( // '--->', // await db -// .query('contestant', contestant1) +// .query2('contestant', contestant1) // .include('*', '**') // .get() -// .toObject(), +// , // ) // await db.update('contestant', contestant1, { @@ -824,11 +742,11 @@ await test('filter', async (t) => { // console.log( // '--->', -// await db.query('country', country1).include('*', '**').get().toObject(), +// await db.query2('country', country1).include('*', '**').get(), // ) // console.dir( -// await db.query('contestant').include('*', '**').get().toObject(), +// await db.query2('contestant').include('*', '**').get(), // { // depth: null, // }, @@ -838,7 +756,7 @@ await test('filter', async (t) => { // '--->', // await db // // @ts-ignore -// .query('contestant', { +// .query2('contestant', { // id: 1, // maxVotes: 0, // price: 0, @@ -849,7 +767,7 @@ await test('filter', async (t) => { // }) // .include('*', '**') // .get() -// .toObject(), +// , // ) // }) @@ -870,12 +788,12 @@ await test('single ref save and load', async (t) => { }, } as const - await db.setSchema(schema) + let client = await db.setSchema(schema) const users = [{ email: '1@saulx.com' }, { email: '2@saulx.com' }] for (const user of users) { - await db.upsert('user', user) + await client.upsert('user', { email: user.email }, {}) } await db.stop() @@ -883,39 +801,31 @@ await test('single ref save and load', async (t) => { db = new BasedDb({ path: t.tmp, }) - await db.start() - await db.create('user', { + client = new DbClientClass({ + hooks: getDefaultHooks(db.server), + }) + + await client.create('user', { email: '3@saulx.com', invitedBy: 2, }) - deepEqual( - await db.query('user').include('email', 'invitedBy').get().toObject(), - [ - { id: 1, email: '1@saulx.com', invitedBy: null }, - { id: 2, email: '2@saulx.com', invitedBy: null }, - { - id: 3, - email: '3@saulx.com', - invitedBy: { id: 2, email: '2@saulx.com', name: '' }, - }, - ], - ) + deepEqual(await client.query2('user').include('email', 'invitedBy').get(), [ + { id: 1, email: '1@saulx.com', invitedBy: null }, + { id: 2, email: '2@saulx.com', invitedBy: null }, + { + id: 3, + email: '3@saulx.com', + invitedBy: { id: 2, email: '2@saulx.com', name: '' }, + }, + ]) await db.stop() }) await test('single2many - update refs', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { product: { props: { @@ -964,8 +874,8 @@ await test('single2many - update refs', async (t) => { reviews: [review1, review2, review3], }) - const products = await db.query('product').include('*', '**').get().toObject() - const reviews = await db.query('review').include('*', '**').get().toObject() + const products = await db.query2('product').include('*', '**').get() + const reviews = await db.query2('review').include('*', '**').get() deepEqual(products, [ { id: 1, reviews: [] }, @@ -986,13 +896,7 @@ await test('single2many - update refs', async (t) => { }) await test('reference to a non-existing node', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - t.after(() => t.backup(db)) - await db.start({ clean: true }) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -1021,15 +925,15 @@ await test('reference to a non-existing node', async (t) => { articles: [1], }) // RFE Is this the correct behavior - deepEqual(await db.query('user', mrSnurp).include('**').get(), { + deepEqual(await db.query2('user', mrSnurp).include('**').get(), { id: 1, articles: [], }) - const article = await db.create('article') + const article = await db.create('article', {}) deepEqual(article, 1) - deepEqual(await db.query('user', mrSnurp).include('**').get(), { + deepEqual(await db.query2('user', mrSnurp).include('**').get(), { id: 1, articles: [], }) diff --git a/test/references/referencesIndex.ts b/test/references/referencesIndex.ts index a1a49d6bf7..0a30d87c66 100644 --- a/test/references/referencesIndex.ts +++ b/test/references/referencesIndex.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' -import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' +import test from '../shared/test.js' +import { testDb } from '../shared/index.js' await test('references modify', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -33,24 +27,21 @@ await test('references modify', async (t) => { name: 'marie', }) - const john = db.create('user', { + const john = await db.create('user', { name: 'john', friends: [bob, marie], }) await db.drain() - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [ - { id: 1, name: 'bob' }, - { id: 2, name: 'marie' }, - ], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [ + { id: 1, name: 'bob' }, + { id: 2, name: 'marie' }, + ], + }) await db.update('user', john, { friends: { @@ -74,17 +65,14 @@ await test('references modify', async (t) => { }, }) - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [ - { id: 2, name: 'marie' }, - { id: 1, name: 'bob' }, - ], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [ + { id: 2, name: 'marie' }, + { id: 1, name: 'bob' }, + ], + }) const billy = db.create('user', { name: 'billy', @@ -101,18 +89,15 @@ await test('references modify', async (t) => { }, }) - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [ - { id: 4, name: 'billy' }, - { id: 2, name: 'marie' }, - { id: 1, name: 'bob' }, - ], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [ + { id: 4, name: 'billy' }, + { id: 2, name: 'marie' }, + { id: 1, name: 'bob' }, + ], + }) const malcolm = db.create('user', { name: 'malcolm', @@ -129,19 +114,16 @@ await test('references modify', async (t) => { }, }) - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [ - { id: 4, name: 'billy' }, - { id: 2, name: 'marie' }, - { id: 5, name: 'malcolm' }, - { id: 1, name: 'bob' }, - ], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [ + { id: 4, name: 'billy' }, + { id: 2, name: 'marie' }, + { id: 5, name: 'malcolm' }, + { id: 1, name: 'bob' }, + ], + }) await db.update('user', john, { friends: { @@ -154,19 +136,16 @@ await test('references modify', async (t) => { }, }) - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [ - { id: 4, name: 'billy' }, - { id: 2, name: 'marie' }, - { id: 1, name: 'bob' }, - { id: 5, name: 'malcolm' }, - ], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [ + { id: 4, name: 'billy' }, + { id: 2, name: 'marie' }, + { id: 1, name: 'bob' }, + { id: 5, name: 'malcolm' }, + ], + }) await db.update('user', john, { friends: { @@ -174,40 +153,28 @@ await test('references modify', async (t) => { }, }) - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [ - { id: 4, name: 'billy' }, - { id: 1, name: 'bob' }, - ], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [ + { id: 4, name: 'billy' }, + { id: 1, name: 'bob' }, + ], + }) await db.update('user', john, { friends: null, }) - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [], + }) }) await test('index>len', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -231,7 +198,7 @@ await test('index>len', async (t) => { name: 'marie', }) - const john = db.create('user', { + const john = await db.create('user', { name: 'john', friends: [bob, marie], }) @@ -249,15 +216,12 @@ await test('index>len', async (t) => { }, }) - deepEqual( - (await db.query('user', john).include('*', 'friends').get()).toObject(), - { - id: 3, - name: 'john', - friends: [ - { id: 2, name: 'marie' }, - { id: 1, name: 'bob' }, - ], - }, - ) + deepEqual(await db.query2('user', john).include('*', 'friends').get(), { + id: 3, + name: 'john', + friends: [ + { id: 2, name: 'marie' }, + { id: 1, name: 'bob' }, + ], + }) }) diff --git a/test/references/referencesModify.ts b/test/references/referencesModify.ts index f25805c4df..1955c6da48 100644 --- a/test/references/referencesModify.ts +++ b/test/references/referencesModify.ts @@ -1,15 +1,10 @@ import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual } from '../shared/assert.js' await test('references modify', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db= await testDb(t, { types: { user: { props: { @@ -51,7 +46,7 @@ await test('references modify', async (t) => { await db.drain() deepEqual( - (await db.query('user').include('*', 'friends').get()).toObject(), + await db.query2('user').include('*', 'friends').get(), [ { id: 1, name: 'bob', friends: [] }, { id: 2, name: 'marie', friends: [{ id: 3, name: 'john' }] }, @@ -69,7 +64,7 @@ await test('references modify', async (t) => { await db.drain() deepEqual( - (await db.query('user').include('*', 'friends').get()).toObject(), + await db.query2('user').include('*', 'friends').get(), [ { id: 1, name: 'bob', friends: [{ id: 3, name: 'john' }] }, { id: 2, name: 'marie', friends: [{ id: 3, name: 'john' }] }, @@ -90,7 +85,7 @@ await test('references modify', async (t) => { }) deepEqual( - (await db.query('user').include('*', 'friends').get()).toObject(), + await db.query2('user').include('*', 'friends').get(), [ { id: 1, name: 'bob', friends: [] }, { id: 2, name: 'marie', friends: [] }, @@ -101,13 +96,7 @@ await test('references modify', async (t) => { }) await test('references modify 2', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { a: { name: 'string', @@ -148,13 +137,7 @@ await test('references modify 2', async (t) => { }) await test('reference move', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { a: { name: 'string', @@ -200,35 +183,19 @@ await test('reference move', async (t) => { bees: [b2], }) - deepEqual( - (await db.query('a').include('bees').get()).toObject()[0].bees[0].id, - 2, - ) + deepEqual((await db.query2('a').include('bees').get())[0].bees[0].id, 2) await db.update('a', a, { bees: [b2, b2], }) - deepEqual( - (await db.query('a').include('bees').get()).toObject()[0].bees.length, - 1, - ) - deepEqual( - (await db.query('a').include('bees').get()).toObject()[0].bees[0].id, - 2, - ) + deepEqual((await db.query2('a').include('bees').get())[0].bees.length, 1) + deepEqual((await db.query2('a').include('bees').get())[0].bees[0].id, 2) }) // https://linear.app/1ce/issue/FDN-1735 await test('try to modify undefined refs', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { movie: { name: 'string', @@ -261,9 +228,9 @@ await test('try to modify undefined refs', async (t) => { genre: 'Crime', }) const a1 = db.create('actor', { name: 'Uma Thurman', movies: [m1, m2] }) - const a2 = db.create('actor', { name: 'Jonh Travolta', movies: [m2] }) + db.create('actor', { name: 'John Travolta', movies: [m2] }) - //await db.query('movie').include('*', '**').get().inspect() + //await db.query2('movie').include('*', '**').get().inspect() await db.update('movie', m1, { actors: { delete: [a1] }, diff --git a/test/save/blockHash.ts b/test/save/blockHash.ts index 99ea5757ee..1fa8f363f1 100644 --- a/test/save/blockHash.ts +++ b/test/save/blockHash.ts @@ -1,4 +1,4 @@ -import assert, { equal, notEqual } from 'node:assert' +import assert, { equal } from 'node:assert' import fs from 'node:fs/promises' import path from 'node:path' import { createHash } from 'node:crypto' @@ -7,8 +7,8 @@ import test from '../shared/test.js' import native from '../../src/native.js' import { deepEqual } from '../shared/assert.js' import { getBlockHash } from '../../src/db-server/blocks.js' +import { checksum } from '../../src/db-client/query2/index.js' -const f = (v) => v.map((r) => r.hash) const sha1 = async (path: string) => createHash('sha1') .update(await fs.readFile(path)) @@ -21,7 +21,7 @@ await test('isomorphic types have equal hashes', async (t) => { await db.start({ clean: true }) t.after(() => db.destroy()) - await db.setSchema({ + const schema = { types: { article: { title: 'string', @@ -32,23 +32,24 @@ await test('isomorphic types have equal hashes', async (t) => { body: 'string', }, }, - }) + } as const + const client = await db.setSchema(schema) for (let i = 0; i < 200_000; i++) { - db.create('article', { + client.create('article', { title: 'party in the house', body: 'there was', }) - db.create('story', { + client.create('story', { title: 'party in the house', body: 'there was', }) } - await db.drain() + await client.drain() deepEqual( - (await db.query('article').get()).checksum, - (await db.query('story').get()).checksum, + checksum(await client.query2('article').get()), + checksum(await client.query2('story').get()), ) assert( native.equals( diff --git a/test/save/save.ts b/test/save/save.ts index 2e7dc9626f..a33b692031 100644 --- a/test/save/save.ts +++ b/test/save/save.ts @@ -1,4 +1,4 @@ -import { BasedDb, filter } from '../../src/index.js' +import { BasedDb, DbClient, getDefaultHooks } from '../../src/index.js' import { deepEqual, equal } from '../shared/assert.js' import test from '../shared/test.js' import { setTimeout } from 'node:timers/promises' @@ -10,22 +10,22 @@ await test('simple', async (t) => { await db.start({ clean: true }) t.after(() => db.destroy()) - await db.setSchema({ + const schema = { locales: { - en: { required: true }, - fr: { required: true }, - nl: { required: true }, - el: { required: true }, - he: { required: true }, - it: { required: true }, - lv: { required: true }, - lb: { required: true }, - ro: { required: true }, - sl: { required: true }, - es: { required: true }, - de: { required: true }, - cs: { required: true }, - et: { required: true }, + en: {}, + fr: {}, + nl: {}, + el: {}, + he: {}, + it: {}, + lv: {}, + lb: {}, + ro: {}, + sl: {}, + es: {}, + de: {}, + cs: {}, + et: {}, }, types: { user: { @@ -63,21 +63,22 @@ await test('simple', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) - db.create('user', { + client.create('user', { name: 'youzi', email: 'youzi@yazi.yo', alias: 'best', }) - db.create('user', { + client.create('user', { name: 'youri', email: 'youri@yari.yo', alias: 'alsobest', }) - db.create('typeTest', {}) + client.create('typeTest', {}) - await db.drain() + await client.drain() await db.save() const db2 = new BasedDb({ @@ -85,24 +86,25 @@ await test('simple', async (t) => { }) await db2.start() t.after(() => db2.destroy()) + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) - const a = await db.query('user').get().toObject() - const b = await db2.query('user').get().toObject() + const a = await client.query2('user').get() + const b = await client2.query2('user').get() deepEqual(b, a) - const c = await db.create('user', { name: 'jerp' }) - const d = await db2.create('user', { name: 'jerp' }) + const c = await client.create('user', { name: 'jerp' }) + const d = await client2.create('user', { name: 'jerp' }) equal(c, 3) equal(d, 3) await db2.save() - const user1 = await db2.create('user', { name: 'jerp' }) - + await client2.create('user', { name: 'jerp' }) await db2.save() - const user2 = await db2.create('user', { name: 'jerp' }) - + await client2.create('user', { name: 'jerp' }) await db2.save() }) @@ -131,13 +133,7 @@ await test('empty root', async (t) => { }) await test('refs', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const schema = { types: { group: { props: { @@ -161,24 +157,32 @@ await test('refs', async (t) => { }, }, }, + } as const + + const db = new BasedDb({ + path: t.tmp, }) + await db.start({ clean: true }) + t.after(() => db.destroy()) + + const client = await db.setSchema(schema) - const grp = db.create('group', { + const grp = client.create('group', { name: 'best', }) - db.create('user', { + client.create('user', { name: 'youzi', email: 'youzi@yazi.yo', group: grp, }) - db.create('user', { + client.create('user', { name: 'youri', email: 'youri@yari.yo', group: grp, }) - await db.drain() + await client.drain() await db.save() const db2 = new BasedDb({ @@ -186,9 +190,12 @@ await test('refs', async (t) => { }) t.after(() => db2.destroy()) await db2.start() + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) - const users1 = await db.query('user').include('group').get().toObject() - const users2 = await db2.query('user').include('group').get().toObject() + const users1 = await client.query2('user').include('group').get() + const users2 = await client2.query2('user').include('group').get() deepEqual(users1, users2) }) @@ -228,16 +235,10 @@ await test('auto save', async (t) => { }) await test('text', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const schema = { locales: { en: {}, - fi: { fallback: 'en' }, + fi: { fallback: ['en'] }, }, types: { article: { @@ -247,10 +248,17 @@ await test('text', async (t) => { }, }, }, + } as const + const db = new BasedDb({ + path: t.tmp, }) + await db.start({ clean: true }) + t.after(() => db.destroy()) + + const client = await db.setSchema(schema) // Text: Wikipedia CC BY-SA 4.0 - db.create('article', { + client.create('article', { title: { en: 'Galileo Galilei', fi: 'Galileo Galilei', @@ -260,7 +268,7 @@ await test('text', async (t) => { fi: 'Galileo Galilei (15. helmikuuta 1564 Pisa, Firenzen herttuakunta – 8. tammikuuta 1642 Arcetri, Toscanan suurherttuakunta) oli italialainen tähtitieteilijä, filosofi ja fyysikko. Hänen merkittävimmät saavutuksensa liittyvät tieteellisen menetelmän kehitykseen aristoteelisesta nykyiseen muotoonsa. Häntä on kutsuttu tieteen, klassisen fysiikan ja tähtitieteen isäksi.', }, }) - db.create('article', { + client.create('article', { title: { en: 'Pope Urban VIII', fi: 'Urbanus VIII', @@ -271,7 +279,7 @@ await test('text', async (t) => { }, }) - await db.drain() + await client.drain() await db.save() const db2 = new BasedDb({ @@ -279,9 +287,12 @@ await test('text', async (t) => { }) t.after(() => db2.destroy()) await db2.start() + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) - const articles1 = await db.query('article').get().toObject() - const articles2 = await db2.query('article').get().toObject() + const articles1 = await client.query2('article').get() + const articles2 = await client2.query2('article').get() deepEqual(articles1, articles2) }) @@ -352,18 +363,13 @@ await test.skip('db is drained before save', async (t) => { await db2.start() deepEqual( - await db2.query('person').include('name', 'books').get().toObject(), - await db.query('person').include('name', 'books').get().toObject(), + await db2.query2('person').include('name', 'books').get(), + await db.query2('person').include('name', 'books').get(), ) }) await test('create', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - await db.setSchema({ + const schema = { types: { person: { props: { @@ -372,31 +378,37 @@ await test('create', async (t) => { }, }, }, + } as const + const db = new BasedDb({ + path: t.tmp, }) + await db.start({ clean: true }) + const client = await db.setSchema(schema) + t.after(() => db.destroy()) - db.create('person', { + client.create('person', { name: 'Joe', }) - await db.drain() + await client.drain() await db.save() - db.create('person', { + client.create('person', { name: 'John', }) - await db.drain() + await client.drain() await db.save() - db.create('person', { + client.create('person', { name: 'Neo', alias: 'haxor', }) - await db.drain() + await client.drain() await db.save() - db.create('person', { + client.create('person', { name: 'trinity', alias: 'haxor', }) - await db.drain() + await client.drain() await db.save() // load the same db into a new instance @@ -405,11 +417,11 @@ await test('create', async (t) => { }) await db2.start() t.after(() => db2.destroy()) + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) - deepEqual( - await db2.query('person').get().toObject(), - await db.query('person').get().toObject(), - ) + deepEqual(await client2.query2('person').get(), await client.query2('person').get()) }) await test('upsert', async (t) => { @@ -419,7 +431,7 @@ await test('upsert', async (t) => { await db.start({ clean: true }) t.after(() => db.destroy()) - await db.setSchema({ + const schema = { types: { person: { props: { @@ -429,19 +441,17 @@ await test('upsert', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) - const joe = db.create('person', { + client.create('person', { name: 'Joe', alias: 'boss', }) - await db.drain() + await client.drain() await db.save() - await db.upsert('person', { - alias: 'boss', - age: 42, - }) - await db.drain() + await client.upsert('person', { alias: 'boss' }, { age: 42 }) + await client.drain() await db.save() // load the same db into a new instance @@ -450,11 +460,14 @@ await test('upsert', async (t) => { }) await db2.start() t.after(() => db2.destroy()) + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) - deepEqual(await db.query('person').get(), [ + deepEqual(await client.query2('person').get(), [ { id: 1, name: 'Joe', age: 42, alias: 'boss' }, ]) - deepEqual(await db2.query('person').get(), [ + deepEqual(await client2.query2('person').get(), [ { id: 1, name: 'Joe', age: 42, alias: 'boss' }, ]) }) @@ -466,7 +479,7 @@ await test('alias blocks', async (t) => { await db.start({ clean: true }) t.after(() => db.destroy()) - await db.setSchema({ + const schema = { types: { person: { props: { @@ -475,26 +488,27 @@ await test('alias blocks', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) for (let i = 0; i < 100_000; i++) { - db.create('person', { + client.create('person', { name: 'Joe', }) } - await db.drain() + await client.drain() await db.save() - const john = await db.create('person', { + const john = await client.create('person', { name: 'John', alias: 'bf', }) - await db.drain() + await client.drain() await db.save() - db.update('person', 1, { alias: 'bf' }) + client.update('person', 1, { alias: 'bf' }) for (let id = 2; id < john; id++) { - db.delete('person', id) + client.delete('person', id) } - await db.drain() + await client.drain() await db.save() // load the same db into a new instance @@ -503,10 +517,13 @@ await test('alias blocks', async (t) => { }) await db2.start() t.after(() => db2.destroy()) + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) deepEqual( - await db2.query('person').get().toObject(), - await db.query('person').get().toObject(), + await client2.query2('person').get(), + await client.query2('person').get(), ) }) @@ -517,7 +534,7 @@ await test('simulated periodic save', async (t) => { await db.start({ clean: true }) t.after(() => db.destroy()) - await db.setSchema({ + const schema = { types: { book: { props: { @@ -536,80 +553,78 @@ await test('simulated periodic save', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) // create some people const people = await Promise.all([ - db.create('person', { + client.create('person', { name: 'Slim', alias: 'slim', }), - db.create('person', { + client.create('person', { name: 'Slick', alias: 'slick', }), - db.create('person', { + client.create('person', { name: 'Joe', alias: 'joe', }), - db.create('person', { + client.create('person', { name: 'Ben', alias: 'boss', }), - db.create('person', { + client.create('person', { name: 'Steve', }), ]) - db.update('person', people[1], { + client.update('person', people[1], { bf: people[2], }) // create some books for (let i = 0; i < 1000; i++) { - db.create('book', { + client.create('book', { name: `book ${i}`, isbn: '9789295055025', owner: people[i % people.length], }) } - await db.drain() + await client.drain() await db.save() // more books for (let i = 0; i < 1000; i++) { - db.create('book', { + client.create('book', { name: `book ${1000 + i}`, isbn: '9789295055025', owner: people[i % people.length], }) } - await db.drain() + await client.drain() await db.save() // change a node using an alias - db.upsert('person', { - alias: 'slim', - name: 'Shady', - }) - await db.drain() + client.upsert('person', { alias: 'slim' }, { name: 'Shady' }) + await client.drain() await db.save() // replace alias - db.create('person', { + client.create('person', { name: 'Slide', alias: 'slick', }) - await db.drain() + await client.drain() await db.save() // move alias - await db.update('person', people[4], { + await client.update('person', people[4], { alias: 'boss', }) - await db.drain() + await client.drain() await db.save() // load the same db into a new instance @@ -618,75 +633,68 @@ await test('simulated periodic save', async (t) => { }) await db2.start() t.after(() => db2.destroy()) + const client2 = new DbClient({ + hooks: getDefaultHooks(db.server), + }) // Change node using alias saved deepEqual( - await db - .query('person') + await client + .query2('person') .filter('alias', 'includes', 'slim') .include('alias', 'name') - .get() - .toObject(), + .get(), [{ id: 1, alias: 'slim', name: 'Shady' }], ) deepEqual( - await db2 - .query('person') + await client2 + .query2('person') .filter('alias', 'includes', 'slim') .include('alias', 'name') - .get() - .toObject(), + .get(), [{ id: 1, alias: 'slim', name: 'Shady' }], ) // Replace alias saved deepEqual( - await db - .query('person') + await client + .query2('person') .filter('alias', 'includes', 'slick') .include('alias', 'name') - .get() - .toObject(), + .get(), [{ id: 6, alias: 'slick', name: 'Slide' }], ) deepEqual( - await db2 - .query('person') + await client2 + .query2('person') .filter('alias', 'includes', 'slick') .include('alias', 'name') - .get() - .toObject(), + .get(), [{ id: 6, alias: 'slick', name: 'Slide' }], ) // Move alias saved deepEqual( - await db - .query('person') + await client + .query2('person') .filter('alias', 'includes', 'boss') .include('alias', 'name') - .get() - .toObject(), + .get(), [{ id: 5, name: 'Steve', alias: 'boss' }], ) deepEqual( - await db2 - .query('person') + await client2 + .query2('person') .filter('alias', 'includes', 'boss') .include('alias', 'name') - .get() - .toObject(), + .get(), [{ id: 5, name: 'Steve', alias: 'boss' }], ) // All have the same books deepEqual( - await db2 - .query('person') - .include('name', 'alias', 'books') - .get() - .toObject(), - await db.query('person').include('name', 'alias', 'books').get().toObject(), + await client2.query2('person').include('name', 'alias', 'books').get(), + await client.query2('person').include('name', 'alias', 'books').get(), ) }) @@ -695,9 +703,9 @@ await test('edge val', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { round: { name: 'alias', @@ -727,11 +735,11 @@ await test('edge val', async (t) => { }, }) - const sequence1 = await db.create('sequence', {}) - const sequence2 = await db.create('sequence', {}) - const scenario1 = await db.create('scenario', {}) - const scenario2 = await db.create('scenario', {}) - const phase = await db.create('phase', { + const sequence1 = await client.create('sequence', {}) + const sequence2 = await client.create('sequence', {}) + const scenario1 = await client.create('scenario', {}) + const scenario2 = await client.create('scenario', {}) + const phase = await client.create('phase', { scenarios: [ { id: scenario1, @@ -740,7 +748,7 @@ await test('edge val', async (t) => { ], }) await db.save() - db.update('phase', phase, { + client.update('phase', phase, { scenarios: { add: [ { @@ -750,15 +758,15 @@ await test('edge val', async (t) => { ], }, }) - //await db.query('phase').include('scenarios.$sequence').get().inspect() + //await client.query2('phase').include('scenarios.$sequence').get().inspect() await db.save() - await db.update('phase', phase, { + await client.update('phase', phase, { scenarios: { delete: [scenario1], }, }) - //await db.query('phase').include('scenarios.$sequence').get().inspect() + //await client.query2('phase').include('scenarios.$sequence').get().inspect() }) await test('no mismatch', async (t) => { @@ -768,7 +776,7 @@ await test('no mismatch', async (t) => { await db.start({ clean: true }) t.after(() => db.stop(true)) - await db.setSchema({ + const schema = { types: { user: { props: { @@ -776,9 +784,10 @@ await test('no mismatch', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) - await db.create('user', { + await client.create('user', { name: 'xxx', }) @@ -787,15 +796,17 @@ await test('no mismatch', async (t) => { const db2 = new BasedDb({ path: t.tmp, }) - t.after(() => t.backup(db2)) - await db2.start() - await db2.create('user', { - name: 'xxx', + + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), }) - await db2.create('user', { + await client2.create('user', { + name: 'xxx', + }) + await client2.create('user', { name: 'xxx2', }) diff --git a/test/save/saveEdge.ts b/test/save/saveEdge.ts index 9c88a7bfb3..4ed16881e5 100644 --- a/test/save/saveEdge.ts +++ b/test/save/saveEdge.ts @@ -7,9 +7,9 @@ await test('save edge', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { user: { props: { @@ -23,8 +23,8 @@ await test('save edge', async (t) => { }, }) - const user1 = await db.create('user', {}) - const user2 = await db.create('user', { + const user1 = await client.create('user', {}) + const user2 = await client.create('user', { bestFriend: { id: user1, $uint8: 21, @@ -33,14 +33,14 @@ await test('save edge', async (t) => { await db.save() - await db.update('user', user2, { + await client.update('user', user2, { bestFriend: { id: user1, $uint8: 42, }, }) - deepEqual(await db.query('user', user2).include('**').get(), { + deepEqual(await client.query2('user', user2).include('**').get(), { id: 2, bestFriend: { id: 1, diff --git a/test/save/saveInterval.ts b/test/save/saveInterval.ts index e86b3fbaa9..a9deecd5c5 100644 --- a/test/save/saveInterval.ts +++ b/test/save/saveInterval.ts @@ -1,5 +1,5 @@ import { setTimeout } from 'node:timers/promises' -import { BasedDb } from '../../src/index.js' +import { BasedDb, DbClient, getDefaultHooks } from '../../src/index.js' import test from '../shared/test.js' import { deepEqual } from '../shared/assert.js' @@ -11,7 +11,7 @@ await test('saveInterval', async (t) => { await db.start({ clean: true }) t.after(() => db.destroy()) - await db.setSchema({ + const schema = { types: { user: { props: { @@ -20,23 +20,23 @@ await test('saveInterval', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) - db.create('user', { + client.create('user', { externalId: 'cool', potato: 'fries', }) - db.create('user', { + client.create('user', { externalId: 'cool2', potato: 'wedge', }) - await db.drain() - + await client.drain() await setTimeout(1e3) - const res1 = await db.query('user').get().toObject() + const res1 = await client.query2('user').get() await db.stop(true) @@ -45,9 +45,12 @@ await test('saveInterval', async (t) => { }) await db2.start() t.after(() => db2.destroy()) + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) await db2.schemaIsSet() - const res2 = await db2.query('user').get().toObject() + const res2 = await client2.query2('user').get() deepEqual(res1, res2) }) diff --git a/test/save/saveRange.ts b/test/save/saveRange.ts index c7349e217b..ab75fdcfa5 100644 --- a/test/save/saveRange.ts +++ b/test/save/saveRange.ts @@ -1,28 +1,9 @@ -import { createHash } from 'node:crypto'; import { readdir } from 'node:fs/promises' -import { BasedDb, DbServer } from '../../src/index.js' +import { BasedDb, DbClient, getDefaultHooks } from '../../src/index.js' import test from '../shared/test.js' import { italy } from '../shared/examples.js' import { deepEqual, equal, notEqual } from '../shared/assert.js' -import { getBlockHash, getBlockStatuses } from '../../src/db-server/blocks.js' - -const getActiveBlocks = async (db: DbServer, tc: number): Promise> => (await getBlockStatuses(db, tc)).reduce((acc, cur, i) => { - if (cur) { - acc.push(i) - } - return acc -}, [] as Array) -const block2start = (block: number, capacity: number): number => block * capacity + 1; -const hashType = async (db: DbServer, typeName: string): Promise => { - const tc = db.schemaTypesParsed[typeName].id - const capacity = db.schemaTypesParsed[typeName].blockCapacity - const hash = createHash('sha256') - const bhs = await Promise.all((await getActiveBlocks(db, tc)).map((block) => getBlockHash(db, tc, block2start(block, capacity)))) - for (const bh of bhs) { - hash.update(bh) - } - return hash.digest('hex') -} +import { countDirtyBlocks, hashType } from '../shared/index.js' await test('save simple range', async (t) => { const db = new BasedDb({ @@ -33,7 +14,7 @@ await test('save simple range', async (t) => { return db.destroy() }) - await db.setSchema({ + const schema = { types: { user: { props: { @@ -44,7 +25,8 @@ await test('save simple range', async (t) => { }, }, }, - }) + } as const + const client = await db.setSchema(schema) const N = 800_000 const slen = 80 @@ -59,7 +41,7 @@ await test('save simple range', async (t) => { xn1 ^= xn2 } - db.create('user', { + client.create('user', { age: i, name: 'mr flop ' + i, email: 'abuse@disaster.co.uk', @@ -67,26 +49,23 @@ await test('save simple range', async (t) => { }) } - await db.drain() + await client.drain() const save1_start = performance.now() await db.save() const save1_end = performance.now() const firstHash = await hashType(db.server, 'user') - db.update('user', 1, { + client.update('user', 1, { age: 1337, }) - await db.drain() - deepEqual( - (await db.query('user').include('age').range(0, 1).get()).toObject(), - [ - { - id: 1, - age: 1337, - }, - ], - ) + await client.drain() + deepEqual(await client.query2('user').include('age').range(0, 1).get(), [ + { + id: 1, + age: 1337, + }, + ]) const save2_start = performance.now() await db.save() @@ -114,35 +93,34 @@ await test('save simple range', async (t) => { 'schema.bin', ]) - const load_start = performance.now() - const newDb = new BasedDb({ + //const load_start = performance.now() + const db2 = new BasedDb({ path: t.tmp, }) - await newDb.start() - t.after(() => newDb.destroy()) - const load_end = performance.now() + await db2.start() + t.after(() => db2.destroy()) + //const load_end = performance.now() + + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) - const thirdHash = await hashType(newDb.server, 'user') + const thirdHash = await hashType(db2.server, 'user') notEqual(firstHash, secondHash) equal(secondHash, thirdHash) + deepEqual(await client2.query2('user').include('age').range(0, 1).get(), [ + { + id: 1, + age: 1337, + }, + ]) deepEqual( - (await newDb.query('user').include('age').range(0, 1).get()).toObject(), - [ - { - id: 1, - age: 1337, - }, - ], - ) - deepEqual( - ( - await newDb - .query('user') - .include('age') - .range(200000, 200000 + 1) - .get() - ).toObject(), + await client2 + .query2('user') + .include('age') + .range(200000, 200000 + 1) + .get(), [ { id: 200001, @@ -151,28 +129,23 @@ await test('save simple range', async (t) => { ], ) - deepEqual( - (await newDb.query('user').include('name').range(0, 2).get()).toObject(), - [ - { - id: 1, - name: 'mr flop 1', - }, - { - id: 2, - name: 'mr flop 2', - }, - ], - ) + deepEqual(await client2.query2('user').include('name').range(0, 2).get(), [ + { + id: 1, + name: 'mr flop 1', + }, + { + id: 2, + name: 'mr flop 2', + }, + ]) deepEqual( - ( - await newDb - .query('user') - .include('name') - .range(200_000, 200_000 + 2) - .get() - ).toObject(), + await client2 + .query2('user') + .include('name') + .range(200_000, 200_000 + 2) + .get(), [ { id: 200001, @@ -186,24 +159,14 @@ await test('save simple range', async (t) => { ) }) -async function countDirtyBlocks(server: DbServer) { - let n = 0 - - for (const t of Object.keys(server.schemaTypesParsedById)) { - n += (await getBlockStatuses(server, Number(t))).reduce((acc, cur) => acc + ~~!!(cur & 0x4), 0) - } - - return n -} - await test('reference changes', async (t) => { const db = new BasedDb({ path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { user: { props: { @@ -222,45 +185,65 @@ await test('reference changes', async (t) => { }) const users = Array.from({ length: 3 }, (_, k) => - db.create('user', { + client.create('user', { name: 'mr flop ' + k, }), ) - await db.drain() - equal(await countDirtyBlocks(db.server), 1, 'creating new users creates a dirty range') + await client.drain() + equal( + await countDirtyBlocks(db.server), + 1, + 'creating new users creates a dirty range', + ) - db.create('doc', { + client.create('doc', { title: 'The Wonders of AI', creator: users[0], }) - await db.drain() - equal(await countDirtyBlocks(db.server), 2, 'creating nodes in two types makes both dirty') + await client.drain() + equal( + await countDirtyBlocks(db.server), + 2, + 'creating nodes in two types makes both dirty', + ) await db.save() equal(await countDirtyBlocks(db.server), 0, 'saving clears dirt') - const doc2 = db.create('doc', { + const doc2 = client.create('doc', { title: 'The Slops of AI', }) - const doc3 = db.create('doc', { + const doc3 = client.create('doc', { title: 'The Hype of AI', }) - await db.drain() - equal(await countDirtyBlocks(db.server), 1, 'creating docs makes the range dirty') + await client.drain() + equal( + await countDirtyBlocks(db.server), + 1, + 'creating docs makes the range dirty', + ) await db.save() equal(await countDirtyBlocks(db.server), 0, 'saving clears dirt') // Link user -> doc - db.update('user', users[1], { docs: [doc2] }) - await db.drain() - equal(await countDirtyBlocks(db.server), 2, 'Linking a user to doc makes both dirty') + client.update('user', users[1], { docs: [doc2] }) + await client.drain() + equal( + await countDirtyBlocks(db.server), + 2, + 'Linking a user to doc makes both dirty', + ) await db.save() equal(await countDirtyBlocks(db.server), 0, 'saving clears dirt') // Link doc -> user - db.update('doc', doc3, { creator: users[2] }) - await db.drain() - equal(await countDirtyBlocks(db.server), 2, 'Linking a doc to user makes both dirty') + client.update('doc', doc3, { creator: users[2] }) + await client.drain() + equal( + await countDirtyBlocks(db.server), + 2, + 'Linking a doc to user makes both dirty', + ) await db.save() equal(await countDirtyBlocks(db.server), 0, 'saving clears dirt') }) @@ -270,9 +253,9 @@ await test('ref block moves', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { a: { props: { @@ -289,22 +272,22 @@ await test('ref block moves', async (t) => { }, }) - const a1 = await db.create('a', { x: 1 }) - const b1 = await db.create('b', { y: 1, aref: a1 }) + const a1 = await client.create('a', { x: 1 }) + const b1 = await client.create('b', { y: 1, aref: a1 }) for (let i = 0; i < 100_000; i++) { - db.create('a', { x: i % 256 }) - db.create('b', { y: i % 256 }) + client.create('a', { x: i % 256 }) + client.create('b', { y: i % 256 }) } - await db.drain() + await client.drain() for (let i = 0; i < 100_000; i++) { - db.delete('a', i + 2) - db.delete('b', i + 2) + client.delete('a', i + 2) + client.delete('b', i + 2) } - const an = await db.create('a', { x: 2 }) - const bn = await db.create('b', { y: 2, aref: an }) + const an = await client.create('a', { x: 2 }) + const bn = await client.create('b', { y: 2, aref: an }) await db.save() - await db.update('a', a1, { bref: bn }) + await client.update('a', a1, { bref: bn }) // t.backup will continue the test from here }) @@ -313,9 +296,9 @@ await test('ref removal', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { a: { props: { @@ -333,12 +316,12 @@ await test('ref removal', async (t) => { }) for (let i = 0; i < 100_000; i++) { - const a = db.create('a', { x: i % 256 }) - db.create('b', { y: 255 - (i % 256), aref: a }) + const a = client.create('a', { x: i % 256 }) + client.create('b', { y: 255 - (i % 256), aref: a }) } await db.save() for (let i = 0; i < 100_000; i++) { - db.update('a', i + 1, { bref: null }) + client.update('a', i + 1, { bref: null }) } // t.backup will continue the test from here @@ -349,9 +332,9 @@ await test('refs removal with delete', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { a: { props: { @@ -368,12 +351,12 @@ await test('refs removal with delete', async (t) => { }, }) - const a = db.create('a', { x: 13 }) + const a = client.create('a', { x: 13 }) for (let i = 0; i < 10; i++) { - db.create('b', { y: 255 - (i % 256), aref: a }) + client.create('b', { y: 255 - (i % 256), aref: a }) } await db.save() - db.delete('a', a) + client.delete('a', a) }) await test('large block gap', async (t) => { @@ -381,9 +364,9 @@ await test('large block gap', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { b: { blockCapacity: 10_000, @@ -394,11 +377,11 @@ await test('large block gap', async (t) => { }, }) - db.create('b', { + client.create('b', { y: 10, }) for (let i = 268435456; i < 268468224; i++) { - db.create( + client.create( 'b', { id: i, @@ -408,5 +391,5 @@ await test('large block gap', async (t) => { ) } - await db.drain() + await client.drain() }) diff --git a/test/scenarios/e-commerce.ts b/test/scenarios/e-commerce.ts index 19a3a1da6a..425f33876c 100644 --- a/test/scenarios/e-commerce.ts +++ b/test/scenarios/e-commerce.ts @@ -1,4 +1,4 @@ -import { errors } from '../../src/db-client/modify/error.js' +import { errors } from '../../src/db-client/_modify/error.js' import { BasedDb } from '../../src/index.js' import { throws, equal, isSorted } from '../shared/assert.js' import test from '../shared/test.js' @@ -34,9 +34,9 @@ await test('E-commerce Simulation', async (t) => { clearInterval(intervalId) }) - t.after(async () => t.backup(db)) + t.after(async () => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ locales: { en: {}, de: {} }, // Add locales for text fields types: { user: { @@ -135,7 +135,7 @@ await test('E-commerce Simulation', async (t) => { const userIdsArr: any[] = [] for (let i = 0; i < initialCategories; i++) { - const catId = db.create('category', { + const catId = client.create('category', { name: `Category ${i}`, description: { en: `Description for category ${i}` }, }) @@ -145,7 +145,7 @@ await test('E-commerce Simulation', async (t) => { } for (let i = 0; i < initialUsers; i++) { - const userId = db.create('user', { + const userId = client.create('user', { name: `User ${i}`, email: `user${i}@example.com`, lastLogin: Math.max( @@ -161,7 +161,7 @@ await test('E-commerce Simulation', async (t) => { for (let i = 0; i < initialProducts; i++) { const category = getRandom(categoryIds) if (category) { - const prodId = db.create('product', { + const prodId = client.create('product', { name: `Product ${i} ${randomString(5)}`, description: { en: `This is product ${i}. ${randomString(50)}`, @@ -177,7 +177,7 @@ await test('E-commerce Simulation', async (t) => { totalItemsCreated++ } } - await db.drain() + await client.drain() // --- Simulation Loop --- let totalAliasUpdate = 0 @@ -192,7 +192,7 @@ await test('E-commerce Simulation', async (t) => { const entityType = Math.random() if (entityType < 0.1 && categoryIds < 500) { // Create Category - const catId = await db.create('category', { + const catId = await client.create('category', { name: `New Category ${totalItemsCreated}`, description: { en: `Dynamic category ${totalItemsCreated}` }, }) @@ -201,7 +201,7 @@ await test('E-commerce Simulation', async (t) => { totalItemsCreated++ } else if (entityType < 0.4 && userIds < 10000) { // Create User - const userId = await db.create('user', { + const userId = await client.create('user', { name: `User ${totalItemsCreated}`, email: `user${totalItemsCreated}@example.com`, }) @@ -212,7 +212,7 @@ await test('E-commerce Simulation', async (t) => { // Create Product const category = getRandom(categoryIds) if (category) { - const prodId = await db.create('product', { + const prodId = await client.create('product', { name: `Product ${totalItemsCreated} ${randomString(5)}`, description: { en: `Desc ${totalItemsCreated}` }, price: randomPrice(), @@ -229,7 +229,7 @@ await test('E-commerce Simulation', async (t) => { const user = getRandom(userIds) const product = getRandom(productIds) if (user && product) { - const reviewId = await db.create('review', { + const reviewId = await client.create('review', { user, // product, rating: (Math.floor(Math.random() * 5) + 1) as 1 | 2 | 3 | 4 | 5, @@ -281,8 +281,7 @@ await test('E-commerce Simulation', async (t) => { // Update User (Name/Email via Upsert) const oldEmail = `user${getRandom(userIds)}@example.com` if (oldEmail) { - await db.upsert('user', { - email: oldEmail, // Find by alias + await client.upsert('user', { email: oldEmail }, { name: `Updated Name ${randomString(4)}`, lastLogin: Date.now(), }) @@ -308,21 +307,21 @@ await test('E-commerce Simulation', async (t) => { const idx = Math.floor(Math.random() * productIdsArr.length) const productId = productIdsArr[idx] if (productId) { - await db.delete('product', productId).catch(catchNotExists) + await client.delete('product', productId).catch(catchNotExists) productIdsArr.splice(idx, 1) } } else if (entityType < 0.6 && userIdsArr.length > 50) { const idx = Math.floor(Math.random() * userIdsArr.length) const userId = userIdsArr[idx] if (userId) { - await db.delete('user', userId).catch(catchNotExists) + await client.delete('user', userId).catch(catchNotExists) userIdsArr.splice(idx, 1) } } else if (reviewIdsArr.length > 10) { const idx = Math.floor(Math.random() * reviewIdsArr.length) const reviewId = reviewIdsArr[idx] if (reviewId) { - await db.delete('review', reviewId).catch(catchNotExists) + await client.delete('review', reviewId).catch(catchNotExists) reviewIdsArr.splice(idx, 1) } } @@ -332,7 +331,7 @@ await test('E-commerce Simulation', async (t) => { const queryType = Math.random() if (queryType < 0.1) { isSorted( - await db.query('user').sort('lastLogin', 'asc').get(), + await client.query2('user').sort('lastLogin', 'asc').get(), 'lastLogin', 'asc', ) @@ -341,7 +340,7 @@ await test('E-commerce Simulation', async (t) => { const categoryId = getRandom(categoryIds) if (categoryId) { await db - .query('product') + .query2('product') .filter('category', '=', categoryId) .sort('price', Math.random() > 0.5 ? 'asc' : 'desc') .include('name', 'price', 'stock') @@ -354,7 +353,7 @@ await test('E-commerce Simulation', async (t) => { if (userId) { // console.log({ userId }) await db - .query('user', userId) + .query2('user', userId) .include( 'name', 'viewedProducts.name', @@ -368,7 +367,7 @@ await test('E-commerce Simulation', async (t) => { const productId = getRandom(productIds) if (productId) { await db - .query('review') + .query2('review') .filter('product', '=', productId) .sort('rating', 'desc') .include('rating', 'comment', 'user.name') @@ -390,7 +389,7 @@ await test('E-commerce Simulation', async (t) => { } if (searchTerm) { await db - .query('product') + .query2('product') .search(searchTerm, 'name', 'description') .include('name', 'price') .range(0, 5) @@ -400,7 +399,7 @@ await test('E-commerce Simulation', async (t) => { // Get user by email (alias) const email = `user${getRandom(userIds)}@example.com` if (email) { - await db.query('user', { email }).get() + await client.query2('user', { email }).get() } } } @@ -410,13 +409,13 @@ await test('E-commerce Simulation', async (t) => { // Occasionally try invalid operations await throws( async () => - db.create('product', { name: 'Too expensive', price: 20000 }), + client.create('product', { name: 'Too expensive', price: 20000 }), false, 'Validation: Price too high', ) await throws( async () => - db.create('review', { + client.create('review', { rating: 6, user: getRandom(userIds), product: getRandom(productIds), @@ -484,7 +483,7 @@ await test('E-commerce Simulation', async (t) => { await wait(500) const finalProductCount = ( - await db.query('product').range(0, 10_000_000).get() + await client.query2('product').range(0, 10_000_000).get() ).length equal( diff --git a/test/scenarios/northwind.ts b/test/scenarios/northwind.ts index 4429d24843..de0458e27a 100644 --- a/test/scenarios/northwind.ts +++ b/test/scenarios/northwind.ts @@ -1,4 +1,3 @@ -import { BasedDb } from '../../src/index.js' // import { mermaid } from '@based/schema-diagram' import { deepCopy } from '../../src/utils/index.js' import test from '../shared/test.js' @@ -7,15 +6,10 @@ import { deepEqual } from '../shared/assert.js' import type { SchemaIn } from '../../src/schema/index.js' await test('Basic SQL', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await createNorthwindDb(db) + const db = await createNorthwindDb(t) // 1. Retrieve all columns in the Region table. - const r1 = await db.query('region').include('*').get() + const r1 = await db.query2('region').include('*').get() deepEqual(r1, [ { id: 1, @@ -36,7 +30,7 @@ await test('Basic SQL', async (t) => { ]) // 2. Select the FirstName and LastName columns from the Employees table. - const r2 = await db.query('employees').include('firstName', 'lastName').get() + const r2 = await db.query2('employees').include('firstName', 'lastName').get() deepEqual(r2, [ { id: 1, lastName: 'Davolio', firstName: 'Nancy' }, { id: 2, lastName: 'Fuller', firstName: 'Andrew' }, @@ -52,7 +46,7 @@ await test('Basic SQL', async (t) => { // 3. Select the FirstName and LastName columns from the Employees table. // Sort by LastName. const r3 = await db - .query('employees') + .query2('employees') .include('firstName', 'lastName') .sort('lastName') .get() @@ -72,7 +66,7 @@ await test('Basic SQL', async (t) => { // expensive to cheapest. // Show OrderId, OrderDate, ShippedDate, CustomerId, and Freight. const r4 = await db - .query('orders') + .query2('orders') .include('orderDate', 'shippedDate', 'customer.id', 'freight') .sort('freight', 'desc') .range(0, 3) @@ -103,7 +97,7 @@ await test('Basic SQL', async (t) => { // 5. Create a report showing the title and the first and last name of all sales representatives. const r5 = await db - .query('employees') + .query2('employees') .include('title', 'firstName', 'lastName') .filter('title', '=', 'Sales Representative') .get() @@ -152,7 +146,7 @@ await test('Basic SQL', async (t) => { // 6a. Create a report showing the first and last names of all employees who have a region specified. const r6a = await db - .query('employees') + .query2('employees') .include('firstName', 'lastName', 'region') .filter('region', '!=', '') .get() @@ -170,7 +164,7 @@ await test('Basic SQL', async (t) => { // 6b. Create a report showing the first and last names of all employees who don't have a region specified. const r6b = await db - .query('employees') + .query2('employees') .include('firstName', 'lastName', 'region') .filter('region', '=', '') .get() @@ -189,7 +183,7 @@ await test('Basic SQL', async (t) => { // with a letter in the last half of the alphabet. // Sort by LastName in descending order. // TODO - // const r7 = await db.query('employees').include('firstName', 'lastName').filter('lastName', 'startsWith', ?? + // const r7 = await db.query2('employees').include('firstName', 'lastName').filter('lastName', 'startsWith', ?? // 8. Create a report showing the title of courtesy and the first and last name of all employees // whose title of courtesy begins with "M". @@ -199,7 +193,7 @@ await test('Basic SQL', async (t) => { // Seattle or Redmond. // TODO Impossible to OR const r9 = await db - .query('employees') + .query2('employees') .include('firstName', 'lastName', 'title', 'city', 'region') .filter('title', 'includes', 'Sales') .filter('region', '!=', '') @@ -240,7 +234,7 @@ await test('Basic SQL', async (t) => { // customers in Mexico or in any city in Spain except Madrid. // TODO Impossible const r10 = await db - .query('customers') + .query2('customers') .include('companyName', 'contactTitle', 'city', 'country') //.filter('country', 'includes', ['Mexico', 'Spain']) .filter('country', 'includes', ['Mexico', 'Spain']) @@ -309,7 +303,7 @@ await test('Basic SQL', async (t) => { // 12. Find the Total Number of Units Ordered of Product ID 3 const r12 = await db - .query('orderDetails') + .query2('orderDetails') .filter('product', '=', 3) .count() .get() @@ -320,7 +314,7 @@ await test('Basic SQL', async (t) => { ) // 13. Retrieve the number of employees in each city - const r13 = await db.query('employees').groupBy('city').count().get() + const r13 = await db.query2('employees').groupBy('city').count().get() deepEqual( r13, { @@ -339,7 +333,7 @@ await test('Basic SQL', async (t) => { // 15. Find the Companies (the CompanyName) that placed orders in 1997 const r15 = await db - .query('orders') + .query2('orders') .include('orderDate', 'customer.companyName') .filter('orderDate', '..', [ new Date('1997'), @@ -379,7 +373,7 @@ await test('Basic SQL', async (t) => { // Sort by Company Name. // TODO filter by field? const r17 = await db - .query('orders') + .query2('orders') .include( 'customer.companyName', 'employee.firstName', @@ -430,7 +424,10 @@ await test('Basic SQL', async (t) => { // SELECT * FROM Customers // WHERE country='Mexico'; - const r19 = await db.query('customers').filter('country', '=', 'Mexico').get() + const r19 = await db + .query2('customers') + .filter('country', '=', 'Mexico') + .get() deepEqual( r19, [ @@ -510,7 +507,7 @@ await test('Basic SQL', async (t) => { // SELECT * FROM products ORDER BY price; const r20 = await db - .query('products') + .query2('products') .sort('unitPrice', 'desc') .range(0, 4) .get() @@ -563,7 +560,7 @@ await test('Basic SQL', async (t) => { // SELECT * FROM products ORDER BY price; const r21 = await db - .query('products') + .query2('products') .sort('unitPrice', 'desc') .range(0, 3) .get() @@ -606,7 +603,7 @@ await test('Basic SQL', async (t) => { // SELECT * FROM customers WHERE country IN ('Germany', 'France', 'UK'); const r22 = await db - .query('customers') + .query2('customers') .filter('country', '=', ['Germany', 'France', 'UK']) .range(0, 3) .get() @@ -661,7 +658,7 @@ await test('Basic SQL', async (t) => { // SELECT * FROM products WHERE unitPrice BETWEEN 10 AND 20 ORDER BY price; const r23 = await db - .query('products') + .query2('products') .filter('unitPrice', '..', [10, 20]) .sort('unitPrice', 'desc') .get() @@ -923,9 +920,9 @@ await test('Basic SQL', async (t) => { ) // SELECT customer_id AS ID, company_name AS customer FROM customers; - const r24 = ( - await db.query('customers').include('companyName').get().toObject() - ).map((r) => ({ id: r.id, customer: r.companyName })) + const r24 = (await db.query2('customers').include('companyName').get()).map( + (r) => ({ id: r.id, customer: r.companyName }), + ) deepEqual( r24, [ @@ -1031,17 +1028,17 @@ await test('Basic SQL', async (t) => { // SELECT 'supplier', contact_name, city, country // FROM Suppliers const r25unionA = await db - .query('customers') + .query2('customers') .include('contactName', 'city', 'country') .range(0, 2) .get() - .toObject() + const r25unionB = await db - .query('suppliers') + .query2('suppliers') .include('contactName', 'city', 'country') .range(0, 2) .get() - .toObject() + const r25union = [ ...r25unionA.map((r) => ({ type: 'customer', ...r })), ...r25unionB.map((r) => ({ type: 'supplier', ...r })), @@ -1089,17 +1086,17 @@ await test('Basic SQL', async (t) => { // WHERE Country='Germany' // ORDER BY City; const r26unionAllA = await db - .query('customers') + .query2('customers') .include('city', 'country') .range(0, 3) .get() - .toObject() + const r26unionAllB = await db - .query('suppliers') + .query2('suppliers') .include('city', 'country') .range(0, 3) .get() - .toObject() + const r26unionAll = [ ...r26unionAllA.map(({ city, country }) => ({ city, country })), ...r26unionAllB.map(({ city, country }) => ({ city, country })), @@ -1119,12 +1116,7 @@ await test('Basic SQL', async (t) => { }) await test('insert and update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await createNorthwindDb(db) + const db =await createNorthwindDb(t) // INSERT INTO customers (company_name, contact_name, address, city, postal_code, country) // VALUES ('Cardinal', 'Tom B. Erichsen', 'Skagen 21', 'Stavanger', '4006', 'Norway'); @@ -1139,7 +1131,7 @@ await test('insert and update', async (t) => { deepEqual( await db - .query('customers') + .query2('customers') .include('*') .filter('companyName', '=', 'Cardinal') .get(), @@ -1170,7 +1162,7 @@ await test('insert and update', async (t) => { deepEqual( await db - .query('customers') + .query2('customers') .include('*') .filter('companyName', '=', 'Cardinal') .get(), @@ -1197,17 +1189,16 @@ await test('insert and update', async (t) => { 'customers', ( await db - .query('customers') + .query2('customers') .include('id') .filter('companyName', '=', 'Cardinal') .get() - .toObject() )[0].id, ) deepEqual( await db - .query('customers') + .query2('customers') .include('*') .filter('companyName', '=', 'Cardinal') .get(), @@ -1216,19 +1207,14 @@ await test('insert and update', async (t) => { }) await test('inner join', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await createNorthwindDb(db) + const db = await createNorthwindDb(t) // SELECT orders.order_id, customers.company_name, orders.order_date // FROM orders // INNER JOIN customers ON orders.customer_id = customers.customer_id; deepEqual( await db - .query('orders') + .query2('orders') .include('customer.companyName', 'orderDate') .range(0, 10) .get(), @@ -1288,22 +1274,17 @@ await test('inner join', async (t) => { }) await test('left join', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await createNorthwindDb(db) + const db = await createNorthwindDb(t) // SELECT customers.company_name, orders.order_id // FROM customers // LEFT JOIN orders // ON customers.customer_id = orders.customer_id // ORDER BY customers.company_name; - //console.log(await db.query('customers').include('companyName', (q) => q('orders').filter('customerId' '=' ??) + //console.log(await db.query2('customers').include('companyName', (q) => q('orders').filter('customerId' '=' ??) deepEqual( await db - .query('customers') + .query2('customers') .include('companyName', (q) => q('orders').include('id')) .sort('companyName') .range(0, 5) @@ -1388,22 +1369,17 @@ await test.skip('right join', async (t) => { }) await test('full join', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await createNorthwindDb(db) + const db = await createNorthwindDb(t) db.delete( 'customers', - (await db.query('customers', { customerId: 'WELLI' }).get()).id!, + (await db.query2('customers', { customerId: 'WELLI' }).get()).id!, ) // Delete orders by WANDK - const wandk = await db.query('customers', { customerId: 'WANDK' }).get() + const wandk = await db.query2('customers', { customerId: 'WANDK' }).get() const wandkOrders = await db - .query('orders') + .query2('orders') .filter('customer', '=', wandk) .get() for (const order of wandkOrders) { @@ -1415,12 +1391,9 @@ await test('full join', async (t) => { // FULL OUTER JOIN orders ON customers.customer_id=orders.customer_id // ORDER BY customers.company_name; - const customers = await db.query('customers').get().toObject() - const orders = await db - .query('orders') - .include('customer.id') - .get() - .toObject() + const customers = await db.query2('customers').get() + const orders = await db.query2('orders').include('customer.id').get() + const result: any[] = [] // LEFT JOIN: Customers with Orders @@ -1470,12 +1443,7 @@ await test('full join', async (t) => { }) await test('self join', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await createNorthwindDb(db) + const db = await createNorthwindDb(t) // SELECT A.company_name AS CustomerName1, B.company_name AS CustomerName2, A.City // FROM customers A, customers B @@ -1485,10 +1453,9 @@ await test('self join', async (t) => { const result: any[] = [] ;( (await db - .query('customers') + .query2('customers') .include('customerId', 'companyName', 'city') - .get() - .toObject()) as { + .get()) as { id: number customerId: string city: string @@ -1512,17 +1479,12 @@ await test('self join', async (t) => { }) await test('aggregates', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - await createNorthwindDb(db) + const db = await createNorthwindDb(t) // min // SELECT MIN(unit_price) // FROM products; - deepEqual(await db.query('products').min('unitPrice').get(), { + deepEqual(await db.query2('products').min('unitPrice').get(), { unitPrice: { min: 2.5 }, }) @@ -1531,7 +1493,7 @@ await test('aggregates', async (t) => { // FROM products // GROUP BY category_id; deepEqual( - await db.query('products').min('unitPrice').groupBy('category').get(), + await db.query2('products').min('unitPrice').groupBy('category').get(), { '1': { unitPrice: { min: 4.5 } }, '2': { unitPrice: { min: 10 } }, @@ -1547,20 +1509,20 @@ await test('aggregates', async (t) => { // max // SELECT MAX(unit_price) // FROM products; - deepEqual(await db.query('products').max('unitPrice').get(), { + deepEqual(await db.query2('products').max('unitPrice').get(), { unitPrice: { max: 263.5 }, }) // count // SELECT COUNT(*) // FROM products; - deepEqual(await db.query('products').count().get(), { count: 77 }) + deepEqual(await db.query2('products').count().get(), { count: 77 }) // count group by // SELECT COUNT(*) AS [number of products], category_id // FROM products // GROUP BY category_id; - deepEqual(await db.query('products').count().groupBy('category').get(), { + deepEqual(await db.query2('products').count().groupBy('category').get(), { '1': { count: 12 }, '2': { count: 12 }, '3': { count: 13 }, @@ -1574,7 +1536,7 @@ await test('aggregates', async (t) => { // sum // SELECT SUM(quantity) // FROM order_details; - deepEqual(await db.query('orderDetails').sum('quantity').get(), { + deepEqual(await db.query2('orderDetails').sum('quantity').get(), { quantity: { sum: 51317 }, }) @@ -1584,7 +1546,7 @@ await test('aggregates', async (t) => { // WHERE product_id = 11; deepEqual( await db - .query('orderDetails') + .query2('orderDetails') .sum('quantity') .filter('product.id', '=', 11) .get(), @@ -1597,7 +1559,7 @@ await test('aggregates', async (t) => { // GROUP BY order_id; deepEqual( await db - .query('orderDetails') + .query2('orderDetails') .sum('quantity') .groupBy('order') .range(0, 10) @@ -1619,7 +1581,7 @@ await test('aggregates', async (t) => { // avg // SELECT AVG(unit_price) // FROM products; - deepEqual(await db.query('products').avg('unitPrice').get(), { + deepEqual(await db.query2('products').avg('unitPrice').get(), { unitPrice: { average: 28.833896103896105 }, }) @@ -1629,7 +1591,7 @@ await test('aggregates', async (t) => { // WHERE category_id = 1; deepEqual( await db - .query('products') + .query2('products') .avg('unitPrice') .filter('category.id', '=', 1) .get(), @@ -1641,7 +1603,7 @@ await test('aggregates', async (t) => { // FROM products // GROUP BY category_id; deepEqual( - await db.query('products').avg('unitPrice').groupBy('category').get(), + await db.query2('products').avg('unitPrice').groupBy('category').get(), { '1': { unitPrice: { average: 37.979166666666664 } }, '2': { unitPrice: { average: 22.854166666666668 } }, @@ -1656,12 +1618,6 @@ await test('aggregates', async (t) => { }) await test('hooks', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const schema = deepCopy(defaultSchema) schema.types.orderDetails.props['discountAmount'] = 'number' schema.types.orderDetails['hooks'] = { @@ -1676,10 +1632,10 @@ await test('hooks', async (t) => { } }, } - await createNorthwindDb(db, schema as SchemaIn) + const db = await createNorthwindDb(t, schema as SchemaIn) // SELECT Avg(unit_price * discount) AS [Average discount] FROM [order_details]; - deepEqual(await db.query('orderDetails').avg('discountAmount').get(), { + deepEqual(await db.query2('orderDetails').avg('discountAmount').get(), { discountAmount: { average: 1.4448364269141538 }, }) }) diff --git a/test/scenarios/nycTaxi.ts b/test/scenarios/nycTaxi.ts index 98201b710e..3523ac7b12 100644 --- a/test/scenarios/nycTaxi.ts +++ b/test/scenarios/nycTaxi.ts @@ -1,11 +1,10 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' import { join } from 'path' import { readdir, readFile } from 'node:fs/promises' import { promisify } from 'node:util' import { gunzip as _gunzip } from 'zlib' import { Sema } from 'async-sema' -import { logMemoryUsage } from '../shared/index.js' +import { logMemoryUsage, testDb } from '../shared/index.js' const gunzip = promisify(_gunzip) @@ -342,16 +341,7 @@ class Loading { } await test.skip('taxi', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - // FIXME - //t.after(() => t.backup(db)) - t.after(() => db.stop()) - - await db.setSchema({ + const db = await testDb(t, { types: { zone: { props: { @@ -431,7 +421,7 @@ await test.skip('taxi', async (t) => { }, }, }, - }) + }, { noBackup: true }) for (let i = 0; i < taxiZoneLookup.length; i += 4) { db.create('zone', { @@ -448,7 +438,7 @@ await test.skip('taxi', async (t) => { name: rates[i + 1], }) } - //await db.query('rate').include('*').get().inspect() + //await db.query2('rate').include('*').get().inspect() db.create('vendor', { vendorId: '1', @@ -475,27 +465,25 @@ await test.skip('taxi', async (t) => { clamp(Math.round(isNaN(x) ? 0 : x), -2147483648, 2147483647) const createTrip = async (trip: any) => { - // TODO toObject() shouldn't be needed const { id: vendor = null } = await db - .query('vendor', { vendorId: trip.VendorID }) + .query2('vendor', { vendorId: trip.VendorID }) .include('id') .get() - .toObject() + const { id: rate = null } = await db - .query('rate', { rateCodeId: trip.RatecodeID ?? '99' }) + .query2('rate', { rateCodeId: trip.RatecodeID ?? '99' }) .include('id') .get() - .toObject() + const { id: pickupLoc = null } = await db - .query('zone', { locationId: trip.PULocationID ?? '264' }) + .query2('zone', { locationId: trip.PULocationID ?? '264' }) .include('id') .get() - .toObject() + const { id: dropoffLoc = null } = await db - .query('zone', { locationId: trip.DOLocationID ?? '264' }) + .query2('zone', { locationId: trip.DOLocationID ?? '264' }) .include('id') .get() - .toObject() const pickup = new Date(trip.tpep_pickup_datetime) const dropoff = new Date(trip.tpep_dropoff_datetime) @@ -549,16 +537,16 @@ await test.skip('taxi', async (t) => { process.stderr.write('\n') await db.drain() - await db.query('zone').include('borough').get().inspect() - // await db.query('zone').include('*').get().inspect() - // await db.query('vendor').include('trips').get().inspect() + await db.query2('zone').include('borough').get().inspect() + // await db.query2('zone').include('*').get().inspect() + // await db.query2('vendor').include('trips').get().inspect() // await db - // .query('trip') + // .query2('trip') // .include('pickupLoc', 'dropoffLoc', 'paymentType') // .get() // .inspect() await db - .query('trip') + .query2('trip') .include( 'pickup', 'tripDistance', @@ -567,12 +555,12 @@ await test.skip('taxi', async (t) => { ) .get() .inspect() - // await db.query('trip').count().groupBy('dropoffLoc.borough').get().inspect() // TBD: nested prop in groupBy - await db.query('trip').count().groupBy('dropoffLoc').get().inspect() - await db.query('trip').count().groupBy('paymentType').get().inspect() + // await db.query2('trip').count().groupBy('dropoffLoc.borough').get().inspect() // TBD: nested prop in groupBy + await db.query2('trip').count().groupBy('dropoffLoc').get().inspect() + await db.query2('trip').count().groupBy('paymentType').get().inspect() console.log('trip count') - await db.query('trip').count().get().inspect() - // await db.query('vendor').sum('trips').get().inspect() BUG: requires validation or // TBD: group by all + await db.query2('trip').count().get().inspect() + // await db.query2('vendor').sum('trips').get().inspect() BUG: requires validation or // TBD: group by all // const makeDays = (startYear: number, endYear: number) => { // const days: Date[] = [] @@ -587,7 +575,7 @@ await test.skip('taxi', async (t) => { // const res = await Promise.all( // days.map((day) => // db - // .query('trip') + // .query2('trip') // .filter('pickup', '>=', day) // .filter('dropoff', '<=', new Date(day).setUTCHours(23, 59, 59, 0)) // //.count() @@ -596,12 +584,12 @@ await test.skip('taxi', async (t) => { // .get(), // ), // ) - // res.map((r) => r.toObject()) + // res.map((r) => r) // Yearly/Monthly/Daily revenue console.log('Yearly/Monthly/Daily revenue') await db - .query('trip') + .query2('trip') //.filter('pickupYear', '>=', new Date('2022-01-01')) //.filter('pickupYear', '<=', new Date('2024-05-31')) .filter('pickupYear', '>=', 2022) @@ -614,7 +602,7 @@ await test.skip('taxi', async (t) => { // Revenue Breakdown by Vendor console.log('Revenue Breakdown by Vendor') await db - .query('vendor') + .query2('vendor') .include('name', (select) => { select('trips') .groupBy('pickup', { step: 'year', timeZone: 'America/New_York' }) @@ -627,7 +615,7 @@ await test.skip('taxi', async (t) => { // Find the top 10 trips per day with the highest tip per mile. // TBD: need callback to combine functions tipAmount/tripDistance console.log('Top tippers') await db - .query('trip') + .query2('trip') // .include('pickup', 'fees.tipAmount', 'tripDistance') .groupBy('pickup', { step: 'day', timeZone: 'America/New_York' }) .max('fees.tipAmount') @@ -640,20 +628,20 @@ await test.skip('taxi', async (t) => { // Rush hour utilization console.log('Rush hour utilization') const rh1 = await db - .query('trip') + .query2('trip') .filter('pickupHour', '>=', 7) .filter('pickupHour', '<=', 10) .or((t) => t.filter('pickupHour', '>=', 16).filter('pickupHour', '<=', 19)) .groupBy('pickup', { step: 'dow', timeZone: 'America/New_York' }) .count() .get() - .toObject() + const rh2 = await db - .query('trip') + .query2('trip') .groupBy('pickup', { step: 'dow', timeZone: 'America/New_York' }) .count() .get() - .toObject() + console.log( Object.keys(day2enum).reduce( (prev, key) => ( @@ -666,7 +654,7 @@ await test.skip('taxi', async (t) => { // Most popular routes await db - .query('trip') + .query2('trip') .groupBy('pickupDropoffLocs') .count() .sort('pickupDropoffLocs') // TBD: has no effect yethas no effect yet @@ -677,13 +665,13 @@ await test.skip('taxi', async (t) => { // Avg rush hour speed between zones console.log('Avg rush hour speed between zones') await db - .query('trip') + .query2('trip') .filter('pickupHour', '>=', 7) .filter('pickupHour', '<=', 10) .or((t) => t.filter('pickupHour', '>=', 16).filter('pickupHour', '<=', 19)) .groupBy('pickup', { step: 'dow', timeZone: 'America/New_York' }) //.groupBy('pickupDropoffLocs') - .harmonicMean('avgSpeed') + .hmean('avgSpeed') .get() .inspect() diff --git a/test/scenarios/vote.ts b/test/scenarios/vote.ts index b2656e0234..a697cbc7cd 100644 --- a/test/scenarios/vote.ts +++ b/test/scenarios/vote.ts @@ -13,7 +13,7 @@ await test('schema with many uint8 fields', async (t) => { // maxModifySize: 1000 * 1000 * 1000, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) // t.after(() => db.destroy()) const maxPaymentsPerHub = 10000 @@ -152,7 +152,7 @@ await test('schema with many uint8 fields', async (t) => { const queueJob = async () => { const confirmation = async () => { const rdyForConfirmationToken = await db - .query('round', final) + .query2('round', final) .include((select) => { const t = select('payments') t.filter('status', '=', ['Requested']) @@ -160,7 +160,7 @@ await test('schema with many uint8 fields', async (t) => { t.include(['status']) }) .get() - const r = rdyForConfirmationToken.toObject().payments + const r = rdyForConfirmationToken.payments for (const payment of r) { db.update('payment', payment.id, { status: 'ReadyForConfirmationToken', @@ -171,7 +171,7 @@ await test('schema with many uint8 fields', async (t) => { const paymentIntent = async () => { const rdyForPaymentIntent = await db - .query('round', final) + .query2('round', final) .include((select) => { const t = select('payments') t.filter('status', '=', ['RequestedIntent']) @@ -180,7 +180,7 @@ await test('schema with many uint8 fields', async (t) => { }) .get() - const r = rdyForPaymentIntent.toObject().payments + const r = rdyForPaymentIntent.payments for (const payment of r) { db.update('payment', payment.id, { status: 'ReadyForPaymentIntent', @@ -300,7 +300,7 @@ await test('schema with many uint8 fields', async (t) => { const realIds = [...ids.keys()] const myThings = await db - .query('payment', realIds) + .query2('payment', realIds) .filter('status', '=', [ 'ReadyForConfirmationToken', 'ReadyForPaymentIntent', @@ -389,7 +389,7 @@ await test('schema with many uint8 fields', async (t) => { console.log( 'handled votes #', - (await db.query('vote').range(0, 1e6).get()).length, + (await db.query2('vote').range(0, 1e6).get()).length, ) await wait(100) diff --git a/test/scenarios/voteEdges.ts b/test/scenarios/voteEdges.ts index 43bc838339..3a9e2fca2e 100644 --- a/test/scenarios/voteEdges.ts +++ b/test/scenarios/voteEdges.ts @@ -129,6 +129,6 @@ await test('votesEdges', async (t) => { await db.drain() }, `Update random order id ${amount} votes, ${a}`) - await db.query('vote').get().inspect(1) - await db.query('round', final).include('*', '**').get().inspect(1) + await db.query2('vote').get().inspect(1) + await db.query2('round', final).include('*', '**').get().inspect(1) }) diff --git a/test/scenarios/voteLargeAmounts.perf.ts b/test/scenarios/voteLargeAmounts.perf.ts index ddfb741e65..be783d5c19 100644 --- a/test/scenarios/voteLargeAmounts.perf.ts +++ b/test/scenarios/voteLargeAmounts.perf.ts @@ -58,7 +58,7 @@ await test('schema with many uint8 fields', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) const voteCountrySchema: any = countrySchema @@ -128,20 +128,20 @@ await test('schema with many uint8 fields', async (t) => { const timeActions = async () => { //console.log('\n------ Status ------') - // await db.query('vote').count().get().inspect() - // await db.query('payment').count().get().inspect() + // await db.query2('vote').count().get().inspect() + // await db.query2('payment').count().get().inspect() const d = performance.now() await db.save() const tSave = performance.now() - d //console.log('took', tSave.toFixed(2), 'ms to save') - const cnt = await db.query('vote').count().get() + const cnt = await db.query2('vote').count().get() // TODO This crashes the test runner if it fails //assert(tSave < 1e3) // TODO better assert //assert(cnt.execTime < 5) //await db - // .query('payment') + // .query2('payment') // .include('id') // .filter('status', '=', 'Requested') // .get() @@ -151,16 +151,16 @@ await test('schema with many uint8 fields', async (t) => { // 'group by on all', // ( // await db - // .query('vote') + // .query2('vote') // .groupBy('fromCountry') // .sum(...s) // .get() // ).execTime.toFixed(2), // 'ms', //) - const n = cnt.toObject().count + const n = cnt.count const grp = await db - .query('vote') + .query2('vote') .groupBy('fromCountry') .sum(...s) .get() diff --git a/test/scenarios/voteStorage.ts b/test/scenarios/voteStorage.ts index 37765d1139..cd264d5d37 100644 --- a/test/scenarios/voteStorage.ts +++ b/test/scenarios/voteStorage.ts @@ -3,6 +3,7 @@ import test from '../shared/test.js' import { SchemaProp, SchemaType } from '../../src/schema/index.js' import { deepEqual } from '../shared/assert.js' import { inspect } from 'util' +import {testDb} from '../shared/index.js' const countrySchema: SchemaType = { props: { @@ -51,11 +52,11 @@ await test('vote including round', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) const voteCountrySchema: any = countrySchema - await db.setSchema({ + const client = await db.setSchema({ types: { payment: { fingerprint: 'alias', @@ -125,32 +126,26 @@ await test('vote including round', async (t) => { }, }, }) - const final = await db.create('round', {}) + const final = await client.create('round', {}) for (let i = 0; i < 3e5; i++) { - const payment = db.create('payment', { + const payment = client.create('payment', { fingerprint: `blablabla-${i}`, status: 'WebhookSuccess', round: final, }) - const vote = db.create('vote', { + const vote = client.create('vote', { fingerprint: `blablabla-vote-${i}`, payment, round: final, }) } await db.save() - console.log('set all items', await db.drain()) + console.log('set all items', await client.drain()) }) const testVotes = (opts: { votes: any; amount: number }) => { return test(`vote single ref test remove ${inspect(opts)}`, async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const client = await testDb(t, { types: { round: { votes: { @@ -172,24 +167,23 @@ const testVotes = (opts: { votes: any; amount: number }) => { let amount = opts.amount - const final = await db.create('round') + const final = await client.create('round', {}) for (let i = 0; i < amount; i++) { - db.create('vote', { + client.create('vote', { round: final, }) } - console.log(`Creating votes (${amount})ms`, await db.drain()) + console.log(`Creating votes (${amount})ms`, await client.drain()) console.log('Remove votes from final') - await db.update('round', final, { + await client.update('round', final, { votes: opts.votes, }) deepEqual( - (await db.query('round', final).include('votes').get().toObject()).votes - .length, + (await client.query2('round', final).include('votes').get()).votes.length, 0, 'clear refs', ) @@ -198,26 +192,22 @@ const testVotes = (opts: { votes: any; amount: number }) => { for (let i = 0; i < len; i++) { const randomId = amount === 1 ? 1 : Math.ceil(Math.random() * amount) deepEqual( - await db.query('vote', randomId).include('round').get(), + await client.query2('vote', randomId).include('round').get(), { id: randomId, round: null }, `clears refs on the other side ${randomId}`, ) } - const votes = await db - .query('vote') - .range(0, 1e6) - .include('id') - .get() - .toObject() + const votes = await client.query2('vote').range(0, 1e6).include('id').get() + let i = votes.length - 1 for (i = 0; i < votes.length; i++) { - db.delete('vote', votes[i].id) + client.delete('vote', votes[i].id) } console.log( 'Total db time removing all votes (refs in round)', - await db.drain(), + await client.drain(), ) }) } diff --git a/test/schema/parse/types.ts b/test/schema/parse/types.ts new file mode 100644 index 0000000000..d8f39f1e06 --- /dev/null +++ b/test/schema/parse/types.ts @@ -0,0 +1,34 @@ +import { + parseSchema, + type SchemaOut, + type SchemaReferences, +} from '../../../src/schema.js' +import { test } from '../../shared/index.js' + +await test('types', async () => { + const schemaOut: SchemaOut = parseSchema({ + hash: 0, + locales: { + nl: true, + }, + types: { + coolUser: { + props: { + name: 'string', + myObj: { + type: 'object', + props: { + dicky: 'string', + }, + }, + }, + }, + coolType: { + myUser: { + ref: 'coolUser', + prop: 'myType', + }, + }, + }, + }) +}) diff --git a/test/schema/props/validate.ts b/test/schema/props/validate.ts new file mode 100644 index 0000000000..8878d123b2 --- /dev/null +++ b/test/schema/props/validate.ts @@ -0,0 +1,66 @@ +import test from '../../shared/test.js' +import { + number, + uint8, + int8, + enum_ as enumProp, + boolean, +} from '../../../src/schema/defs/props/fixed.js' +import { AutoSizedUint8Array } from '../../../src/utils/AutoSizedUint8Array.js' + +const assertThrows = (fn: () => void, re: RegExp) => { + try { + fn() + } catch (e) { + if (re.test(e.message)) return + throw new Error(`Expected error matching ${re}, got "${e.message}"`) + } + throw new Error(`Expected error matching ${re}, but function did not throw`) +} + +await test('Fixed props validation: throws on invalid input', async (t) => { + const autoBuf = new AutoSizedUint8Array() + const writeBuf = new Uint8Array(10) + + // Number validation + // @ts-ignore + const numProp = new number({ type: 'number', min: 10, max: 20 }, ['test'], {}) + + assertThrows(() => numProp.pushValue(autoBuf, 5), /smaller than min/) + assertThrows(() => numProp.pushValue(autoBuf, 25), /larger than max/) + assertThrows(() => numProp.pushValue(autoBuf, 'string'), /Invalid type/) + + assertThrows(() => numProp.write(writeBuf, 5, 0), /smaller than min/) + assertThrows(() => numProp.write(writeBuf, 25, 0), /larger than max/) + assertThrows(() => numProp.write(writeBuf, 'string', 0), /Invalid type/) + + // Uint8 validation + // @ts-ignore + const u8Prop = new uint8({ type: 'uint8' }, ['test'], {}) + assertThrows(() => u8Prop.pushValue(autoBuf, 256), /Value out of range/) + assertThrows(() => u8Prop.pushValue(autoBuf, -1), /Value out of range/) + + assertThrows(() => u8Prop.write(writeBuf, 256, 0), /Value out of range/) + assertThrows(() => u8Prop.write(writeBuf, -1, 0), /Value out of range/) + + // Int8 validation + // @ts-ignore + const i8Prop = new int8({ type: 'int8' }, ['test'], {}) + assertThrows(() => i8Prop.pushValue(autoBuf, 128), /Value out of range/) + assertThrows(() => i8Prop.pushValue(autoBuf, -129), /Value out of range/) + + assertThrows(() => i8Prop.write(writeBuf, 128, 0), /Value out of range/) + assertThrows(() => i8Prop.write(writeBuf, -129, 0), /Value out of range/) + + // Enum validation + // @ts-ignore + const enProp = new enumProp({ type: 'enum', enum: ['a', 'b'] }, ['test'], {}) + assertThrows(() => enProp.pushValue(autoBuf, 'c'), /Invalid enum value/) + assertThrows(() => enProp.write(writeBuf, 'c', 0), /Invalid enum value/) + + // Boolean validation + // @ts-ignore + const boolProp = new boolean({ type: 'boolean' }, ['test'], {}) + assertThrows(() => boolProp.pushValue(autoBuf, 123), /Invalid type/) + assertThrows(() => boolProp.write(writeBuf, 123, 0), /Invalid type/) +}) diff --git a/test/schema/props/write.ts b/test/schema/props/write.ts new file mode 100644 index 0000000000..d79c81f0b5 --- /dev/null +++ b/test/schema/props/write.ts @@ -0,0 +1,146 @@ +import test from '../../shared/test.js' +import { deepEqual } from '../../shared/assert.js' +import { + number, + timestamp, + uint8, + int8, + uint16, + int16, + uint32, + int32, + enum_ as enumProp, + boolean, +} from '../../../src/schema/defs/props/fixed.js' +import { + string, + json, + binary, + cardinality, +} from '../../../src/schema/defs/props/separate.js' +import { + references, + reference, +} from '../../../src/schema/defs/props/references.js' +import { AutoSizedUint8Array } from '../../../src/utils/AutoSizedUint8Array.js' +import { LangCode, Modify } from '../../../src/zigTsExports.js' + +await test('Fixed props: write matches pushValue', async (t) => { + const cases = [ + { Ctor: number, schema: { type: 'number' }, value: 123.456 }, + { Ctor: timestamp, schema: { type: 'timestamp' }, value: Date.now() }, + { Ctor: uint8, schema: { type: 'uint8' }, value: 123 }, + { Ctor: int8, schema: { type: 'int8' }, value: -12 }, + { Ctor: uint16, schema: { type: 'uint16' }, value: 12345 }, + { Ctor: int16, schema: { type: 'int16' }, value: -12345 }, + { Ctor: uint32, schema: { type: 'uint32' }, value: 12345678 }, + { Ctor: int32, schema: { type: 'int32' }, value: -12345678 }, + { Ctor: enumProp, schema: { type: 'enum', enum: ['a', 'b'] }, value: 0 }, // enum index + { Ctor: boolean, schema: { type: 'boolean' }, value: true }, + ] + + for (const { Ctor, schema, value } of cases) { + // @ts-ignore + const prop = new Ctor(schema, ['test'], {}) + const autoBuf = new AutoSizedUint8Array() + try { + prop.pushValue(autoBuf, value) + } catch (e) { + if (Ctor === enumProp) { + prop.pushValue(autoBuf, 'a') + } else { + throw e + } + } + + const pushResult = new Uint8Array(autoBuf.data.subarray(0, autoBuf.length)) + + const writeBuf = new Uint8Array(pushResult.length + 10) + const offset = 2 + // @ts-ignore + if (Ctor === enumProp) { + prop.write(writeBuf, 'a', offset) + } else { + prop.write(writeBuf, value, offset) + } + + const writeResult = writeBuf.subarray(offset, offset + pushResult.length) + + deepEqual(writeResult, pushResult, `Mismatch for ${schema.type}`) + } +}) + +await test('Separate props: write matches pushValue', async (t) => { + const cases = [ + { + Ctor: string, + schema: { type: 'string' }, + value: 'hello world', + lang: LangCode.en, + }, + { + Ctor: json, + schema: { type: 'json' }, + value: { foo: 'bar' }, + lang: LangCode.en, + }, + { + Ctor: binary, + schema: { type: 'binary' }, + value: new Uint8Array([1, 2, 3]), + lang: LangCode.en, + }, + // Cardinality + { + Ctor: cardinality, + schema: { type: 'cardinality' }, + value: ['a', 'b'], + lang: LangCode.en, + }, + ] + + for (const { Ctor, schema, value, lang } of cases) { + // @ts-ignore + const prop = new Ctor(schema, ['test'], {}) + const autoBuf = new AutoSizedUint8Array() + // @ts-ignore + prop.pushValue(autoBuf, value, undefined, lang) + + const pushResult = new Uint8Array(autoBuf.data.subarray(0, autoBuf.length)) + + const writeBuf = new Uint8Array(pushResult.length + 10) + const offset = 5 + // @ts-ignore + // @ts-ignore + prop.write(writeBuf, value, offset, undefined, lang) + + const writeResult = writeBuf.subarray(offset, offset + pushResult.length) + + deepEqual(writeResult, pushResult, `Mismatch for ${schema.type}`) + } +}) + +await test('References props: write matches pushValue', async (t) => { + const refsValue = [1000, 2000] + + // @ts-ignore + const prop = new references({ type: 'references' }, ['test'], {}) + const autoBuf = new AutoSizedUint8Array() + + // Modify.update + const op = Modify.update + + // @ts-ignore + prop.pushValue(autoBuf, refsValue, op, LangCode.en) + + const pushResult = new Uint8Array(autoBuf.data.subarray(0, autoBuf.length)) + + const writeBuf = new Uint8Array(pushResult.length + 100) + const offset = 10 + + prop.write(writeBuf, refsValue, offset, op, LangCode.en) + + const writeResult = writeBuf.subarray(offset, offset + pushResult.length) + + deepEqual(writeResult, pushResult, 'Mismatch for references') +}) diff --git a/test/search.ts b/test/search.ts index 5f4ed3b7a6..465781ba2b 100644 --- a/test/search.ts +++ b/test/search.ts @@ -1,16 +1,10 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual, equal } from './shared/assert.js' import { italy } from './shared/examples.js' await test('like filter', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -31,7 +25,7 @@ await test('like filter', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', 'article') .include('id') .range(0, 1e3) @@ -43,7 +37,7 @@ await test('like filter', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', 'snurfelpants') .include('id') .range(0, 1e3) @@ -55,7 +49,7 @@ await test('like filter', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', ['snurfelpants', 'article']) .include('id') .range(0, 1e3) @@ -67,7 +61,7 @@ await test('like filter', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', 'kxngdom') .include('id') .range(0, 1e3) @@ -80,7 +74,7 @@ await test('like filter', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', 'derperp') .include('id') .range(0, 1e3) @@ -93,7 +87,7 @@ await test('like filter', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', 'kxngdom', { score: 0 }) .include('id') .range(0, 1e3) @@ -105,13 +99,7 @@ await test('like filter', async (t) => { }) await test('compressed', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -136,7 +124,7 @@ await test('compressed', async (t) => { // sort + search equal( await db - .query('article') + .query2('article') .search('Netherlands', { body: 0, title: 1 }) .include('id', 'date') .range(0, amount) @@ -148,7 +136,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('giraffe', { body: 0, title: 1 }) .include('id', 'date', 'title') .range(0, amount) @@ -160,7 +148,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('kingdom', { body: 0, title: 1 }) .include('id', 'date', 'title') .sort('date') @@ -173,7 +161,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('Netherlands', { body: 0, title: 1 }) .include('id', 'date') .sort('date') @@ -186,7 +174,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('giraffe', { body: 0, title: 1 }) .include('id', 'date', 'title') .sort('date') @@ -199,7 +187,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('derp', { body: 0, title: 1 }) .include('id', 'date', 'title') .sort('date') @@ -212,7 +200,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('giraffe first', { body: 0, title: 1 }) .include('id', 'date', 'title', 'body') .range(0, 1e3) @@ -224,7 +212,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('first', { body: 0, title: 1 }) .include('id', 'date', 'title', 'body') .sort('date') @@ -237,7 +225,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('second', { body: 0, title: 1 }) .include('id', 'date', 'title') .sort('date') @@ -250,7 +238,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('giraffe first', { body: 0, title: 1 }) .include('id', 'date', 'title') .sort('date') @@ -263,7 +251,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('italy netherlands', { body: 0, title: 1 }) .include('id', 'date', 'title') .sort('date') @@ -276,7 +264,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('italy netherlands', 'body', 'title') .include('id', 'date', 'title') .sort('date') @@ -289,7 +277,7 @@ await test('compressed', async (t) => { equal( await db - .query('article') + .query2('article') .search('italy netherlands', 'body', 'title') .include('id', 'date', 'title') .sort('date') @@ -302,13 +290,7 @@ await test('compressed', async (t) => { ) }) await test('simple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -328,7 +310,7 @@ await test('simple', async (t) => { equal( await db - .query('article') + .query2('article') .search('giraffe first', 'body') .include('id', 'date', 'title') .sort('date') @@ -341,7 +323,7 @@ await test('simple', async (t) => { equal( await db - .query('article') + .query2('article') .search('derp derp', 'body', 'title') .include('id', 'date', 'title') .sort('date') @@ -354,13 +336,7 @@ await test('simple', async (t) => { }) await test('search ids', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -394,7 +370,7 @@ await test('search ids', async (t) => { equal( await db - .query('article', [first, second]) + .query2('article', [first, second]) .search('first', 'body') .include('id', 'date', 'title') .range(0, 1e3) @@ -406,7 +382,7 @@ await test('search ids', async (t) => { equal( await db - .query('article', [first, second]) + .query2('article', [first, second]) .search('first', 'body') .sort('date') .include('id', 'date', 'title') @@ -419,13 +395,7 @@ await test('search ids', async (t) => { }) await test('like filter mbs', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -446,7 +416,7 @@ await test('like filter mbs', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', 'mihailovitsin') .include('id') .range(0, 1e3) @@ -457,7 +427,7 @@ await test('like filter mbs', async (t) => { equal( ( await db - .query('article') + .query2('article') .filter('body', 'like', 'mihailovitšin') .include('id') .range(0, 1e3) @@ -468,13 +438,7 @@ await test('like filter mbs', async (t) => { }) await test('giraffe first', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -493,7 +457,7 @@ await test('giraffe first', async (t) => { equal( await db - .query('article') + .query2('article') .search('giraffe first', { body: 0, title: 1 }) .include('id', 'date', 'title', 'body') .range(0, 1e3) @@ -505,13 +469,7 @@ await test('giraffe first', async (t) => { }) await test('first letter', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -525,7 +483,7 @@ await test('first letter', async (t) => { name: 'Kavel Omval Naast De Poort', }) - deepEqual(await db.query('article').search('Kavel').get(), [ + deepEqual(await db.query2('article').search('Kavel').get(), [ { id: 1, $searchScore: 0, name: 'Kavel Omval Naast De Poort' }, ]) }) diff --git a/test/serializeQueryDef.ts b/test/serializeQueryDef.ts index 6a8d64cc97..cb90f0a80b 100644 --- a/test/serializeQueryDef.ts +++ b/test/serializeQueryDef.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual } from './shared/assert.js' await test('serialize', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -30,25 +24,24 @@ await test('serialize', async (t) => { await db.drain() - deepEqual((await db.query('user').get()).toObject(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, isNice: false }, { id: 2, isNice: true }, { id: 3, isNice: false }, ]) - deepEqual( - (await db.query('user').filter('isNice', '=', true).get()).toObject(), - [{ id: 2, isNice: true }], - ) + deepEqual(await db.query2('user').filter('isNice', '=', true).get(), [ + { id: 2, isNice: true }, + ]) - deepEqual((await db.query('user').filter('isNice').get()).toObject(), [ + deepEqual(await db.query2('user').filter('isNice').get(), [ { id: 2, isNice: true }, ]) - deepEqual((await db.query('user').filter('isNice', false).get()).toObject(), [ + deepEqual(await db.query2('user').filter('isNice', false).get(), [ { id: 1, isNice: false }, { id: 3, isNice: false }, ]) - // const def = db.query('user').filter('isNice', false).def + // const def = db.query2('user').filter('isNice', false).def }) diff --git a/test/shared/assert.ts b/test/shared/assert.ts index 33cff334ff..e5952ff629 100644 --- a/test/shared/assert.ts +++ b/test/shared/assert.ts @@ -5,13 +5,16 @@ import { BasedQueryResponse } from '../../src/db-client/query/BasedQueryResponse import { PropTypeInverse } from '../../src/zigTsExports.js' export { perf } from './perf.js' -// add fn -export const deepEqual = (a, b, msg?: string) => { +export const deepEqual = ( + a: A, + b: NoInfer : A>, + msg?: string, +) => { if (a instanceof BasedQueryResponse) { - a = a.toObject() + a = a } if (b instanceof BasedQueryResponse) { - b = b.toObject() + b = b } if (!uDeepEqual(a, b)) { const m = `${msg || ``} @@ -25,9 +28,9 @@ ${util.inspect(a, { depth: 10, maxStringLength: 60 })} } } -export const notEqual = (a, b, msg?: string) => { +export const notEqual = (a: any, b: any, msg?: string) => { if (uDeepEqual(a, b)) { - const m = `${msg || 'Should not be equal:'} + const m = `${msg || 'Should not be equal:'} ------------------ A --------------------- ${util.inspect(a, { depth: 10, maxStringLength: 60 })} ------------------ B --------------------- diff --git a/test/shared/index.ts b/test/shared/index.ts index c61c217401..017f398ba0 100644 --- a/test/shared/index.ts +++ b/test/shared/index.ts @@ -1,3 +1,7 @@ +import { createHash } from 'node:crypto' +import { getBlockHash, getBlockStatuses } from '../../src/db-server/blocks.js' +import type { ResolveSchema, SchemaIn, StrictSchema } from '../../src/schema.js' +import { DbClient, DbServer, getDefaultHooks } from '../../src/sdk.js' import test from './test.js' export * from './assert.js' export * from './examples.js' @@ -30,3 +34,84 @@ export function logMemoryUsage() { console.log(` arrayBuffers: ${formatBytes(memoryUsage.arrayBuffers)}`) } } + +export const testDb = async ( + t: Parameters[1]>[0], + schema: StrictSchema, + opts: { noBackup?: boolean, noClean?: boolean, path?: string } = {}, +): Promise>> => { + const server = await testDbServer(t, opts) + return testDbClient(server, schema) +} + +export const testDbClient = async ( + server: DbServer, + schema?: StrictSchema, +): Promise>> => { + const client = new DbClient({ + hooks: getDefaultHooks(server), + }) + if (schema) { + await client.setSchema(schema) + } + return client as unknown as DbClient> +} + +export const testDbServer = async ( + t: Parameters[1]>[0], + opts: { noBackup?: boolean, noClean?: boolean, path?: string } = {}, +): Promise => { + const db = new DbServer({ path: opts.path ?? t.tmp }) + await db.start({ clean: !opts.noClean }) + if (opts.noBackup) { + t.after(() => db.destroy()) + } else { + t.after(() => t.backup(db)) + } + return db +} + +export async function countDirtyBlocks(server: DbServer) { + let n = 0 + + for (const t of Object.keys(server.schemaTypesParsedById)) { + n += (await getBlockStatuses(server, Number(t))).reduce( + (acc, cur) => acc + ~~!!(cur & 0x4), + 0, + ) + } + + return n +} + +export const getActiveBlocks = async ( + db: DbServer, + tc: number, +): Promise> => + (await getBlockStatuses(db, tc)).reduce((acc, cur, i) => { + if (cur) { + acc.push(i) + } + return acc + }, [] as Array) + +const block2start = (block: number, capacity: number): number => + block * capacity + 1 + +export const hashType = async ( + db: DbServer, + typeName: string, +): Promise => { + const tc = db.schemaTypesParsed[typeName].id + const capacity = db.schemaTypesParsed[typeName].blockCapacity + const hash = createHash('sha256') + const bhs = await Promise.all( + (await getActiveBlocks(db, tc)).map((block) => + getBlockHash(db, tc, block2start(block, capacity)), + ), + ) + for (const bh of bhs) { + hash.update(bh) + } + return hash.digest('hex') +} diff --git a/test/shared/northwindDb.ts b/test/shared/northwindDb.ts index a9a30729b6..811f8295ba 100644 --- a/test/shared/northwindDb.ts +++ b/test/shared/northwindDb.ts @@ -1,5 +1,8 @@ -import { Schema, type SchemaIn } from '../../src/schema/index.js' -import { BasedDb } from '../../src/index.js' +import {ResolveSchema} from '../../dist/schema.js' +import { type SchemaIn } from '../../src/schema/index.js' +import { DbClient } from '../../src/sdk.js' +import { testDb } from './index.js' +import test from './test.js' const schCompanyName = { type: 'string', maxBytes: 40 } as const const schContactName = { type: 'string', maxBytes: 30 } as const @@ -302,10 +305,10 @@ export const defaultSchema = Object.freeze({ }) export default async function createNorthwindDb( - db: BasedDb, + t: Parameters[1]>[0], schema: SchemaIn = defaultSchema as SchemaIn, -) { - await db.setSchema(schema) +): Promise>> { + const db = await testDb(t, schema) // categories ;[ @@ -679,8 +682,7 @@ export default async function createNorthwindDb( [9, '48304'], [9, '55113'], [9, '55439'], - ].forEach((row) => db.upsert('territories', { - territoryId: row[1], + ].forEach((row) => db.upsert('territories', { territoryId: `${row[1]}` }, { employees: { add: [ row[0] ], }, @@ -1537,7 +1539,7 @@ export default async function createNorthwindDb( [11077, 'RATTC', 1, '1998-05-06', '1998-06-03', '', 2, 8.53, 'Rattlesnake Canyon Grocery', '2817 Milton Dr.', 'Albuquerque', 'NM', '87110', 'USA'], ].map(async (row) => db.create('orders', { id: row[0], - customer: await db.upsert('customers', { customerId: row[1] }), + customer: await db.upsert('customers', { customerId: `${row[1]}` }, {}), employee: row[2], orderStatus: (row[5] === '') ? 'created' : 'shipped', orderDate: row[3] && new Date(row[3]), @@ -3924,4 +3926,6 @@ export default async function createNorthwindDb( { unsafe: true }, ), ) + + return db } diff --git a/test/shared/playground.ts b/test/shared/playground.ts index 7345a2ac2b..aea4d26a82 100644 --- a/test/shared/playground.ts +++ b/test/shared/playground.ts @@ -35,6 +35,6 @@ // console.log('set', await db.drain(), 'ms') -// await db.query('thing').range(0, 1e6).filter('isNice', false).get().inspect() +// await db.query2('thing').range(0, 1e6).filter('isNice', false).get().inspect() // await db.destroy() diff --git a/test/shared/startWorker.ts b/test/shared/startWorker.ts index aa38f675d1..5c706b44c5 100644 --- a/test/shared/startWorker.ts +++ b/test/shared/startWorker.ts @@ -3,7 +3,7 @@ import fs from 'node:fs/promises' import { fileURLToPath } from 'url' import { join, dirname } from 'path' import { DbClient } from '../../src/db-client/index.js' -import { BasedDb } from '../../src/index.js' +import { BasedDb, type DbServer } from '../../src/index.js' import native from '../../src/native.js' import * as utils from '../../src/utils/index.js' import hash from '../../src/hash/hash.js' @@ -17,7 +17,7 @@ type Utils = typeof utils export const clientWorker = async ( t: any, - db: BasedDb, + db: DbServer, fn: ( client: DbClient, data: T, @@ -64,14 +64,14 @@ export const clientWorker = async ( done = r }) - db.server.on('schema', (s) => { + db.on('schema', (s) => { schemaChannel.port1.postMessage(s) }) port1.on('message', async (d) => { if (d === 'started') { - if (db.server.schema) { - schemaChannel.port1.postMessage(db.server.schema) + if (db.schema) { + schemaChannel.port1.postMessage(db.schema) } return } @@ -80,7 +80,7 @@ export const clientWorker = async ( return } const seqId = d.id - const result = await db.server[d.fn](...d.data) + const result = await db[d.fn](...d.data) port1.postMessage({ id: seqId, result }) }) diff --git a/test/shared/test.ts b/test/shared/test.ts index 33b993201d..e34e4ec1d9 100644 --- a/test/shared/test.ts +++ b/test/shared/test.ts @@ -1,9 +1,14 @@ import { styleText } from 'node:util' import { fileURLToPath } from 'url' import { join, dirname, resolve } from 'path' -import { BasedDb } from '../../src/index.js' +import { + BasedDb, + DbClient, + DbServer, + getDefaultHooks, +} from '../../src/index.js' import { deepEqual } from './assert.js' -import { wait, bufToHex } from '../../src/utils/index.js' +import { wait } from '../../src/utils/index.js' import fs from 'node:fs/promises' export const counts = { @@ -29,7 +34,7 @@ const errors = new Set() export type T = { after: (fn: () => Promise | void, push?: boolean) => void - backup: (db: BasedDb) => Promise + backup: (db: DbServer) => Promise tmp: string } @@ -61,7 +66,7 @@ const test: { afters.unshift(fn) } }, - backup: async (db: BasedDb) => { + backup: async (db: DbServer) => { afters.push(async () => { try { await db.destroy() @@ -72,17 +77,19 @@ const test: { return } - const fields = ['*', '**'] - const make = async (db) => { + const make = async (db: DbServer) => { + const client = new DbClient({ + hooks: getDefaultHooks(db), + }) const checksums: any[] = [] const data: any[] = [] const counts: any[] = [] - for (const type in db.server.schema?.types) { - let x = await db.query(type).include(fields).get() - checksums.push(x.checksum) - data.push(x.toObject()) - counts.push(await db.query(type).count().get().toObject().count) + for (const type in db.schema?.types) { + let x = await client.query2(type).include('*', '**').get() + checksums.push(x['checksum']) + data.push(x) + counts.push((await client.query2(type).count().get()).count) } return [checksums, data, counts] @@ -95,17 +102,15 @@ const test: { console.log(styleText('gray', `saved db ${performance.now() - d} ms`)) const size = await dirSize(t.tmp) - - const kbs = ~~(size / 1024) - if (kbs < 5000) { - console.log(styleText('gray', `backup size ${kbs}kb`)) - } else { - console.log(styleText('gray', `backup size ${~~(kbs / 1024)}mb`)) - } + const strSize = + size < 1_048_576 + ? `${Math.ceil(size / 1024)} kiB` + : `${Math.ceil(size / 1_048_576)} MiB` + console.log(styleText('gray', `backup size ${strSize}`)) await db.stop() - const newDb = new BasedDb({ + const newDb = new DbServer({ path: t.tmp, }) @@ -135,7 +140,7 @@ const test: { deepEqual( b[di], a[di], - `Mismatch after backup (len:${b.length}) ${Object.keys(db.server.schema!.types)[di]}`, + `Mismatch after backup (len:${b.length}) ${Object.keys(db.schema!.types)[di]}`, ) } const ci = findFirstDiffPos(counts, c) @@ -143,7 +148,7 @@ const test: { deepEqual( c[ci], counts[ci], - `Mismatching count after backup (len:${b.length}) ${Object.keys(db.server.schema!.types)[ci]}`, + `Mismatching count after backup (len:${b.length}) ${Object.keys(db.schema!.types)[ci]}`, ) } } diff --git a/test/simpleQuery.ts b/test/simpleQuery.ts index c1643eae61..03b7e9e61f 100644 --- a/test/simpleQuery.ts +++ b/test/simpleQuery.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual } from './shared/assert.js' await test('query', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -39,22 +33,16 @@ await test('query', async (t) => { await db.drain() - deepEqual( - (await db.query('user').include('id').get()).toObject(), - [{ id: 1 }], - 'Id only', - ) + deepEqual(await db.query2('user').include('id').get(), [{ id: 1 }], 'Id only') deepEqual( - ( - await db.query('user').filter('age', '<', 20).include('id', 'age').get() - ).toObject(), + await db.query2('user').filter('age', '<', 20).include('id', 'age').get(), [], ) deepEqual( - (await db.query('user').include('*').get()).toObject(), - (await db.query('user').get()).toObject(), + await db.query2('user').include('*').get(), + await db.query2('user').get(), 'include * works as "get all fields"', ) }) diff --git a/test/singleRef.ts b/test/singleRef.ts index 5cb1acbba8..587604dc77 100644 --- a/test/singleRef.ts +++ b/test/singleRef.ts @@ -2,18 +2,10 @@ import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { deepEqual, equal } from './shared/assert.js' import { setTimeout } from 'timers/promises' -import { wait } from '../src/utils/index.js' +import { testDb } from './shared/index.js' await test('single special cases', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -68,9 +60,9 @@ await test('single simple', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) - await db.setSchema({ + const client = await db.setSchema({ types: { user: { props: { @@ -99,13 +91,13 @@ await test('single simple', async (t) => { }, }) - db.create('simple', { - user: db.create('user', { + client.create('simple', { + user: client.create('user', { name: 'Mr snurp', }), }) - deepEqual(await db.query('simple').include('user.name').get(), [ + deepEqual(await client.query2('simple').include('user.name').get(), [ { id: 1, user: { @@ -115,56 +107,50 @@ await test('single simple', async (t) => { }, ]) - db.update('simple', 1, { + client.update('simple', 1, { user: null, }) - deepEqual(await db.query('simple').include('user.name').get(), [ + deepEqual(await client.query2('simple').include('user.name').get(), [ { id: 1, user: null, }, ]) - const x = db.create('user', { + const x = client.create('user', { name: 'Mr snurp', }) - const blax = await db.create('simple', { + const blax = await client.create('simple', { user: x, }) const ids: any[] = [] for (let i = 0; i < 1e5; i++) { - ids.push(db.create('simple', {})) + ids.push(client.create('simple', {})) } - const bla2 = await db.create('simple', { + await client.create('simple', { user: x, mySelf: blax, }) - await db.isModified() + await client.isModified() for (let i = 0; i < 1e5; i++) { - ids.push(db.delete('simple', ids[i])) + ids.push(client.delete('simple', ids[i])) } await db.save() - db.update('simple', blax, { + client.update('simple', blax, { mySelf: null, }) }) await test('simple nested', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -208,26 +194,26 @@ await test('simple nested', async (t) => { await db.drain() - deepEqual((await db.query('blup').include('flap').get()).toObject(), [ + deepEqual(await db.query2('blup').include('flap').get(), [ { id: 1, flap: 'B', }, ]) - const result1 = await db.query('user').include('myBlup.flap').get() + const result1 = await db.query2('user').include('myBlup.flap').get() for (const r of result1) { equal(r.myBlup.flap, 'B') } - const result = await db.query('simple').include('user.myBlup.flap').get() + const result = await db.query2('simple').include('user.myBlup.flap').get() for (const r of result) { equal(r.user.myBlup.flap, 'B') } - deepEqual((await db.query('user').include('simple').get()).toObject(), [ + deepEqual(await db.query2('user').include('simple').get(), [ { id: 1, simple: { id: 1 }, @@ -240,14 +226,14 @@ await test('simple nested', async (t) => { await db.drain() - deepEqual((await db.query('simple').include('user').get()).toObject(), [ + deepEqual(await db.query2('simple').include('user').get(), [ { id: 1, user: null, }, ]) - deepEqual((await db.query('user').include('simple').get()).toObject(), [ + deepEqual(await db.query2('user').include('simple').get(), [ { id: 1, simple: null, @@ -256,13 +242,7 @@ await test('simple nested', async (t) => { }) await test('single reference object', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -321,7 +301,7 @@ await test('single reference object', async (t) => { await db.drain() - deepEqual((await db.query('simple').include('admin.user').get()).toObject(), [ + deepEqual(await db.query2('simple').include('admin.user').get(), [ { id: 1, admin: { @@ -334,13 +314,7 @@ await test('single reference object', async (t) => { }) await test('nested', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -429,13 +403,12 @@ await test('nested', async (t) => { await db.drain() - deepEqual( - (await db.query('simple').include('id').range(0, 1).get()).toObject(), - [{ id: 1 }], - ) + deepEqual(await db.query2('simple').include('id').range(0, 1).get(), [ + { id: 1 }, + ]) deepEqual( - (await db.query('simple').include('user').range(0, 1).get()).toObject(), + await db.query2('simple').include('user').range(0, 1).get(), [ { id: 1, @@ -446,9 +419,7 @@ await test('nested', async (t) => { ) deepEqual( - ( - await db.query('simple', lastRes).include('user.location').get() - ).toObject(), + await db.query2('simple', lastRes).include('user.location').get(), { id: await lastRes, user: { @@ -460,7 +431,7 @@ await test('nested', async (t) => { ) deepEqual( - (await db.query('simple', lastRes).include('user').get()).toObject(), + await db.query2('simple', lastRes).include('user').get(), { id: await lastRes, user: { @@ -478,13 +449,11 @@ await test('nested', async (t) => { ) deepEqual( - ( - await db - .query('simple') // lastRes - .include('user.myBlup') - .range((await lastRes!) - 1, await lastRes) - .get() - ).toObject(), + await db + .query2('simple') // lastRes + .include('user.myBlup') + .range((await lastRes!) - 1, await lastRes) + .get(), [ { id: await lastRes, @@ -495,7 +464,7 @@ await test('nested', async (t) => { ) deepEqual( - (await db.query('simple', lastRes).include('user.myBlup').get()).toObject(), + await db.query2('simple', lastRes).include('user.myBlup').get(), { id: await lastRes, user: { id: 1, myBlup: { id: 1, flap: 'A', name: 'blup !' } }, @@ -504,9 +473,7 @@ await test('nested', async (t) => { ) deepEqual( - ( - await db.query('simple', lastRes).include('user.myBlup', 'lilBlup').get() - ).toObject(), + await db.query2('simple', lastRes).include('user.myBlup', 'lilBlup').get(), { id: await lastRes, user: { id: 1, myBlup: { id: 1, flap: 'A', name: 'blup !' } }, @@ -516,20 +483,18 @@ await test('nested', async (t) => { ) equal( - (await db.query('simple', lastRes).include('user.myBlup').get()).node().user - .myBlup.flap, + (await db.query2('simple', lastRes).include('user.myBlup').get()).node() + .user.myBlup.flap, 'A', 'Read nested field with getter', ) deepEqual( - ( - await db - .query('simple') - .include('user.myBlup', 'lilBlup', 'user.name') - .range((await lastRes!) - 1, await lastRes) - .get() - ).toObject(), + await db + .query2('simple') + .include('user.myBlup', 'lilBlup', 'user.name') + .range((await lastRes!) - 1, await lastRes) + .get(), [ { id: await lastRes, @@ -545,30 +510,21 @@ await test('nested', async (t) => { ) deepEqual( - ( - await db.query('simple', lastRes).include('user.location.label').get() - ).toObject(), + await db.query2('simple', lastRes).include('user.location.label').get(), { id: await lastRes, user: { id: 1, location: { label: 'BLA BLA' } } }, ) - deepEqual( - ( - await db.query('simple', lastRes).include('user.location').get() - ).toObject(), - { - id: await lastRes, - user: { id: 1, location: { label: 'BLA BLA', x: 1, y: 2 } }, - }, - ) + deepEqual(await db.query2('simple', lastRes).include('user.location').get(), { + id: await lastRes, + user: { id: 1, location: { label: 'BLA BLA', x: 1, y: 2 } }, + }) deepEqual( - ( - await db - .query('simple') - .include('user.myBlup', 'lilBlup') - .range((await lastRes!) - 1, await lastRes) - .get() - ).toObject(), + await db + .query2('simple') + .include('user.myBlup', 'lilBlup') + .range((await lastRes!) - 1, await lastRes) + .get(), [ { id: await lastRes, @@ -590,13 +546,11 @@ await test('nested', async (t) => { ) deepEqual( - ( - await db - .query('simple') - .include('user', 'user.myBlup') - .range((await lastRes!) - 1, await lastRes) - .get() - ).toObject(), + await db + .query2('simple') + .include('user', 'user.myBlup') + .range((await lastRes!) - 1, await lastRes) + .get(), [ { id: await lastRes, @@ -617,12 +571,10 @@ await test('nested', async (t) => { ) deepEqual( - ( - await db - .query('simple', lastRes) - .include('user', 'user.myBlup', 'lilBlup') - .get() - ).toObject(), + await db + .query2('simple', lastRes) + .include('user', 'user.myBlup', 'lilBlup') + .get(), { id: await lastRes, user: { @@ -642,13 +594,7 @@ await test('nested', async (t) => { }) await test('single reference multi refs strings', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -708,7 +654,7 @@ await test('single reference multi refs strings', async (t) => { await db.drain() const result = await db - .query('simple') + .query2('simple') .include('user', 'user.myBlup', 'lilBlup') .get() @@ -723,12 +669,12 @@ await test('single reference multi refs strings', async (t) => { await db.drain() const result2 = await db - .query('simple') + .query2('simple') .filter('age', '=', 5) .include('user', 'user.myBlup', 'lilBlup') .get() - deepEqual(result2.toObject(), [ + deepEqual(result2, [ { id: 2, user: null, @@ -738,16 +684,10 @@ await test('single reference multi refs strings', async (t) => { }) await test('update same value', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { - en: { required: true }, - fr: { required: true }, + en: {}, + fr: {}, }, types: { country: { @@ -768,15 +708,11 @@ await test('update same value', async (t) => { name: 'Country X', }) - await db.update('contestant', { - id, + await db.update('contestant', id, { country: countryId, }) - await db.update('contestant', { - id, + await db.update('contestant', id, { country: countryId, }) - - await wait(1e3) }) diff --git a/test/singleRefQuery.ts b/test/singleRefQuery.ts index 32f678a6be..616ce692eb 100644 --- a/test/singleRefQuery.ts +++ b/test/singleRefQuery.ts @@ -1,15 +1,9 @@ import { deepEqual } from './shared/assert.js' -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' await test('single reference query', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -110,11 +104,11 @@ await test('single reference query', async (t) => { await db.drain() const result2 = await db - .query('simple') + .query2('simple') .filter('user.myBlup.age', '=', 10) .get() - deepEqual(result2.toObject(), [ + deepEqual(result2, [ { id: 1, smurp: 0, @@ -125,13 +119,13 @@ await test('single reference query', async (t) => { ]) const result = await db - .query('simple') + .query2('simple') .filter('lilBlup.age', '=', 20) .filter('flap.power', '=', 10) .include('lilBlup', 'flap') .get() - deepEqual(result.toObject(), [ + deepEqual(result, [ { id: 4, lilBlup: { diff --git a/test/sort/sort.perf.ts b/test/sort/sort.perf.ts index bb6a919e88..9ff74b572d 100644 --- a/test/sort/sort.perf.ts +++ b/test/sort/sort.perf.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' -import test from '../shared/test.js' import { deepEqual, equal } from '../shared/assert.js' +import test from '../shared/test.js' +import { testDb } from '../shared/index.js' await test('1M', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -41,7 +35,7 @@ await test('1M', async (t) => { ) const r = await db - .query('user') + .query2('user') .include('age', 'name', 'email') .range(0, 1e5) .sort('email') diff --git a/test/sort/sort.ts b/test/sort/sort.ts index 6f9436b1bf..d8091201fa 100644 --- a/test/sort/sort.ts +++ b/test/sort/sort.ts @@ -1,15 +1,10 @@ -import { BasedDb } from '../../src/index.js' +import { BasedDb, DbClient, getDefaultHooks } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual, equal } from '../shared/assert.js' await test('basic', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -58,9 +53,7 @@ await test('basic', async (t) => { await db.drain() deepEqual( - ( - await db.query('user').sort('age', 'desc').include('email', 'age').get() - ).toObject(), + await db.query2('user').sort('age', 'desc').include('email', 'age').get(), [ { id: 1, email: 'blap@blap.blap.blap', age: 201 }, { id: 4, email: 'nurp@nurp.nurp.nurp', age: 200 }, @@ -72,9 +65,7 @@ await test('basic', async (t) => { ) deepEqual( - ( - await db.query('user').sort('age', 'asc').include('email', 'age').get() - ).toObject(), + await db.query2('user').sort('age', 'asc').include('email', 'age').get(), [ { id: 5, email: 'z@z.z', age: 1 }, { id: 2, email: 'flap@flap.flap.flap', age: 50 }, @@ -88,7 +79,7 @@ await test('basic', async (t) => { await db.drain() deepEqual( - await db.query('user').sort('email', 'asc').include('email', 'age').get(), + await db.query2('user').sort('email', 'asc').include('email', 'age').get(), [ { id: 1, email: 'blap@blap.blap.blap', age: 201 }, { id: 2, email: 'flap@flap.flap.flap', age: 50 }, @@ -100,7 +91,7 @@ await test('basic', async (t) => { ) deepEqual( - await db.query('user').sort('email', 'desc').include('email', 'age').get(), + await db.query2('user').sort('email', 'desc').include('email', 'age').get(), [ { id: 1, email: 'blap@blap.blap.blap', age: 201 }, { id: 2, email: 'flap@flap.flap.flap', age: 50 }, @@ -120,7 +111,7 @@ await test('basic', async (t) => { await db.drain() deepEqual( - await db.query('user').sort('email').include('email', 'age').get(), + await db.query2('user').sort('email').include('email', 'age').get(), [ { id: 1, email: 'blap@blap.blap.blap', age: 201 }, { id: 2, email: 'flap@flap.flap.flap', age: 50 }, @@ -133,7 +124,7 @@ await test('basic', async (t) => { ) deepEqual( - await db.query('user').sort('age').include('email', 'age').get(), + await db.query2('user').sort('age').include('email', 'age').get(), [ { id: 5, email: 'z@z.z', age: 1 }, { id: 2, email: 'flap@flap.flap.flap', age: 50 }, @@ -152,7 +143,7 @@ await test('basic', async (t) => { await db.drain() deepEqual( - await db.query('user').sort('email').include('email', 'age').get(), + await db.query2('user').sort('email').include('email', 'age').get(), [ { id: 1, email: 'blap@blap.blap.blap', age: 201 }, { id: 6, email: 'dd@dd.dd', age: 999 }, @@ -171,7 +162,7 @@ await test('basic', async (t) => { await db.drain() deepEqual( - await db.query('user').sort('age').include('email', 'age').get(), + await db.query2('user').sort('age').include('email', 'age').get(), [ { id: 5, email: 'z@z.z', age: 1 }, { id: 2, email: 'flap@flap.flap.flap', age: 50 }, @@ -190,7 +181,7 @@ await test('basic', async (t) => { await db.drain() deepEqual( - await db.query('user').sort('age').include('email', 'age').get(), + await db.query2('user').sort('age').include('email', 'age').get(), [ { id: 6, email: 'dd@dd.dd', age: 0 }, { id: 5, email: 'z@z.z', age: 1 }, @@ -202,7 +193,7 @@ await test('basic', async (t) => { 'update mrX to age 0', ) - deepEqual(await db.query('user').sort('age').include('email', 'age').get(), [ + deepEqual(await db.query2('user').sort('age').include('email', 'age').get(), [ { id: 6, email: 'dd@dd.dd', age: 0 }, { id: 5, email: 'z@z.z', age: 1 }, { id: 2, email: 'flap@flap.flap.flap', age: 50 }, @@ -224,7 +215,7 @@ await test('basic', async (t) => { await db.drain() deepEqual( - await db.query('user', ids).include('name', 'age').sort('age').get(), + await db.query2('user', ids).include('name', 'age').sort('age').get(), [ { id: 6, name: 'mr x', age: 0 }, { id: 5, name: 'mr z', age: 1 }, @@ -242,7 +233,7 @@ await test('basic', async (t) => { deepEqual( await db - .query('user', ids) + .query2('user', ids) .include('name', 'age') .sort('age', 'desc') .get(), @@ -268,7 +259,7 @@ await test('basic', async (t) => { deepEqual( await db - .query('user', ids2) + .query2('user', ids2) .include('name', 'age') .sort('age', 'asc') .get(), @@ -298,7 +289,7 @@ await test('basic', async (t) => { deepEqual( await db - .query('user', ids2) + .query2('user', ids2) .include('name', 'age') .sort('age', 'asc') .get(), @@ -328,7 +319,8 @@ await test('basic', async (t) => { await db.drain() equal( - (await db.query('user', ids2).include('name', 'age', 'email').get()).length, + (await db.query2('user', ids2).include('name', 'age', 'email').get()) + .length, 16, 'Check default query after remove', ) @@ -336,7 +328,7 @@ await test('basic', async (t) => { equal( ( await db - .query('user', ids2) + .query2('user', ids2) .include('name', 'age', 'email') .sort('email') .get() @@ -348,7 +340,7 @@ await test('basic', async (t) => { equal( ( await db - .query('user', ids2) + .query2('user', ids2) .include('name', 'age', 'email') .sort('name') .get() @@ -364,7 +356,7 @@ await test('basic', async (t) => { equal( ( await db - .query('user', ids2) + .query2('user', ids2) .include('name', 'age', 'email') .sort('name') .get() @@ -382,7 +374,7 @@ await test('basic', async (t) => { equal( ( await db - .query('user', ids2) + .query2('user', ids2) .include('name', 'age', 'email') .sort('email') .get() @@ -398,7 +390,7 @@ await test('basic', async (t) => { equal( ( await db - .query('user', ids2) + .query2('user', ids2) .include('name', 'age', 'email') .sort('email') .get() @@ -409,15 +401,7 @@ await test('basic', async (t) => { }) await test('sort - from start (1M items)', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - // db.blockSize = 1e5 - - await db.setSchema({ + const schema = { types: { user: { props: { @@ -428,31 +412,36 @@ await test('sort - from start (1M items)', async (t) => { }, }, }, + } as const + const db = new BasedDb({ + path: t.tmp, }) + await db.start({ clean: true }) + const client = await db.setSchema(schema) - db.create('user', { + client.create('user', { name: 'mr blap', age: 100, email: 'blap@blap.blap.blap', }) - db.create('user', { + client.create('user', { name: 'mr flap', age: 50, email: 'flap@flap.flap.flap', }) for (let i = 0; i < 1000e3; i++) { - db.create('user', { + client.create('user', { name: 'mr ' + i, age: i + 101, }) } - await db.drain() + await client.drain() deepEqual( - await db.query('user').include('name').sort('age').range(0, 2).get(), + await client.query2('user').include('name').sort('age').range(0, 2).get(), [ { id: 2, name: 'mr flap' }, { id: 1, name: 'mr blap' }, @@ -460,7 +449,7 @@ await test('sort - from start (1M items)', async (t) => { ) deepEqual( - await db.query('user').include('name').sort('age').range(0, 2).get(), + await client.query2('user').include('name').sort('age').range(0, 2).get(), [ { id: 2, name: 'mr flap' }, { id: 1, name: 'mr blap' }, @@ -468,7 +457,7 @@ await test('sort - from start (1M items)', async (t) => { ) deepEqual( - await db.query('user').include('name').sort('name').range(0, 2).get(), + await client.query2('user').include('name').sort('name').range(0, 2).get(), [ { id: 3, @@ -483,18 +472,17 @@ await test('sort - from start (1M items)', async (t) => { await db.stop() - const newDb = new BasedDb({ + const db2 = new BasedDb({ path: t.tmp, }) - - await newDb.start() - - t.after(() => newDb.destroy()) + await db2.start() + t.after(() => db2.destroy()) + const client2 = new DbClient({ + hooks: getDefaultHooks(db2.server), + }) deepEqual( - ( - await newDb.query('user').include('name').sort('name').range(0, 2).get() - ).toObject(), + await client2.query2('user').include('name').sort('name').range(0, 2).get(), [ { id: 3, @@ -506,18 +494,10 @@ await test('sort - from start (1M items)', async (t) => { }, ], ) - - // newDb.server.destroySortIndex('user', 'age') }) await test('unset value on create', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { dialog: { props: { @@ -529,26 +509,26 @@ await test('unset value on create', async (t) => { }, }) - await db.query('dialog').sort('fun', 'desc').get().toObject() + await db.query2('dialog').sort('fun', 'desc').get() - const id1 = await db.create('dialog', { + await db.create('dialog', { fun: '1', }) - const id2 = await db.create('dialog', { + await db.create('dialog', { fun: '2', }) - const id3 = await db.create('dialog', { + await db.create('dialog', { fun: '3', }) - const id4 = await db.create('dialog', {}) + await db.create('dialog', {}) const id5 = await db.create('dialog', {}) deepEqual( - await db.query('dialog').sort('fun', 'desc').get().toObject(), + await db.query2('dialog').sort('fun', 'desc').get(), [ { id: 3, @@ -574,7 +554,7 @@ await test('unset value on create', async (t) => { 'first', ) - deepEqual(await db.query('dialog').sort('fun', 'desc').get().toObject(), [ + deepEqual(await db.query2('dialog').sort('fun', 'desc').get(), [ { id: 3, fun: '3' }, { id: 2, fun: '2' }, { id: 1, fun: '1' }, @@ -586,7 +566,7 @@ await test('unset value on create', async (t) => { fun: '0', }) - deepEqual(await db.query('dialog').sort('fun', 'desc').get().toObject(), [ + deepEqual(await db.query2('dialog').sort('fun', 'desc').get(), [ { id: 3, fun: '3', @@ -612,7 +592,7 @@ await test('unset value on create', async (t) => { db.delete('dialog', id5) await db.drain() - deepEqual(await db.query('dialog').sort('fun', 'desc').get().toObject(), [ + deepEqual(await db.query2('dialog').sort('fun', 'desc').get(), [ { id: 3, fun: '3', diff --git a/test/sort/sortAlias.ts b/test/sort/sortAlias.ts index 8d2f69bd56..84f5703a81 100644 --- a/test/sort/sortAlias.ts +++ b/test/sort/sortAlias.ts @@ -25,7 +25,7 @@ await test('alias', async (t) => { await db.drain() isSorted( - await db.query('article').sort('email', 'desc').get(), + await db.query2('article').sort('email', 'desc').get(), 'email', 'desc', 'After create', @@ -41,7 +41,7 @@ await test('alias', async (t) => { await db.drain() isSorted( - await db.query('article').sort('email', 'desc').get(), + await db.query2('article').sort('email', 'desc').get(), 'email', 'desc', 'After update', @@ -54,7 +54,7 @@ await test('alias', async (t) => { await db.drain() isSorted( - await db.query('article').sort('email', 'desc').get(), + await db.query2('article').sort('email', 'desc').get(), 'email', 'desc', 'After delete', @@ -70,7 +70,7 @@ await test('alias', async (t) => { await db.drain() isSorted( - await db.query('article').sort('email', 'desc').get(), + await db.query2('article').sort('email', 'desc').get(), 'email', 'desc', 'After create (same values)', diff --git a/test/sort/sortBinary.ts b/test/sort/sortBinary.ts index dbb487a684..b77cbfd95c 100644 --- a/test/sort/sortBinary.ts +++ b/test/sort/sortBinary.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual } from '../shared/assert.js' await test('binary sort', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { binary: { props: { @@ -41,14 +35,14 @@ await test('binary sort', async (t) => { await db.drain() - deepEqual(await db.query('binary').include('name', 'data').get(), [ + deepEqual(await db.query2('binary').include('name', 'data').get(), [ { id: 1, name: 'first', data: buffer1 }, { id: 2, name: 'second', data: buffer2 }, { id: 3, name: 'third', data: buffer3 }, ]) deepEqual( - await db.query('binary').sort('data').include('name', 'data').get(), + await db.query2('binary').sort('data').include('name', 'data').get(), [ { id: 3, name: 'third', data: buffer3 }, { id: 1, name: 'first', data: buffer1 }, @@ -58,7 +52,11 @@ await test('binary sort', async (t) => { ) deepEqual( - await db.query('binary').sort('data', 'desc').include('name', 'data').get(), + await db + .query2('binary') + .sort('data', 'desc') + .include('name', 'data') + .get(), [ { id: 2, name: 'second', data: buffer2 }, { id: 1, name: 'first', data: buffer1 }, diff --git a/test/sort/sortById.ts b/test/sort/sortById.ts index 14c4695453..b6e552ad02 100644 --- a/test/sort/sortById.ts +++ b/test/sort/sortById.ts @@ -1,5 +1,5 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { isSorted } from '../shared/assert.js' const schema = { @@ -17,22 +17,8 @@ const schema = { }, } as const -// import { Schema} f - -// type Schema = typeof schema - await test('sort by id', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => { - return t.backup(db) - }) - - await db.setSchema(schema) + const db = await testDb(t, schema) for (let i = 0; i < 1e6; i++) { db.create('user', { @@ -44,13 +30,13 @@ await test('sort by id', async (t) => { const dbTime = await db.drain() isSorted( - await db.query('user').include('name').sort('id', 'asc').get(), + await db.query2('user').include('name').sort('id', 'asc').get(), 'id', 'asc', ) isSorted( - await db.query('user').include('name').sort('id', 'desc').get(), + await db.query2('user').include('name').sort('id', 'desc').get(), 'id', 'desc', ) @@ -62,7 +48,7 @@ await test('sort by id', async (t) => { } isSorted( - await db.query('user').include('name', 'friends.name').range(0, 1).get(), + await db.query2('user').include('name', 'friends.name').range(0, 1).get(), 'id', 'asc', ) diff --git a/test/sort/sortEnum.ts b/test/sort/sortEnum.ts index f05698749b..38f8399560 100644 --- a/test/sort/sortEnum.ts +++ b/test/sort/sortEnum.ts @@ -1,20 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' -import { deepEqual, equal } from '../shared/assert.js' +import { testDb } from '../shared/index.js' await test('sort Enum', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => { - return t.backup(db) - }) - const status = ['a', 'b', 'c', 'd', 'e', 'f'] - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -39,15 +28,19 @@ await test('sort Enum', async (t) => { const q: any[] = [] for (let i = 0; i < 500; i++) { - q.push(db.query('user', randoIds).get()) + q.push(db.query2('user', randoIds).get()) } q.push( - db.query('user').filter('status', '=', ['a', 'b', 'c']).range(0, 950).get(), + db + .query2('user') + .filter('status', '=', ['a', 'b', 'c']) + .range(0, 950) + .get(), ) q.push( - db.query('user').filter('status', '=', ['d']).range(0, 600).get(), + db.query2('user').filter('status', '=', ['d']).range(0, 600).get(), // .inspect(1000), ) @@ -63,6 +56,6 @@ await test('sort Enum', async (t) => { // 'creating string sort index should not take longer then 500ms', // ) - // const r = await db.query('user').range(0, 1e5).sort('status').get() + // const r = await db.query2('user').range(0, 1e5).sort('status').get() // filter }) diff --git a/test/sort/sortHll.ts b/test/sort/sortHll.ts index ec267131d4..0026081b00 100644 --- a/test/sort/sortHll.ts +++ b/test/sort/sortHll.ts @@ -1,16 +1,11 @@ -import { BasedDb, xxHash64 } from '../../src/index.js' +import { xxHash64 } from '../../src/index.js' import { ENCODER } from '../../src/utils/uint8.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' import { deepEqual, equal } from 'node:assert' await test('sortCardinality', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { derp: 'number', @@ -55,13 +50,11 @@ await test('sortCardinality', async (t) => { }) deepEqual( - ( - await db - .query('article') - .sort('brazilians', 'desc') - .include('count', 'brazilians') - .get() - ).toObject(), + await db + .query2('article') + .sort('brazilians', 'desc') + .include('count', 'brazilians') + .get(), [ { id: 1, @@ -78,9 +71,7 @@ await test('sortCardinality', async (t) => { ) deepEqual( - ( - await db.query('article').sort('count', 'asc').include('derp').get() - ).toObject(), + await db.query2('article').sort('count', 'asc').include('derp').get(), [ { id: 2, @@ -101,13 +92,11 @@ await test('sortCardinality', async (t) => { await db.drain() deepEqual( - ( - await db - .query('article') - .sort('count', 'asc') - .include('count', 'brazilians') - .get() - ).toObject(), + await db + .query2('article') + .sort('count', 'asc') + .include('count', 'brazilians') + .get(), [ { id: 2, @@ -151,10 +140,9 @@ await test('sortCardinality', async (t) => { }) const result = await db - .query('article') + .query2('article') .filter('id', '=', testRecordId) .get() - .toObject() const count = Math.abs(result[0].brazilians) const countError = count - num_brazos @@ -179,9 +167,7 @@ await test('sortCardinality', async (t) => { ) deepEqual( - ( - await db.query('article').sort('count', 'desc').include('count').get() - ).toObject(), + await db.query2('article').sort('count', 'desc').include('count').get(), [ { id: 1, @@ -200,13 +186,11 @@ await test('sortCardinality', async (t) => { await db.drain() deepEqual( - ( - await db - .query('article') - .sort('brazilians', 'desc') - .include('derp', 'count') - .get() - ).toObject(), + await db + .query2('article') + .sort('brazilians', 'desc') + .include('derp', 'count') + .get(), [ { id: 2, @@ -240,12 +224,7 @@ await test('sortCardinality', async (t) => { }) deepEqual( - await db - .query('article') - .sort('count', 'desc') - .include('count') - .get() - .toObject(), + await db.query2('article').sort('count', 'desc').include('count').get(), [ { id: 1008, count: 3 }, { id: 2, count: 2 }, diff --git a/test/sort/sortIds.ts b/test/sort/sortIds.ts index 7c1dc61223..8aac82e22d 100644 --- a/test/sort/sortIds.ts +++ b/test/sort/sortIds.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { isSorted } from '../shared/assert.js' await test('ids', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -42,22 +36,16 @@ await test('ids', async (t) => { await db.drain() const ids: number[] = await Promise.all(res) - isSorted(await db.query('user', ids).sort('age').get(), 'age') - isSorted(await db.query('user', ids).sort('name').get(), 'name') - isSorted(await db.query('user', ids).sort('flap').get(), 'flap') - isSorted(await db.query('user', ids).sort('blurf').get(), 'blurf') - isSorted(await db.query('user', ids).sort('bla').get(), 'bla') - isSorted(await db.query('user', ids).sort('mep').get(), 'mep') + isSorted(await db.query2('user', ids).sort('age').get(), 'age') + isSorted(await db.query2('user', ids).sort('name').get(), 'name') + isSorted(await db.query2('user', ids).sort('flap').get(), 'flap') + isSorted(await db.query2('user', ids).sort('blurf').get(), 'blurf') + isSorted(await db.query2('user', ids).sort('bla').get(), 'bla') + isSorted(await db.query2('user', ids).sort('mep').get(), 'mep') }) await test('references', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -104,7 +92,7 @@ await test('references', async (t) => { isSorted( ( await db - .query('article', id) + .query2('article', id) .include((s) => s('contributors').sort('flap')) .get() ).node().contributors, diff --git a/test/sort/sortNodeId.ts b/test/sort/sortNodeId.ts index 1c0a6ef494..e21a1192b2 100644 --- a/test/sort/sortNodeId.ts +++ b/test/sort/sortNodeId.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual } from '../shared/assert.js' await test.skip('basic sort by id', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { person: { props: { @@ -29,12 +23,11 @@ await test.skip('basic sort by id', async (t) => { deepEqual( await db - .query('person') + .query2('person') .include('name') .sort('id', 'desc') .range(0, 5) - .get() - .toObject(), + .get(), [ { id: 99, diff --git a/test/sort/sortNumber.ts b/test/sort/sortNumber.ts index 96b8f706fa..0ca6f93de0 100644 --- a/test/sort/sortNumber.ts +++ b/test/sort/sortNumber.ts @@ -1,17 +1,10 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual, isSorted } from '../shared/assert.js' await test('numbers', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const animals = ['pony', 'whale', 'dolphin', 'dog'] - - await db.setSchema({ + const db = await testDb(t, { types: { example: { props: { @@ -56,39 +49,39 @@ await test('numbers', async (t) => { u32: { increment: 100 }, }) - isSorted(await db.query('example').sort('u32').include('u32').get(), 'u32') + isSorted(await db.query2('example').sort('u32').include('u32').get(), 'u32') isSorted( - await db.query('example').sort('boolean').include('boolean').get(), + await db.query2('example').sort('boolean').include('boolean').get(), 'boolean', ) - isSorted(await db.query('example').sort('u8').include('u8').get(), 'u8') - isSorted(await db.query('example').sort('i8').include('i8').get(), 'i8') - isSorted(await db.query('example').sort('i16').include('i16').get(), 'i16') - isSorted(await db.query('example').sort('i32').include('i32').get(), 'i32') + isSorted(await db.query2('example').sort('u8').include('u8').get(), 'u8') + isSorted(await db.query2('example').sort('i8').include('i8').get(), 'i8') + isSorted(await db.query2('example').sort('i16').include('i16').get(), 'i16') + isSorted(await db.query2('example').sort('i32').include('i32').get(), 'i32') isSorted( - await db.query('example').sort('number').include('number').get(), + await db.query2('example').sort('number').include('number').get(), 'number', ) isSorted( - await db.query('example').sort('timestamp').include('timestamp').get(), + await db.query2('example').sort('timestamp').include('timestamp').get(), 'timestamp', ) deepEqual( await db - .query('example') + .query2('example') .sort('enum') .include('enum') .get() - .then((v) => v.toObject().map((v) => v.enum)), + .then((v) => v.map((v) => v.enum)), animalsResult.sort((a, b) => animals.indexOf(a) - animals.indexOf(b)), ) db.delete('example', 1) - isSorted(await db.query('example').sort('u32').include('u32').get(), 'u32') + isSorted(await db.query2('example').sort('u32').include('u32').get(), 'u32') await db - .query('example') + .query2('example') .include('enum') .get() - .then((v) => v.toObject().map((v) => v.enum)) + .then((v) => v.map((v) => v.enum)) }) diff --git a/test/sort/sortString.ts b/test/sort/sortString.ts index 51ebb7c376..e2d5247636 100644 --- a/test/sort/sortString.ts +++ b/test/sort/sortString.ts @@ -1,5 +1,6 @@ import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { deepEqual, equal, isSorted } from '../shared/assert.js' import { text } from '../shared/examples.js' import { randomString } from '../../src/utils/index.js' @@ -76,23 +77,23 @@ await test('compression / large strings', async (t) => { ) deepEqual( await db - .query('article') + .query2('article') .include('name', 'article', 'nr') .sort('article') .range(0, len) .get() - .then((v) => v.toObject().map((v) => v.nr)), + .then((v) => v.map((v) => v.nr)), results.sort((a, b) => a.nr - b.nr).map((v) => v.nr), name, ) deepEqual( await db - .query('article') + .query2('article') .include('name', 'article', 'nr') .sort('article', 'desc') .range(0, len) .get() - .then((v) => v.toObject().map((v) => v.nr)), + .then((v) => v.map((v) => v.nr)), results.sort((b, a) => a.nr - b.nr).map((v) => v.nr), name + ' desc', ) @@ -124,10 +125,7 @@ await test('compression / large strings', async (t) => { }) await test('fixed len strings', async (t) => { - const db = new BasedDb({ path: t.tmp }) - t.after(() => db.destroy()) - await db.start({ clean: true }) - await db.setSchema({ + const db = await testDb(t, { types: { article: { props: { @@ -148,7 +146,7 @@ await test('fixed len strings', async (t) => { await db.drain() isSorted( - await db.query('article').include('name', 'nr').sort('name', 'desc').get(), + await db.query2('article').include('name', 'nr').sort('name', 'desc').get(), 'name', 'desc', ) diff --git a/test/sort/sortTimestamp.ts b/test/sort/sortTimestamp.ts index c6fe6b8a3a..0dc84ee8a2 100644 --- a/test/sort/sortTimestamp.ts +++ b/test/sort/sortTimestamp.ts @@ -1,13 +1,9 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { isSorted } from '../shared/assert.js' await test('sort timestamp', async (t) => { - const db = new BasedDb({ path: t.tmp }) - t.after(() => t.backup(db)) - await db.start({ clean: true }) - - await db.setSchema({ + const db = await testDb(t, { types: { event: { props: { @@ -43,7 +39,7 @@ await test('sort timestamp', async (t) => { db.create('event', { name: 'Event Null' }) let ascResult = await db - .query('event') + .query2('event') .sort('startTime', 'asc') .include('startTime', 'name') .get() @@ -56,7 +52,7 @@ await test('sort timestamp', async (t) => { ) let descResult = await db - .query('event') + .query2('event') .sort('startTime', 'asc') .include('startTime', 'name') .get() @@ -72,7 +68,7 @@ await test('sort timestamp', async (t) => { await db.update('event', eventZeroId, { startTime: now - 1000 }) ascResult = await db - .query('event') + .query2('event') .sort('startTime', 'asc') .include('startTime', 'name') .get() @@ -85,7 +81,7 @@ await test('sort timestamp', async (t) => { ) descResult = await db - .query('event') + .query2('event') .sort('startTime', 'desc') .include('startTime', 'name') .get() @@ -101,7 +97,7 @@ await test('sort timestamp', async (t) => { await db.delete('event', eventDId) ascResult = await db - .query('event') + .query2('event') .sort('startTime', 'asc') .include('startTime', 'name') .get() @@ -114,7 +110,7 @@ await test('sort timestamp', async (t) => { ) descResult = await db - .query('event') + .query2('event') .sort('startTime', 'desc') .include('startTime', 'name') .get() @@ -128,11 +124,7 @@ await test('sort timestamp', async (t) => { }) await test('sort multicore', async (t) => { - const db = new BasedDb({ path: t.tmp }) - t.after(() => t.backup(db)) - await db.start({ clean: true }) - - await db.setSchema({ + const db = await testDb(t, { types: { event: { props: { @@ -157,7 +149,7 @@ await test('sort multicore', async (t) => { await db.drain() isSorted( - await db.query('event').sort('startTime', 'asc').get(), + await db.query2('event').sort('startTime', 'asc').get(), 'startTime', 'asc', ) @@ -167,7 +159,7 @@ await test('sort multicore', async (t) => { q.push( (async () => { isSorted( - await db.query('event').sort('startTime', 'asc').get(), + await db.query2('event').sort('startTime', 'asc').get(), 'startTime', 'asc', ) diff --git a/test/string.perf.ts b/test/string.perf.ts index c9164e6f9b..137f93356d 100644 --- a/test/string.perf.ts +++ b/test/string.perf.ts @@ -1,15 +1,10 @@ import { BasedDb } from '../src/index.js' import test from './shared/test.js' import { perf } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('create 1m 2char strings', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { string: 'string', @@ -29,13 +24,7 @@ await test('create 1m 2char strings', async (t) => { }) await test('create 1m 1000char strings', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { test: { string: 'string', diff --git a/test/string.ts b/test/string.ts index 7a0a7b452b..91cd9cfe89 100644 --- a/test/string.ts +++ b/test/string.ts @@ -1,18 +1,11 @@ import { ENCODER, fastPrng } from '../src/utils/index.js' -import { BasedDb } from '../src/index.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' import { deepEqual, equal } from './shared/assert.js' import { euobserver } from './shared/examples.js' await test('simple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e4, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -46,45 +39,37 @@ await test('simple', async (t) => { await db.drain() - deepEqual( - (await db.query('user').include('name', 'snurp').get()).toObject(), - [ - { - id: 1, - snurp: 'derp derp', - name: '', - }, - ], - ) + deepEqual(await db.query2('user').include('name', 'snurp').get(), [ + { + id: 1, + snurp: 'derp derp', + name: '', + }, + ]) - deepEqual( - (await db.query('user').include('name', 'snurp', 'age').get()).toObject(), - [ - { - id: 1, - age: 99, - snurp: 'derp derp', - name: '', - }, - ], - ) + deepEqual(await db.query2('user').include('name', 'snurp', 'age').get(), [ + { + id: 1, + age: 99, + snurp: 'derp derp', + name: '', + }, + ]) deepEqual( - ( - await db - .query('user') - .include( - 'name', - 'snurp', - 'age', - 'email', - 'flap', - 'burp', - 'location.x', - 'location.y', - ) - .get() - ).toObject(), + await db + .query2('user') + .include( + 'name', + 'snurp', + 'age', + 'email', + 'flap', + 'burp', + 'location.x', + 'location.y', + ) + .get(), [ { id: 1, @@ -99,19 +84,16 @@ await test('simple', async (t) => { ], ) - deepEqual( - (await db.query('user').include('location.label').get()).toObject(), - [ - { - id: 1, - location: { - label: 'BLA BLA', - }, + deepEqual(await db.query2('user').include('location.label').get(), [ + { + id: 1, + location: { + label: 'BLA BLA', }, - ], - ) + }, + ]) - deepEqual((await db.query('user').include('location').get()).toObject(), [ + deepEqual(await db.query2('user').include('location').get(), [ { id: 1, location: { @@ -122,36 +104,31 @@ await test('simple', async (t) => { }, ]) - deepEqual( - (await db.query('user').include('location', 'burp').get()).toObject(), - [ - { - id: 1, - burp: 66, - location: { - x: 0, - y: 0, - label: 'BLA BLA', - }, + deepEqual(await db.query2('user').include('location', 'burp').get(), [ + { + id: 1, + burp: 66, + location: { + x: 0, + y: 0, + label: 'BLA BLA', }, - ], - ) + }, + ]) deepEqual( - ( - await db - .query('user') - .include( - 'age', - 'email', - 'flap', - 'burp', - 'location.x', - 'location.y', - 'location.label', - ) - .get() - ).toObject(), + await db + .query2('user') + .include( + 'age', + 'email', + 'flap', + 'burp', + 'location.x', + 'location.y', + 'location.label', + ) + .get(), [ { id: 1, @@ -164,7 +141,7 @@ await test('simple', async (t) => { ], ) - deepEqual((await db.query('user').get()).toObject(), [ + deepEqual(await db.query2('user').get(), [ { id: 1, name: '', @@ -179,13 +156,7 @@ await test('simple', async (t) => { }) await test('string + refs', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -282,13 +253,11 @@ await test('string + refs', async (t) => { await db.drain() deepEqual( - ( - await db - .query('simple') - .include('user.name', 'user.myBlup.name') - .range(0, 1) - .get() - ).toObject(), + await db + .query2('simple') + .include('user.name', 'user.myBlup.name') + .range(0, 1) + .get(), [ { id: 1, @@ -311,9 +280,7 @@ await test('string + refs', async (t) => { await db.drain() deepEqual( - ( - await db.query('simple').include('user.name', 'user.myBlup.name').get() - ).toObject(), + await db.query2('simple').include('user.name', 'user.myBlup.name').get(), [ { id: 1, @@ -335,13 +302,7 @@ await test('string + refs', async (t) => { }) await test('Big string disable compression', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { file: { props: { @@ -358,7 +319,7 @@ await test('Big string disable compression', async (t) => { await db.drain() equal( - (await db.query('file', file).get()).node().contents, + (await db.query2('file', file).get()).node().contents, euobserver, 'Get single id', ) @@ -369,10 +330,10 @@ await test('Big string disable compression', async (t) => { await db.drain() - equal((await db.query('file').get()).size > 1000 * 1e3, true) + equal((await db.query2('file').get()).size > 1000 * 1e3, true) deepEqual( - await db.query('file').get(), + await db.query2('file').get(), [ { id: 1, @@ -399,7 +360,7 @@ await test('Big string disable compression', async (t) => { var mb = 0 let p: any = [] for (let i = 0; i < 9; i++) { - p.push(db.query('file').get()) + p.push(db.query2('file').get()) mb += 74 } await Promise.all(p) @@ -408,7 +369,7 @@ await test('Big string disable compression', async (t) => { mb = 0 p = [] for (let i = 0; i < 9; i++) { - p.push(db.query('file').get()) + p.push(db.query2('file').get()) mb += 74 } await Promise.all(p) @@ -416,13 +377,7 @@ await test('Big string disable compression', async (t) => { }) await test('Big string', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { file: { props: { @@ -440,7 +395,7 @@ await test('Big string', async (t) => { await db.drain() equal( - (await db.query('file', file).get()).node().contents, + (await db.query2('file', file).get()).node().contents, euobserver, 'Get single id', ) @@ -453,7 +408,7 @@ await test('Big string', async (t) => { await db.drain() deepEqual( - (await db.query('file').get()).toObject(), + await db.query2('file').get(), [ { id: 1, @@ -471,13 +426,7 @@ await test('Big string', async (t) => { }) await test('schema compression prop', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { file: { props: { @@ -498,13 +447,13 @@ await test('schema compression prop', async (t) => { await db.drain() const uncompressedSize = await db - .query('file') + .query2('file') .include('contentsUncompressed') .get() .then((v) => v.size) const compressedSize = await db - .query('file') + .query2('file') .include('contentsCompressed') .get() .then((v) => v.size) @@ -518,16 +467,7 @@ await test('schema compression prop', async (t) => { await test('string compression - max buf size', async (t) => { const contents = 'a'.repeat(201) // min size for compression - const db = new BasedDb({ - maxModifySize: Buffer.byteLength(contents) * 2 + 100, - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { file: { props: { @@ -547,7 +487,7 @@ await test('string compression - max buf size', async (t) => { await db.drain() - const items = await db.query('file').get().toObject() + const items = await db.query2('file').get() for (const item of items) { equal(item.contents, contents, 'contents are the same') diff --git a/test/stringUtils.perf.ts b/test/stringUtils.perf.ts index 1a6dfaa220..75c42ea069 100644 --- a/test/stringUtils.perf.ts +++ b/test/stringUtils.perf.ts @@ -1,36 +1,44 @@ import test from './shared/test.js' import { perf } from './shared/assert.js' -import { fastPrng } from '../src/utils/fastPrng.js'; -import { DECODER, ENCODER } from '../src/utils/uint8.js'; +import { fastPrng } from '../src/utils/fastPrng.js' +import { DECODER, ENCODER } from '../src/utils/uint8.js' import native from '../src/native.js' -const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; +const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789' let rnd: ReturnType function createRandomString(length: number): string { const result = new Uint8Array(length) for (let i = 0; i < length; i++) { - result[i] = chars.charCodeAt(rnd(0, chars.length - 1)); + result[i] = chars.charCodeAt(rnd(0, chars.length - 1)) } return DECODER.decode(result) } -let i = 0; -let j = 0; +let i = 0 +let j = 0 await test('string len', async (t) => { const opts = { repeat: 100_000 } as const for (const len of [10, 100, 512, 1024]) { rnd = fastPrng() - await perf(async () => { - i += ENCODER.encode(createRandomString(1024)).byteLength - }, `ENCODER ${len}`, opts) + await perf( + async () => { + i += ENCODER.encode(createRandomString(1024)).byteLength + }, + `ENCODER ${len}`, + opts, + ) rnd = fastPrng() - await perf(async () => { - j += native.stringByteLength(createRandomString(1024)) - }, `stringByteLength ${len}`, opts) + await perf( + async () => { + j += native.stringByteLength(createRandomString(1024)) + }, + `stringByteLength ${len}`, + opts, + ) console.log(i, j) } }) diff --git a/test/subscription/subscription.perf.ts b/test/subscription/subscription.perf.ts index 361b5913b8..a5b48816db 100644 --- a/test/subscription/subscription.perf.ts +++ b/test/subscription/subscription.perf.ts @@ -12,7 +12,7 @@ await test('subscription perf', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema({ types: { @@ -33,7 +33,7 @@ await test('subscription perf', async (t) => { const dx = await db.drain() - const q = db.query('user', 1) + const q = db.query2('user', 1) const y = await q.get() console.log(q.buffer, y) @@ -76,7 +76,7 @@ await test('native single id perf', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) await db.setSchema({ types: { @@ -87,7 +87,7 @@ await test('native single id perf', async (t) => { }, }) - const q = db.query('user', 1).include('flap') + const q = db.query2('user', 1).include('flap') registerQuery(q) registerSubscription(q) diff --git a/test/subscription/subscription.ts b/test/subscription/subscription.ts index eb49a39709..6f6de76073 100644 --- a/test/subscription/subscription.ts +++ b/test/subscription/subscription.ts @@ -42,7 +42,7 @@ await test('subscription', async (t) => { let cnt = 0 const close = clients[1] - .query('user') + .query2('user') .include('derp') .subscribe((q) => { cnt++ @@ -77,7 +77,7 @@ await test('subscription', async (t) => { }) const close2 = clients[1] - .query('user', l) + .query2('user', l) .include('lang') .subscribe((q) => { cnt++ @@ -110,7 +110,7 @@ await test('subscription', async (t) => { }, 200) const close3 = clients[1] - .query('user', l) + .query2('user', l) .include('location') .subscribe((q) => { equal(lastSet, q.node(0).location, 'equals to last set') @@ -145,7 +145,7 @@ await test('subscription error', async (t) => { }) const close = clients[1] - .query('user') + .query2('user') .include('derp') .subscribe( (q) => { @@ -190,11 +190,11 @@ await test('subscribe to refs', async (t) => { let updatesReceived = 0 let size = 0 const close = clients[1] - .query('queue', queueId) + .query2('queue', queueId) .include('items') .subscribe((q) => { updatesReceived++ - const res = q.toObject() + const res = q size = res.items.length const n = performance.now() console.log(updatesReceived, 'update received after', n - d) diff --git a/test/subscription/subscriptionId.ts b/test/subscription/subscriptionId.ts index 4def65411f..aeee38f676 100644 --- a/test/subscription/subscriptionId.ts +++ b/test/subscription/subscriptionId.ts @@ -42,12 +42,12 @@ await test('subscriptionId', async (t) => { var idCounter = 0 var idFieldCounter = 0 - const close = clients[0].query('user', id).subscribe((d) => { + const close = clients[0].query2('user', id).subscribe((d) => { idCounter++ }) const close2 = clients[0] - .query('user', id) + .query2('user', id) .include('name') .subscribe((d) => { idFieldCounter++ @@ -104,11 +104,11 @@ await test('update after remove before subs loop', async (t) => { var cnt1 = 0 var cnt2 = 0 - const close = clients[0].query('user', id).subscribe((d) => { + const close = clients[0].query2('user', id).subscribe((d) => { cnt1++ }) const close2 = clients[0] - .query('user', id) + .query2('user', id) .include('name') .subscribe((d) => { cnt2++ diff --git a/test/subscription/subscriptionIdPartial.ts b/test/subscription/subscriptionIdPartial.ts index e2685b835b..9e028babbd 100644 --- a/test/subscription/subscriptionIdPartial.ts +++ b/test/subscription/subscriptionIdPartial.ts @@ -45,7 +45,7 @@ await test('filter', async (t) => { var idFieldCounter = 0 const close = clients[0] - .query('user', id) + .query2('user', id) .filter('x', '>', 5) .include('name') .subscribe((d) => { @@ -94,12 +94,12 @@ await test('partial update', async (t) => { var idCounter = 0 var idFieldCounter = 0 - const close = clients[0].query('user', id).subscribe((d) => { + const close = clients[0].query2('user', id).subscribe((d) => { idCounter++ }) const close2 = clients[0] - .query('user', id) + .query2('user', id) .include('x', 'gurk', 'rurp', 'flap') .subscribe((d) => { idFieldCounter++ diff --git a/test/subscription/subscriptionIdRemove.ts b/test/subscription/subscriptionIdRemove.ts index bac9f9f312..27d6b23735 100644 --- a/test/subscription/subscriptionIdRemove.ts +++ b/test/subscription/subscriptionIdRemove.ts @@ -68,7 +68,7 @@ await test('subscriptionIdRemove', async (t) => { }) // Subscribe to the user - const subscription = clients[0].query('user', id).subscribe(() => { + const subscription = clients[0].query2('user', id).subscribe(() => { const user = users.get(userId) if (user && user.active) { user.updateCount++ diff --git a/test/subscription/subscriptionMulti.perf.ts b/test/subscription/subscriptionMulti.perf.ts index 80e23b3457..8319c00cd8 100644 --- a/test/subscription/subscriptionMulti.perf.ts +++ b/test/subscription/subscriptionMulti.perf.ts @@ -43,7 +43,7 @@ await test('subscriptionMulti', async (t) => { await clients[1].drain() const close2 = clients[1] - .query('user') + .query2('user') .filter('derp', '>', 1e6 - 10) .subscribe((q) => { console.log(q) diff --git a/test/subscription/subscriptionNow.ts b/test/subscription/subscriptionNow.ts index 5571156e94..5cfc5ca058 100644 --- a/test/subscription/subscriptionNow.ts +++ b/test/subscription/subscriptionNow.ts @@ -66,7 +66,7 @@ await test('simple', async (t) => { var totalLen = 0 const close = clients[0] - .query('user', id) + .query2('user', id) .filter('date', '<', 'now - 2s') .subscribe((d) => { totalLen += d.length @@ -74,7 +74,7 @@ await test('simple', async (t) => { }) const close2 = clients[0] - .query('user') + .query2('user') .filter('date', '<', 'now - 2s') .subscribe((d) => { totalLen += d.length @@ -113,7 +113,7 @@ await test('multiFilter', async (t) => { var total = 0 const close = clients[0] - .query('sequence') + .query2('sequence') .locale('en') .filter('edition', '=', edition) .filter('startTime', '!=', 0) diff --git a/test/subscription/subscriptionSchemaChanges.ts b/test/subscription/subscriptionSchemaChanges.ts index 9e6db95e57..96c88d46fc 100644 --- a/test/subscription/subscriptionSchemaChanges.ts +++ b/test/subscription/subscriptionSchemaChanges.ts @@ -48,13 +48,13 @@ await test('subscription schema changes', async (t) => { }) let cnt = 0 const q = clients[1] - .query('user') + .query2('user') .include('derp', 'lang') .include((s) => { s('friends').include('*') }) .filter('lang', '=', 'de') - const result1 = q.get().toObject() + const result1 = q.get() await clients[0].setSchema({ types: { user: { @@ -76,7 +76,7 @@ await test('subscription schema changes', async (t) => { deepEqual(result1, q.get(), 'first schema change results are correct') const subResults: any[] = [] const close = q.subscribe((q) => { - subResults.push(q.toObject()) + subResults.push(q) cnt++ }) t.after(() => { @@ -133,8 +133,8 @@ await test('better subscription schema changes', async (t) => { }) const results: any[] = [] - db.query('user').subscribe((res) => { - const obj = res.toObject() + db.query2('user').subscribe((res) => { + const obj = res results.push(obj) }) diff --git a/test/subscription/subscriptionWorkers.perf.ts b/test/subscription/subscriptionWorkers.perf.ts index 654a963837..f8992275ff 100644 --- a/test/subscription/subscriptionWorkers.perf.ts +++ b/test/subscription/subscriptionWorkers.perf.ts @@ -19,7 +19,7 @@ await test('subscriptionWorkers', async (t) => { path: t.tmp, }) await db.start({ clean: true }) - t.after(() => t.backup(db)) + t.after(() => t.backup(db.server)) // TODO fix this type const voteCountrySchema: any = countrySchema @@ -67,7 +67,7 @@ await test('subscriptionWorkers', async (t) => { let updates = 0 if (i % 2) { close = client - .query('vote') + .query2('vote') .filter('fromCountry', '=', ['AE', 'NL']) .subscribe((v) => { updates++ diff --git a/test/text/text.ts b/test/text/text.ts index 93c049af5a..53e176cbaf 100644 --- a/test/text/text.ts +++ b/test/text/text.ts @@ -1,17 +1,12 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' import { italy } from '../shared/examples.js' import { deepEqual } from '../shared/assert.js' import { notEqual } from 'node:assert' +import { checksum as q2checksum } from '../../src/db-client/query2/index.js' +import { testDb } from '../shared/index.js' await test('simple', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: { fallback: ['en'] }, @@ -34,9 +29,8 @@ await test('simple', async (t) => { await db.drain() - let result = await db.query('dialog').include('id', 'fun').get() deepEqual( - result.toObject(), + await db.query2('dialog').include('id', 'fun').get(), [ { id: dialogId, @@ -50,9 +44,8 @@ await test('simple', async (t) => { 'Initial dialog with fun property', ) - result = await db.query('dialog').include('id').get() deepEqual( - result.toObject(), + await db.query2('dialog').include('id').get(), [ { id: dialogId, @@ -61,9 +54,8 @@ await test('simple', async (t) => { 'Dialog with only id included', ) - result = await db.query('dialog').locale('it').include('id', 'fun').get() deepEqual( - result.toObject(), + await db.query2('dialog').locale('it').include('id', 'fun').get(), [ { id: dialogId, @@ -73,22 +65,23 @@ await test('simple', async (t) => { 'Dialog with locale set to it', ) - result = await db - .query('dialog') - .locale('it') - .include('id', 'fun') - .filter('fun', 'includes', 'fliperdieflaperdiefloep', { lowerCase: true }) - .get() - deepEqual(result.toObject(), [], 'Filter fun with non-existent text') - - result = await db - .query('dialog') - .include('id', 'fun') - .filter('fun', 'includes', 'italy', { lowerCase: true }) - .get() + deepEqual( + await db + .query2('dialog') + .locale('it') + .include('id', 'fun') + .filter('fun', 'includes', 'fliperdieflaperdiefloep', { lowerCase: true }) + .get(), + [], + 'Filter fun with non-existent text', + ) deepEqual( - result.toObject(), + await db + .query2('dialog') + .include('id', 'fun') + .filter('fun', 'includes', 'italy', { lowerCase: true }) + .get(), [ { id: dialogId, @@ -102,14 +95,13 @@ await test('simple', async (t) => { 'Filter fun with text italy', ) - result = await db - .query('dialog') - .locale('it') - .include('id', 'fun') - .filter('fun', 'includes', 'italy', { lowerCase: true }) - .get() deepEqual( - result.toObject(), + await db + .query2('dialog') + .locale('it') + .include('id', 'fun') + .filter('fun', 'includes', 'italy', { lowerCase: true }) + .get(), [ { id: dialogId, @@ -119,20 +111,22 @@ await test('simple', async (t) => { 'Filter fun with text italy and locale set to it', ) - result = await db - .query('dialog') - .include('id', 'fun') - .filter('fun.en', 'includes', 'italy', { lowerCase: true }) - .get() - deepEqual(result.toObject(), [], 'Filter fun.en with text italy') - - result = await db - .query('dialog') - .include('id', 'fun') - .filter('fun.it', 'includes', 'italy', { lowerCase: true }) - .get() deepEqual( - result.toObject(), + await db + .query2('dialog') + .include('id', 'fun') + .filter('fun.en', 'includes', 'italy', { lowerCase: true }) + .get(), + [], + 'Filter fun.en with text italy', + ) + + deepEqual( + await db + .query2('dialog') + .include('id', 'fun') + .filter('fun.it', 'includes', 'italy', { lowerCase: true }) + .get(), [ { id: dialogId, @@ -146,14 +140,13 @@ await test('simple', async (t) => { 'Filter fun.it with text italy', ) - result = await db - .query('dialog') - .locale('en') - .include('id', 'fun') - .filter('fun.it', 'includes', 'italy', { lowerCase: true }) - .get() deepEqual( - result.toObject(), + await db + .query2('dialog') + .locale('en') + .include('id', 'fun') + .filter('fun.it', 'includes', 'italy', { lowerCase: true }) + .get(), [ { id: 1, @@ -171,9 +164,8 @@ await test('simple', async (t) => { { locale: 'fi' }, ) - result = await db.query('dialog').include('id', 'fun').locale('fi').get() deepEqual( - result.toObject(), + await db.query2('dialog').include('id', 'fun').locale('fi').get(), [ { id: dialogId, @@ -196,9 +188,8 @@ await test('simple', async (t) => { { locale: 'fi' }, ) - result = await db.query('dialog').include('id', 'fun').locale('fi').get() deepEqual( - result.toObject(), + await db.query2('dialog').include('id', 'fun').locale('fi').get(), [ { id: dialogId, @@ -214,9 +205,8 @@ await test('simple', async (t) => { const derpderp = await db.create('dialog', {}) - result = await db.query('dialog', mrSnurfInFinland).get() deepEqual( - result.toObject(), + await db.query2('dialog', mrSnurfInFinland).get(), { id: mrSnurfInFinland, fun: { @@ -228,9 +218,8 @@ await test('simple', async (t) => { 'Query mr snurf in finland', ) - result = await db.query('dialog', derpderp).get() deepEqual( - result.toObject(), + await db.query2('dialog', derpderp).get(), { id: derpderp, fun: { @@ -242,14 +231,13 @@ await test('simple', async (t) => { 'Query empty dialog', ) - result = await db - .query('dialog') - .locale('fi') - .include('id', 'fun') - .filter('fun', '=', '3', { lowerCase: true }) - .get() deepEqual( - result.toObject(), + await db + .query2('dialog') + .locale('fi') + .include('id', 'fun') + .filter('fun', '=', '3', { lowerCase: true }) + .get(), [ { id: dialogId, @@ -259,14 +247,13 @@ await test('simple', async (t) => { 'Exact match on fi', ) - result = await db - .query('dialog') - .locale('fi') - .include('id', 'fun') - .filter('fun', '=', 'mr snurf in finland!', { lowerCase: true }) - .get() deepEqual( - result.toObject(), + await db + .query2('dialog') + .locale('fi') + .include('id', 'fun') + .filter('fun', '=', 'mr snurf in finland!', { lowerCase: true }) + .get(), [ { id: 2, @@ -280,14 +267,12 @@ await test('simple', async (t) => { fun: { en: 'drink some tea!' }, }) - result = await db - .query('dialog') - .include('fun.en') - .filter('fun', '=', 'mr snurf in finland!', { lowerCase: true }) - .get() - deepEqual( - result.toObject(), + await db + .query2('dialog') + .include('fun.en') + .filter('fun', '=', 'mr snurf in finland!', { lowerCase: true }) + .get(), [ { id: 2, @@ -299,13 +284,7 @@ await test('simple', async (t) => { }) await test('search', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, fi: { fallback: ['en'] }, @@ -331,13 +310,12 @@ await test('search', async (t) => { await db.drain() - let result = await db - .query('dialog') - .include('id', 'fun') - .search('finland', 'fun') - .get() deepEqual( - result.toObject(), + await db + .query2('dialog') + .include('id', 'fun') + .search('finland', 'fun') + .get(), [ { id: 1, @@ -351,13 +329,12 @@ await test('search', async (t) => { 'Search for finland', ) - result = await db - .query('dialog') - .include('id', 'fun') - .search('kingdom', 'fun') - .get() deepEqual( - result.toObject(), + await db + .query2('dialog') + .include('id', 'fun') + .search('kingdom', 'fun') + .get(), [ { id: 1, @@ -371,14 +348,8 @@ await test('search', async (t) => { 'Search for kingdom', ) - result = await db - .query('dialog') - .include('id', 'fun') - .search('snurp', 'fun') - .get() - deepEqual( - result.toObject(), + await db.query2('dialog').include('id', 'fun').search('snurp', 'fun').get(), [ { id: 2, @@ -392,14 +363,8 @@ await test('search', async (t) => { 'Search for snurp', ) - result = await db - .query('dialog') - .include('id', 'fun') - .search('derp', 'fun') - .get() - deepEqual( - result.toObject(), + await db.query2('dialog').include('id', 'fun').search('derp', 'fun').get(), [ { id: 2, @@ -413,24 +378,24 @@ await test('search', async (t) => { 'Search for derp', ) - result = await db - .query('dialog') - .locale('fi') - .include('id', 'fun') - .search('derp', 'fun') - .get() - - deepEqual(result.toObject(), [], 'Search for derp with locale set to fi') - - result = await db - .query('dialog') - .locale('en') - .include('id', 'fun') - .search('derp', 'fun') - .get() + deepEqual( + await db + .query2('dialog') + .locale('fi') + .include('id', 'fun') + .search('derp', 'fun') + .get(), + [], + 'Search for derp with locale set to fi', + ) deepEqual( - result.toObject(), + await db + .query2('dialog') + .locale('en') + .include('id', 'fun') + .search('derp', 'fun') + .get(), [ { id: 2, @@ -441,14 +406,12 @@ await test('search', async (t) => { 'Search for derp with locale set to en', ) - result = await db - .query('dialog') - .include('id', 'fun') - .search('derp', 'fun.en') - .get() - deepEqual( - result, + await db + .query2('dialog') + .include('id', 'fun') + .search('derp', 'fun.en') + .get(), [ { id: 2, @@ -464,13 +427,7 @@ await test('search', async (t) => { }) await test('reference text', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: { required: true }, fr: { required: true }, @@ -487,14 +444,14 @@ await test('reference text', async (t) => { }, }) - const country1 = await db.create('country') + const country1 = await db.create('country', {}) await db.create('contestant', { name: 'New contestant', country: country1, }) - deepEqual(await db.query('country').include('*').get().toObject(), [ + deepEqual(await db.query2('country').include('*').get(), [ { id: 1, name: '', @@ -505,37 +462,28 @@ await test('reference text', async (t) => { }, ]) - deepEqual( - await db.query('contestant').include('*', 'country').get().toObject(), - [ - { + deepEqual(await db.query2('contestant').include('*', 'country').get(), [ + { + id: 1, + name: 'New contestant', + country: { id: 1, - name: 'New contestant', - country: { - id: 1, - name: '', - votingLegal: { - en: '', - fr: '', - }, + name: '', + votingLegal: { + en: '', + fr: '', }, }, - ], - ) + }, + ]) }) await test('sort', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, - it: { fallback: 'en' }, - fi: { fallback: 'en' }, + it: { fallback: ['en'] }, + fi: { fallback: ['en'] }, }, types: { dialog: { @@ -547,7 +495,7 @@ await test('sort', async (t) => { }, }) - await db.query('dialog').locale('fi').sort('fun', 'desc').get() + await db.query2('dialog').locale('fi').sort('fun', 'desc').get() const id1 = await db.create('dialog', { fun: { en: '3 en', fi: '1' }, @@ -573,7 +521,7 @@ await test('sort', async (t) => { deepEqual( await db - .query('dialog') + .query2('dialog') .include('fun') .locale('fi') .sort('fun', 'desc') @@ -604,7 +552,7 @@ await test('sort', async (t) => { ) deepEqual( - await db.query('dialog').include('fun').sort('fun.fi', 'desc').get(), + await db.query2('dialog').include('fun').sort('fun.fi', 'desc').get(), [ { id: 3, fun: { en: '1 en', fi: '3', it: '' } }, { id: 2, fun: { en: '2 en', fi: '2', it: '' } }, @@ -616,7 +564,7 @@ await test('sort', async (t) => { deepEqual( await db - .query('dialog') + .query2('dialog') .locale('en') .include('fun') .sort('fun', 'desc') @@ -636,7 +584,7 @@ await test('sort', async (t) => { deepEqual( await db - .query('dialog') + .query2('dialog') .locale('fi') .include('fun') .sort('fun', 'desc') @@ -669,7 +617,7 @@ await test('sort', async (t) => { deepEqual( await db - .query('dialog') + .query2('dialog') .locale('fi') .include('fun') .sort('fun', 'desc') @@ -694,7 +642,7 @@ await test('sort', async (t) => { ], ) - deepEqual(await db.query('dialog').locale('fi').sort('snurf', 'desc').get(), [ + deepEqual(await db.query2('dialog').locale('fi').sort('snurf', 'desc').get(), [ { id: 3, fun: '3', snurf: '3' }, { id: 2, fun: '2', snurf: '2' }, { id: 1, fun: '1', snurf: '1' }, @@ -713,14 +661,14 @@ await test('sort', async (t) => { await db.drain() - deepEqual(await db.query('dialog').locale('fi').sort('snurf', 'desc').get(), [ + deepEqual(await db.query2('dialog').locale('fi').sort('snurf', 'desc').get(), [ { id: 3, fun: '3', snurf: '3' }, { id: 2, fun: '2', snurf: '2' }, { id: 1, fun: '', snurf: '' }, { id: 4, snurf: '', fun: '' }, ]) - deepEqual(await db.query('dialog').locale('fi').sort('fun').get(), [ + deepEqual(await db.query2('dialog').locale('fi').sort('fun').get(), [ { id: 4, snurf: '', fun: '' }, { id: 1, fun: '', snurf: '' }, { id: 2, fun: '2', snurf: '2' }, @@ -733,7 +681,7 @@ await test('sort', async (t) => { await db.drain() - deepEqual(await db.query('dialog').locale('fi').sort('fun').get(), [ + deepEqual(await db.query2('dialog').locale('fi').sort('fun').get(), [ { id: 4, snurf: '', fun: '' }, { id: 3, snurf: '3', fun: '' }, { id: 1, snurf: '', fun: '' }, @@ -750,7 +698,7 @@ await test('sort', async (t) => { ) await db.drain() - deepEqual(await db.query('dialog').locale('fi').sort('fun').get(), [ + deepEqual(await db.query2('dialog').locale('fi').sort('fun').get(), [ { id: 4, snurf: '', fun: '' }, { id: 1, snurf: '', fun: '' }, { id: 3, snurf: '3', fun: '0' }, @@ -768,7 +716,7 @@ await test('sort', async (t) => { await db.drain() deepEqual( - await db.query('dialog').locale('fi').sort('fun').get(), + await db.query2('dialog').locale('fi').sort('fun').get(), [ { id: 4, snurf: '', fun: '' }, { id: 3, snurf: '3', fun: '' }, @@ -786,7 +734,7 @@ await test('sort', async (t) => { await db.drain() deepEqual( - await db.query('dialog').locale('fi').sort('fun').get(), + await db.query2('dialog').locale('fi').sort('fun').get(), [ { id: 4, snurf: '', fun: '' }, { id: 1, snurf: '', fun: '' }, @@ -811,7 +759,7 @@ await test('sort', async (t) => { await db.drain() deepEqual( - await db.query('dialog').locale('fi').sort('fun').get(), + await db.query2('dialog').locale('fi').sort('fun').get(), [ { id: 4, snurf: '', fun: '' }, { id: 3, snurf: '3', fun: '' }, @@ -823,13 +771,7 @@ await test('sort', async (t) => { }) await test('in object only', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -854,20 +796,14 @@ await test('in object only', async (t) => { }, }) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, dict: { nice: { en: 'a', it: '' } }, }) }) await test('correct return from obj', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -893,7 +829,7 @@ await test('correct return from obj', async (t) => { }, }) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, dict: { nice: { en: 'cool guy', it: '' } }, name: { en: '', it: '' }, @@ -901,13 +837,7 @@ await test('correct return from obj', async (t) => { }) await test('clear field', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -923,7 +853,7 @@ await test('clear field', async (t) => { name: { en: 'coolnameEN', it: 'coolnameIT' }, }) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, name: { en: 'coolnameEN', it: 'coolnameIT' }, }) @@ -937,20 +867,14 @@ await test('clear field', async (t) => { { locale: 'en' }, ) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, name: { en: '', it: 'coolnameIT' }, }) }) await test('text and compression', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -966,7 +890,7 @@ await test('text and compression', async (t) => { article: { en: italy, it: 'cool' }, }) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, article: { en: italy, it: 'cool' }, }) @@ -980,20 +904,14 @@ await test('text and compression', async (t) => { { locale: 'en' }, ) - deepEqual(await db.query('user', user1).get(), { + deepEqual(await db.query2('user', user1).get(), { id: 1, article: { en: '', it: 'cool' }, }) }) await test('text and crc32', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -1012,7 +930,7 @@ await test('text and crc32', async (t) => { }, }) - const checksum = (await db.query('user', user1).get()).checksum + const checksum = q2checksum(await db.query2('user', user1).get()) await db.update('user', user1, { article: { @@ -1021,7 +939,7 @@ await test('text and crc32', async (t) => { }, }) - const checksum2 = (await db.query('user', user1).get()).checksum + const checksum2 = q2checksum(await db.query2('user', user1).get()) notEqual(checksum, checksum2, 'Checksum is not the same') }) diff --git a/test/text/textFallback.ts b/test/text/textFallback.ts index cbb5a8fecf..78f2bbde07 100644 --- a/test/text/textFallback.ts +++ b/test/text/textFallback.ts @@ -1,19 +1,11 @@ -import { BasedDb } from '../../src/index.js' -import { deepEqual } from '../shared/index.js' +import { deepEqual, testDb } from '../shared/index.js' import test from '../shared/test.js' await test('textFallback', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e6, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { - en: true, // do not know what required means - nl: { fallback: 'en' }, + en: true, + nl: { fallback: ['en'] }, }, types: { project: { @@ -72,11 +64,11 @@ await test('textFallback', async (t) => { // local second argument // false (block all fallbacks) or lang fallback - // await db.query('project').locale('nl').get().inspect(10) + // await db.query2('project').locale('nl').get().inspect(10) deepEqual( await db - .query('project') + .query2('project') .locale('nl') .include('title') .filter('title', 'includes', 'English') @@ -92,7 +84,7 @@ await test('textFallback', async (t) => { deepEqual( await db - .query('project') + .query2('project') .locale('nl') .include('title') .search('English', 'title') diff --git a/test/text/textFilter.ts b/test/text/textFilter.ts index bea98b96fb..c441ec9831 100644 --- a/test/text/textFilter.ts +++ b/test/text/textFilter.ts @@ -1,27 +1,14 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' import { join } from 'path' import { deepEqual } from '../shared/assert.js' await test('textFilter', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e3 * 1e3, - }) - await db.start({ clean: true }) - - const dbX = new BasedDb({ - path: join(t.tmp, 'x'), - maxModifySize: 1e3 * 1e3, - }) - await dbX.start({ clean: true }) - - t.after(() => t.backup(db)) - t.after(() => dbX.destroy()) - - await db.setSchema({ + const db = await testDb(t, { locales: { - en: { required: true }, + en: { + /* required: true */ + }, nl: {}, }, types: { @@ -38,26 +25,31 @@ await test('textFilter', async (t) => { }, }, }) - - await dbX.setSchema({ - locales: { - en: { required: true }, - nl: {}, - }, - types: { - project: { - props: { - createdAt: { - type: 'timestamp', - on: 'create', + const dbx = testDb( + t, + { + locales: { + en: { + /* required: true */ + }, + nl: {}, + }, + types: { + project: { + props: { + createdAt: { + type: 'timestamp', + on: 'create', + }, + title: { type: 'text' }, + description: { type: 'text' }, + abstract: { type: 'string' }, }, - title: { type: 'text' }, - description: { type: 'text' }, - abstract: { type: 'string' }, }, }, }, - }) + { noBackup: true, path: join(t.tmp, 'x') }, + ) await db.create( 'project', @@ -94,7 +86,7 @@ await test('textFilter', async (t) => { let searchTerms = ['a', 'ab', 'abc', 'abcd'] for (const term of searchTerms) { - await db.query('project').search(term, 'title', 'abstract').get() + await db.query2('project').search(term, 'title', 'abstract').get() // .inspect() } @@ -108,7 +100,7 @@ await test('textFilter', async (t) => { for (const term of searchTerms) { q.push( (async () => { - await db.query('project').search(term, 'title', 'abstract').get() + await db.query2('project').search(term, 'title', 'abstract').get() // .inspect() })(), ) @@ -117,14 +109,7 @@ await test('textFilter', async (t) => { }) await test('compressionFilter', async (t) => { - const db = new BasedDb({ - path: t.tmp, - maxModifySize: 1e3 * 1e3, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { function: { name: 'alias', @@ -201,5 +186,8 @@ await test('compressionFilter', async (t) => { msg: derp, }) - deepEqual(await db.query('event').filter('msg', 'includes', 'derp').get(), []) + deepEqual( + await db.query2('event').filter('msg', 'includes', 'derp').get(), + [], + ) }) diff --git a/test/text/textMany.ts b/test/text/textMany.ts index 4efa76a68b..2ec143817a 100644 --- a/test/text/textMany.ts +++ b/test/text/textMany.ts @@ -1,6 +1,6 @@ -import { BasedDb } from '../../src/index.js' import test from '../shared/test.js' import { fastPrng } from '../../src/utils/fastPrng.js' +import { testDb } from '../shared/index.js' const N_PROPS = 248 @@ -53,13 +53,7 @@ function setTextProps(): { } await test('Many text props', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: makeMaxSchema(), types: { dialog: { @@ -73,5 +67,5 @@ await test('Many text props', async (t) => { //console.log(JSON.stringify(db.server.schemaTypesParsed, null, 2)) await db.create('dialog', setTextProps()) - //console.log(await db.query('dialog').include('*').get()) + //console.log(await db.query2('dialog').include('*').get()) }) diff --git a/test/timestamp.ts b/test/timestamp.ts index 54a17ffafa..0835f9dc84 100644 --- a/test/timestamp.ts +++ b/test/timestamp.ts @@ -1,18 +1,10 @@ import { wait } from '../src/utils/index.js' -import { BasedDb } from '../src/index.js' import { deepEqual, equal } from './shared/assert.js' import test from './shared/test.js' +import { testDb } from './shared/index.js' await test('timestamp', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -36,7 +28,7 @@ await test('timestamp', async (t) => { name: 'youzi', }) - let res = (await db.query('user').get()).toObject() + let res = await db.query2('user').get() if (typeof res[0].createdAt !== 'number') { throw 'should be number' @@ -55,7 +47,7 @@ await test('timestamp', async (t) => { name: 'youzi1', }) - res = (await db.query('user').get()).toObject() + res = await db.query2('user').get() if (!(res[0].updatedAt > res[0].createdAt)) { throw 'updatedAt should be updated after update' @@ -63,7 +55,7 @@ await test('timestamp', async (t) => { const measure = async (v: number) => { deepEqual( - Math.floor((await db.query('user', youzi).get().toObject()).mrDerp / 10), + Math.floor(((await db.query2('user', youzi).get())?.mrDerp ?? 0) / 10), Math.floor(v / 10), ) } @@ -102,25 +94,17 @@ await test('timestamp', async (t) => { updatedAt: overwriteUpdatedAt, }) - const newUser = await db.query('user', jamex).get().toObject() - const updatedUser = await db.query('user', youzi).get().toObject() + const newUser = await db.query2('user', jamex).get() + const updatedUser = await db.query2('user', youzi).get() - equal(newUser.createdAt, overwriteCreatedAt) - equal(newUser.updatedAt, overwriteUpdatedAt) - equal(updatedUser.createdAt, overwriteCreatedAt) - equal(updatedUser.updatedAt, overwriteUpdatedAt) + equal(newUser?.createdAt, overwriteCreatedAt) + equal(newUser?.updatedAt, overwriteUpdatedAt) + equal(updatedUser?.createdAt, overwriteCreatedAt) + equal(updatedUser?.updatedAt, overwriteUpdatedAt) }) await test('timestamp before 1970', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -137,7 +121,6 @@ await test('timestamp before 1970', async (t) => { bday: d, }) - const res = await db.query('user', user).get().toObject() - - equal(res.bday, d.valueOf()) + const res = await db.query2('user', user).get() + equal(res?.bday, d.valueOf()) }) diff --git a/test/types.perf.ts b/test/types.perf.ts new file mode 100644 index 0000000000..5cce72eabe --- /dev/null +++ b/test/types.perf.ts @@ -0,0 +1,53 @@ +import { BasedDb } from '../src/index.js' +import test from './shared/test.js' +import { perf } from './shared/assert.js' +import { fastPrng } from '../src/utils/fastPrng.js' +import { testDb } from './shared/index.js' + +const NR_TYPES = 16384 + +await test('create and access many types', async (t) => { + const prng = fastPrng() + const rndType = () => `type${prng(1, NR_TYPES)}` + + const db = await testDb(t, { + types: Object.fromEntries( + Array.from({ length: 16384 }, (_, i) => [ + `type${i + 1}`, + { bool: 'boolean' }, + ]), + ), + }) + + await perf( + () => { + db.create(rndType(), { + bool: true, + }) + }, + 'create booleans', + { repeat: 1_000_000 }, + ) + + await db.drain() +}) + +await test('create many nodes', async (t) => { + const db = await testDb(t, { + types: { + type: { bool: 'boolean' }, + }, + }) + + await perf( + () => { + db.create('type', { + bool: true, + }) + }, + 'create booleans', + { repeat: 10_000_000 }, + ) + + await db.drain() +}) diff --git a/test/update.perf.ts b/test/update.perf.ts index 1b33fb2f40..d6f58ffdfb 100644 --- a/test/update.perf.ts +++ b/test/update.perf.ts @@ -1,18 +1,11 @@ -import { BasedDb } from '../src/index.js' +import assert from 'node:assert' import test from './shared/test.js' import { perf } from './shared/assert.js' -import assert from 'node:assert' +import { testDb } from './shared/index.js' await test('await updates', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - const status = ['a', 'b', 'c', 'd', 'e', 'f'] - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { diff --git a/test/update.ts b/test/update.ts index 5a6325b732..1ca937fb9f 100644 --- a/test/update.ts +++ b/test/update.ts @@ -1,15 +1,9 @@ -import { BasedDb } from '../src/index.js' import test from './shared/test.js' -import { deepEqual, equal, throws, perf } from './shared/assert.js' +import { deepEqual, equal, throws } from './shared/assert.js' +import { testDb } from './shared/index.js' await test('update with payload.id', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { article: { body: 'string', @@ -17,13 +11,12 @@ await test('update with payload.id', async (t) => { }, }) - const article1 = await db.create('article') - await db.update('article', { - id: article1, + const article1 = await db.create('article', {}) + await db.update('article', article1, { body: 'xxx', }) - deepEqual(await db.query('article').get().toObject(), [ + deepEqual(await db.query2('article').get(), [ { id: 1, body: 'xxx', @@ -32,13 +25,7 @@ await test('update with payload.id', async (t) => { }) await test('update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { mep: { props: { @@ -82,7 +69,7 @@ await test('update', async (t) => { await db.drain() - deepEqual((await db.query('snurp').get()).toObject(), [ + deepEqual(await db.query2('snurp').get(), [ { a: 1, b: 2, @@ -127,7 +114,7 @@ await test('update', async (t) => { await db.drain() - deepEqual((await db.query('snurp').get()).toObject(), [ + deepEqual(await db.query2('snurp').get(), [ { a: 1, b: 2, @@ -156,7 +143,7 @@ await test('update', async (t) => { await db.drain() - deepEqual((await db.query('snurp', 2).get()).toObject(), { + deepEqual(await db.query2('snurp', 2).get(), { a: 0, b: 0, c: 0, @@ -170,7 +157,7 @@ await test('update', async (t) => { }) // for individual queries combine them - deepEqual((await db.query('snurp', [2, 1]).get()).toObject(), [ + deepEqual(await db.query2('snurp', [2, 1]).get(), [ { a: 1, b: 2, @@ -197,7 +184,7 @@ await test('update', async (t) => { }, ]) - const ids: any[] = [] + const ids: number[] = [] let snurpId = 1 for (; snurpId <= 1e6; snurpId++) { ids.push(snurpId) @@ -212,20 +199,18 @@ await test('update', async (t) => { await db.drain() - equal((await db.query('snurp', ids).get()).length, 1e6) + equal((await db.query2('snurp', ids).get())?.length, 1e6) - equal((await db.query('snurp', ids).range(0, 100).get()).length, 100) + equal((await db.query2('snurp', ids).range(0, 100).get()).length, 100) - equal((await db.query('snurp', ids).range(10, 110).get()).length, 100) + equal((await db.query2('snurp', ids).range(10, 110).get()).length, 100) deepEqual( - ( - await db - .query('snurp', ids) - .range(1e5, 1e5 + 2) - .sort('a', 'desc') - .get() - ).toObject(), + await db + .query2('snurp', ids) + .range(1e5, 1e5 + 2) + .sort('a', 'desc') + .get(), [ { id: 900000, @@ -253,7 +238,7 @@ await test('update', async (t) => { const promises: any[] = [] for (var j = 0; j < 1; j++) { for (var i = 0; i < 1e5; i++) { - promises.push(db.query('snurp', i).include('a').get()) + promises.push(db.query2('snurp', i).include('a').get()) } } diff --git a/test/upsert.ts b/test/upsert.ts index 7e88c9c2b6..53183aca20 100644 --- a/test/upsert.ts +++ b/test/upsert.ts @@ -1,7 +1,7 @@ import { deepEqual, equal } from './shared/assert.js' import test from './shared/test.js' import { start } from './shared/multi.js' -import { BasedDb } from '../src/index.js' +import { testDb } from './shared/index.js' await test('upsert', async (t) => { const { @@ -55,22 +55,35 @@ await test('upsert', async (t) => { }) } - await client2.upsert('article', { - externalId: 'flap', - name: 'flap', - contributors: [ - client2.upsert('user', { - email: 'james@flapmail.com', - name: 'James!', - }), - client2.upsert('user', { - email: 'derp@flapmail.com', - name: 'Derp!', - }), - ], - }) + await client2.upsert( + 'article', + { externalId: 'flap' }, + { + name: 'flap', + contributors: [ + client2.upsert( + 'user', + { + email: 'james@flapmail.com', + }, + { + name: 'James!', + }, + ), + client2.upsert( + 'user', + { + email: 'derp@flapmail.com', + }, + { + name: 'Derp!', + }, + ), + ], + }, + ) - deepEqual(await client1.query('article').include('*', '**').get(), [ + deepEqual(await client1.query2('article').include('*', '**').get(), [ { id: 1, externalId: 'flap', @@ -82,53 +95,3 @@ await test('upsert', async (t) => { }, ]) }) - -await test('upsert no alias', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start() - t.after(() => db.destroy()) - // t.after(() => db.stop()) - - await db.setSchema({ - types: { - lala: { - props: { - lele: 'string', - lili: 'number', - }, - }, - }, - }) - - // await db.drain() - - equal( - (await db.query('lala').include('*').get().toObject()).length, - 0, - 'before upsert', - ) - - await db.upsert('lala', { - lele: 'lulu', - lili: 813, - }) - - equal( - (await db.query('lala').include('*').get().toObject()).length, - 1, - 'after upsert', - ) - - await db.upsert('lala', { - lele: 'lulu', - lili: 813, - }) - - equal( - (await db.query('lala').include('*').get().toObject()).length, - 2, - 'upsert no alias should insert', - ) -}) diff --git a/test/validation/validation.ts b/test/validation/validation.ts index 9707b1313f..fcc0a8f1a4 100644 --- a/test/validation/validation.ts +++ b/test/validation/validation.ts @@ -1,17 +1,10 @@ import { BasedDb } from '../../src/index.js' import { deepEqual, throws } from '../shared/assert.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' await test('update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, de: {} }, types: { user: { @@ -46,7 +39,8 @@ await test('update', async (t) => { }) await throws(async () => { - await db.query('derp', { flap: 'snru' }).get() + // @ts-expect-error + await db.query2('derp', { flap: 'snru' }).get() }, true) db.create('user', { @@ -58,6 +52,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { text: { + // @ts-expect-error en: 123, }, }) @@ -67,6 +62,7 @@ await test('update', async (t) => { db.create( 'user', { + // @ts-expect-error text: 123, }, { locale: 'en' }, @@ -75,6 +71,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { + // @ts-expect-error text: { xh: 'hello!' }, }) }) @@ -91,11 +88,13 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { + // @ts-expect-error name: 1, }) }) await throws(async () => { + // @ts-expect-error await db.create('user', { date: {} }) }) @@ -104,11 +103,13 @@ await test('update', async (t) => { }) await throws(async () => { + // @ts-expect-error await db.create('user', { on: 255 + 1 }) }) await throws(async () => { db.create('user', { + // @ts-expect-error number: 'nla', }) }) @@ -127,6 +128,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { + // @ts-expect-error derp: [1, 2, 3, 4], }) }) @@ -149,29 +151,39 @@ await test('update', async (t) => { await throws(async () => { await db.update('user', cId, { + // @ts-expect-error cardinality: ['a', 'b', 1], }) }) - deepEqual(await db.query('user', cId).include('cardinality').get(), { - id: await cId, - cardinality: 2, - }) + deepEqual( + await db + .query2('user', await cId) + .include('cardinality') + .get(), + { + id: await cId, + cardinality: 2, + }, + ) await throws(async () => { db.create('user', { + // @ts-expect-error cardinality: [1, 2, 3, 4], }) }) await throws(async () => { db.create('user', { + // @ts-expect-error cardinality: { id: [1, 2, 3, 4] }, }) }) await throws(async () => { db.create('user', { + // @ts-expect-error friend: { id: undefined }, }) }) @@ -184,6 +196,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { + // @ts-expect-error name: 1, }) }) @@ -191,6 +204,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { name: 'jamex', + // @ts-expect-error friend: bad, }) }) @@ -198,6 +212,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { name: 'fred', + // @ts-expect-error connections: [good, bad], }) }) @@ -205,6 +220,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { name: 'wrongRating', + // @ts-expect-error u32: 'not a number', }) }) @@ -215,6 +231,7 @@ await test('update', async (t) => { async () => { db.create('user', { name: 'wrongRating', + // @ts-expect-error u32: 'not a number', }).catch((err) => { cnt++ @@ -233,6 +250,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { name: 'fred', + // @ts-expect-error connections: [good, bad], }) }) @@ -240,6 +258,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { name: 'wrongRating', + // @ts-expect-error u32: 'not a number', }) }) @@ -247,6 +266,7 @@ await test('update', async (t) => { await throws(() => db.create('user', { name: 'nope', + // @ts-expect-error randomField: true, }), ) @@ -263,7 +283,7 @@ await test('update', async (t) => { await db.drain() deepEqual( - await db.query('user').include('name', 'friend').get(), + await db.query2('user').include('name', 'friend').get(), [ { id: 1, friend: null, name: '' }, { id: 2, friend: null, name: '' }, @@ -324,6 +344,7 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { connections: { + // @ts-expect-error set: [], }, }) @@ -331,19 +352,21 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { + // @ts-expect-error connections: 1, }) }) await throws(async () => { db.create('user', { + // @ts-expect-error connections: { add: ['x'], }, }) }) - const id = await db.create('user', undefined) + const id = await db.create('user') await throws( async () => { @@ -441,24 +464,30 @@ await test('update', async (t) => { 'Too small out of bounds value should throw (int8)', ) - db.create('user', { - binaryData: 'not a binary', + await throws(async () => { + db.create('user', { + // @ts-expect-error + binaryData: 'not a binary', + }) }) await throws(async () => { db.create('user', { + // @ts-expect-error binaryData: 12345, }) }) await throws(async () => { db.create('user', { + // @ts-expect-error binaryData: { some: 'object' }, }) }) await throws(async () => { db.create('user', { + // @ts-expect-error binaryData: [1, 2, 3, 4], }) }) @@ -469,21 +498,12 @@ await test('update', async (t) => { }) await test('query', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - const drip = ['dope', 'cringe', 'meh'] - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, - it: { fallback: 'en' }, - fi: { fallback: 'en' }, + it: { fallback: ['en'] }, + fi: { fallback: ['en'] }, }, types: { todo: { @@ -527,53 +547,61 @@ await test('query', async (t) => { await throws( // @ts-ignore - () => db.query('user', '1').get(), + () => db.query2('user', '1').get(), false, 'throw on string as id', ) - await throws(() => db.query('derp').get(), false, 'non existing type') + // @ts-expect-error + await throws(() => db.query2('derp').get(), false, 'non existing type') - // @ts-ignore - await throws(() => db.query('user', 'derp derp').get(), false, 'incorrect id') + await throws( + // @ts-expect-error + () => db.query2('user', 'derp derp').get(), + false, + 'incorrect id', + ) await throws( - () => db.query('user', [1, 1221.11, 0]).get(), + () => db.query2('user', [1, 1221.11, 0]).get(), false, 'incorrect ids', ) await throws( - // @ts-ignore - () => db.query('user', [1, 'X', {}]).get(), + // @ts-expect-error + () => db.query2('user', [1, 'X', {}]).get(), false, 'incorrect ids 2', ) const x = new Uint32Array(new Array(2e6).map((v) => 1)) - await throws(() => db.query('user', x).get(), false, 'incorrect ids 2') + // @ts-expect-error + await throws(() => db.query2('user', x).get(), false, 'incorrect ids 2') await throws( - () => db.query('user').include('derp').get(), + // @ts-expect-error + () => db.query2('user').include('derp').get(), false, 'non existing field in include', ) await throws( - // @ts-ignore - () => db.query('user', { $id: 1 }).get(), + // @ts-expect-error + () => db.query2('user', { $id: 1 }).get(), false, 'incorrect alias', ) await throws( - () => db.query('user').filter('derp', '=', true).get(), + // @ts-expect-error + () => db.query2('user').filter('derp', '=', true).get(), false, 'non existing field in filter', ) await db - .query('user') + .query2('user') .filter('friend.description.en', '=', 'nice') .get() .catch((err) => { @@ -581,77 +609,82 @@ await test('query', async (t) => { }) await throws( - () => db.query('user').filter('friend.description.flap', '=', 'nice').get(), + () => + // @ts-expect-error + db.query2('user').filter('friend.description.flap', '=', 'nice').get(), false, 'non existing lang in filter', ) await throws( - () => db.query('user').filter('friend.description.flap', '=', 'nice').get(), + () => + // @ts-expect-error + db.query2('user').filter('friend.description.flap', '=', 'nice').get(), false, 'non existing lang in filter', ) await throws( - () => db.query('user').filter('friend.description.fr', '=', 'nice').get(), + // @ts-expect-error + () => db.query2('user').filter('friend.description.fr', '=', 'nice').get(), false, 'non existing lang in filter', ) await throws( - () => db.query('user').include('friend.description.flap').get(), + // @ts-expect-error + () => db.query2('user').include('friend.description.flap').get(), false, 'non existing lang in include #1', ) await throws( - () => db.query('user').include('friend.description.fr').get(), + // @ts-expect-error + () => db.query2('user').include('friend.description.fr').get(), false, 'non existing lang in include #2', ) await throws( - // @ts-ignore - () => db.query('user').filter('friend.description.fr', 'derp', 1).get(), + // @ts-expect-error + () => db.query2('user').filter('friend.description.fr', 'derp', 1).get(), false, 'Filter non existing operator', ) await throws( - // @ts-ignore - () => db.query('user').filter('friend.description.en', '>', 1).get(), + // @ts-expect-error + () => db.query2('user').filter('friend.description.en', '>', 1).get(), false, 'Filter incorrect operator on text', ) await throws( - // @ts-ignore - () => db.query('user').filter('rating', 'includes', 1).get(), + () => db.query2('user').filter('rating', 'includes', 1).get(), false, 'Filter incorrect operator on uint32', ) await throws( - // @ts-ignore - () => db.query('user').filter('isOn', 'includes', 1).get(), + // @ts-expect-error + () => db.query2('user').filter('isOn', 'includes', 1).get(), false, 'Filter incorrect operator on bool', ) - await db.query('user').filter('isOn', true).get() - await db.query('user').filter('isOn').get() - await db.query('user').filter('isOn', false).get() + // await db.query2('user').filter('isOn', true).get() + // await db.query2('user').filter('isOn').get() + // await db.query2('user').filter('isOn', false).get() await throws( - // @ts-ignore - () => db.query('user').filter('friend', 'includes', 1).get(), + () => db.query2('user').filter('friend', 'includes', 1).get(), false, 'Filter incorrect operator on reference', ) await throws( - // @ts-ignore - () => db.query('user').filter('connections', 'like', 1).get(), + // @ts-expect-error + () => db.query2('user').filter('connections', 'like', 1).get(), false, 'Filter incorrect operator on references', ) @@ -671,40 +704,37 @@ await test('query', async (t) => { deepEqual( await db - .query('user') + .query2('user') .filter('name', 'includes', '') .include('name') - .get() - .toObject(), + .get(), allData, 'skip empty string', ) deepEqual( await db - .query('user', []) + .query2('user', []) .filter('name', 'includes', '') .include('name') - .get() - .toObject(), + .get(), [], 'ignore empty ids', ) - deepEqual( - await db - .query('user') - .filter('friend.description.en', '=', undefined) - .include('name') - .get() - .toObject(), - allData, - 'skip undefined', - ) + // deepEqual( + // await db + // .query2('user') + // .filter('friend.description.en', '=', undefined) + // .include('name') + // .get(), + // allData, + // 'skip undefined', + // ) await throws( // @ts-ignore - () => db.query('user').filter('friend.description', 'like', 999).get(), + () => db.query2('user').filter('friend.description', 'like', 999).get(), false, 'Filter incorrect value on text', ) @@ -714,13 +744,14 @@ await test('query', async (t) => { () => db // @ts-ignore - .query({ id: 1, rating: 'derp' }) + .query2({ id: 1, rating: 'derp' }) .get(), false, 'Incorrect payload', ) - const q = db.query('flap') + // @ts-expect-error + const q = db.query2('flap') for (let i = 0; i < 2; i++) { await throws( async () => { @@ -736,17 +767,17 @@ await test('query', async (t) => { () => db // @ts-ignore - .query({ id: 1, rating: 'derp' }) + .query2({ id: 1, rating: 'derp' }) .get(), false, 'Incorrect payload', ) - await db.query('user').sort('drip', 'desc').get() + await db.query2('user').sort('drip', 'desc').get() await throws( async () => { - await db.query('user').sort('flurp').get() + await db.query2('user').sort('flurp').get() }, false, 'Non existing field on sort', @@ -754,102 +785,70 @@ await test('query', async (t) => { await throws(async () => { // @ts-ignore - await db.query('user').sort('drip', 'gurk').get() + await db.query2('user').sort('drip', 'gurk').get() }, false) await throws(async () => { - await db.query('user').sort('connections').get() + await db.query2('user').sort('connections').get() }, false) await throws(async () => { - await db.query('user').sort('friend').get() + await db.query2('user').sort('friend').get() }, false) await throws(async () => { - await db.query('user', 1).sort('drip').get() + await db.query2('user', 1).sort('drip').get() }, false) - await db.query('user', []).sort('drip').get() + await db.query2('user', []).sort('drip').get() - await db.query('user', [1, 2, 3]).sort('drip').get() + await db.query2('user', [1, 2, 3]).sort('drip').get() await throws(async () => { - await db.query('user').sort('drip').range(0, -10).get() + await db.query2('user').sort('drip').range(0, -10).get() }, false) await throws(async () => { - // @ts-ignore - await db.query('user').sort('drip').range('derp', -100).get() + // @ts-expect-error + await db.query2('user').sort('drip').range('derp', -100).get() }, false) await throws(async () => { - await db.query('user').locale('az').get() + // @ts-expect-error + await db.query2('user').locale('az').get() }, false) - await throws(async () => { - await db.query('user').search('xyz', 'derpderp').get() - }, false) + // await throws(async () => { + // // @ts-expect-error + // await db.query2('user').search('xyz', 'derpderp').get() + // }, false) - await throws(async () => { - await db.query('user').search('xyz', 'derpderp').get() - }, false) + // await throws(async () => { + // // @ts-expect-error + // await db.query2('user').search('xyz', 'derpderp').get() + // }, false) - await throws(async () => { - await db.query('user').search('xyz', 'blap').get() - }, false) + // await throws(async () => { + // // @ts-expect-error + // await db.query2('user').search('xyz', 'blap').get() + // }, false) - await throws(async () => { - // @ts-ignore - await db.query('user').search([1, 2, 3, 4], 'blap').get() - }, false) + // await throws(async () => { + // // @ts-expect-error + // await db.query2('user').search([1, 2, 3, 4], 'blap').get() + // }, false) await throws(async () => { const envs = await db - .query('user') + .query2('user') + // @ts-expect-error .filter('connections', 'includes', 0) .get() }, false) }) -await test('query - no schema', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - - setTimeout(async () => { - await db.setSchema({ - types: { - user: { - props: { - name: 'string', - }, - }, - }, - }) - }, 100) - - await throws(async () => { - await db.query('ploink').get() - }, false) - - await db.schemaIsSet() - deepEqual(await db.query('user').get().toObject(), []) -}) - await test('minmax', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -868,7 +867,7 @@ await test('minmax', async (t) => { number: 0.5, }) - deepEqual(await db.query('user', id).get().toObject(), { + deepEqual(await db.query2('user', id).get(), { name: 'luigi', number: 0.5, id, @@ -876,13 +875,7 @@ await test('minmax', async (t) => { }) await test('set text without locale', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) // commenting this out fixes the crash part - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, it: {}, @@ -915,14 +908,7 @@ await test('set text without locale', async (t) => { }) await test('range validation', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -940,110 +926,110 @@ await test('range validation', async (t) => { }) } - await db.query('user').range(0, 5).get() - await db.query('user').range(1, 10).get() - await db.query('user').range(0, 1).get() - await db.query('user').range(100, 101).get() - await db.query('user').range(1000, 1001).get() - await db.query('user').range(0, undefined).get() + await db.query2('user').range(0, 5).get() + await db.query2('user').range(1, 10).get() + await db.query2('user').range(0, 1).get() + await db.query2('user').range(100, 101).get() + await db.query2('user').range(1000, 1001).get() + await db.query2('user').range(0, undefined).get() await throws(async () => { - await db.query('user').range(0, 0).get() + await db.query2('user').range(0, 0).get() }, false) await throws(async () => { - await db.query('user').range(5, 5).get() + await db.query2('user').range(5, 5).get() }, false) await throws(async () => { - await db.query('user').range(4294967295, 4294967295).get() + await db.query2('user').range(4294967295, 4294967295).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range('invalid', 5).get() + await db.query2('user').range('invalid', 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(-1, 5).get() + await db.query2('user').range(-1, 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(1.5, 5).get() + await db.query2('user').range(1.5, 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(null, 5).get() + await db.query2('user').range(null, 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(undefined, 5).get() + await db.query2('user').range(undefined, 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range({}, 5).get() + await db.query2('user').range({}, 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range([], 5).get() + await db.query2('user').range([], 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(4294967296, 5).get() + await db.query2('user').range(4294967296, 5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(0, 'invalid').get() + await db.query2('user').range(0, 'invalid').get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(0, -1).get() + await db.query2('user').range(0, -1).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(0, 1.5).get() + await db.query2('user').range(0, 1.5).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(0, null).get() + await db.query2('user').range(0, null).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(0, {}).get() + await db.query2('user').range(0, {}).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(0, []).get() + await db.query2('user').range(0, []).get() }, false) await throws(async () => { // @ts-ignore - await db.query('user').range(0, 4294967296).get() + await db.query2('user').range(0, 4294967296).get() }, false) await throws(async () => { - await db.query('user').range(5, 3).get() + await db.query2('user').range(5, 3).get() }, false) - await db.query('user').filter('rating', '>', 0).range(0, 5).get() - await db.query('user').sort('rating').range(0, 5).get() - await db.query('user').include('name').range(0, 5).get() + await db.query2('user').filter('rating', '>', 0).range(0, 5).get() + await db.query2('user').sort('rating').range(0, 5).get() + await db.query2('user').include('name').range(0, 5).get() const result = await db - .query('user') + .query2('user') .range(0, 5) .include('name', 'rating') .get() @@ -1051,11 +1037,7 @@ await test('range validation', async (t) => { }) await test('binary validation', async (t) => { - const db = new BasedDb({ path: t.tmp }) - await db.start({ clean: true }) - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { types: { user: { props: { @@ -1070,47 +1052,55 @@ await test('binary validation', async (t) => { name: 'test', binaryData: Buffer.from([1, 2, 3, 4]), }) - await db.create('user', { name: 'test2', binaryData: 'binary string' }) + + // await db.create('user', { name: 'test2', binaryData: 'binary string' }) await db.create('user', { name: 'test3', binaryData: new Uint8Array([5, 6, 7, 8]), }) await throws(async () => { + // @ts-expect-error await db.create('user', { name: 'test4', binaryData: 123 }) }) await throws(async () => { + // @ts-expect-error await db.create('user', { name: 'test5', binaryData: { some: 'object' } }) }) await throws(async () => { + // @ts-expect-error await db.create('user', { name: 'test6', binaryData: [1, 2, 3] }) }) await db - .query('user') + .query2('user') .filter('binaryData', '=', Buffer.from([1, 2, 3, 4])) .get() - await db.query('user').filter('binaryData', '=', 'binary string').get() + // await db.query2('user').filter('binaryData', '=', 'binary string').get() await db - .query('user') + .query2('user') .filter('binaryData', '=', new Uint8Array([5, 6, 7, 8])) .get() await throws(async () => { - await db.query('user').filter('binaryData', '=', 123).get() + // @ts-expect-error + await db.query2('user').filter('binaryData', '=', 123).get() }) await throws(async () => { - await db.query('user').filter('binaryData', '=', {}).get() + // @ts-expect-error + await db.query2('user').filter('binaryData', '=', {}).get() }) await throws(async () => { - await db.query('user').filter('binaryData', '=', { some: 'object' }).get() + // @ts-expect-error + await db.query2('user').filter('binaryData', '=', { some: 'object' }).get() }) await throws(async () => { - await db.query('user').filter('binaryData', '=', [1, 2, 3]).get() + // @ts-expect-error + await db.query2('user').filter('binaryData', '=', [1, 2, 3]).get() }) }) diff --git a/test/validation/validationAdvanced.ts b/test/validation/validationAdvanced.ts index 23c02b64c6..a8e85bd24b 100644 --- a/test/validation/validationAdvanced.ts +++ b/test/validation/validationAdvanced.ts @@ -1,18 +1,10 @@ import { convertToTimestamp } from '../../src/utils/index.js' -import { BasedDb } from '../../src/index.js' import { throws } from '../shared/assert.js' import test from '../shared/test.js' +import { testDb } from '../shared/index.js' await test('simple min / max validation', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, de: {} }, types: { user: { @@ -137,15 +129,7 @@ await test('simple min / max validation', async (t) => { }) await test('step validation', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, de: {} }, types: { user: { @@ -222,15 +206,7 @@ await test('step validation', async (t) => { }) await test('min / max validation on reference edges', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { thing: {}, edgeUser: { @@ -534,15 +510,7 @@ await test('min / max validation on reference edges', async (t) => { }) await test('step validation on reference edges', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { types: { thing: {}, edgeUser: { @@ -708,20 +676,11 @@ await test('step validation on reference edges', async (t) => { }) await test('min / max / step validation on reference edges timestamp + string format', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => db.destroy()) - const minDateStr = '01/01/2000' const minTs = convertToTimestamp(minDateStr) const maxOffsetSeconds = 10 const stepMs = 1 // 1 second - - await db.setSchema({ + const db = await testDb(t, { types: { thing: {}, edgeUser: { diff --git a/test/validation/validationCustom.ts b/test/validation/validationCustom.ts index b2aabdf1cf..e315095684 100644 --- a/test/validation/validationCustom.ts +++ b/test/validation/validationCustom.ts @@ -1,17 +1,9 @@ -import { BasedDb } from '../../src/index.js' import { throws } from '../shared/assert.js' +import {testDb} from '../shared/index.js' import test from '../shared/test.js' await test('custom', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, de: {} }, types: { user: { diff --git a/test/validation/validationReferences.ts b/test/validation/validationReferences.ts index 710b2dab5b..4a994cc925 100644 --- a/test/validation/validationReferences.ts +++ b/test/validation/validationReferences.ts @@ -1,17 +1,9 @@ -import { BasedDb } from '../../src/index.js' import { deepEqual, throws } from '../shared/assert.js' +import { testDb } from '../shared/index.js' import test from '../shared/test.js' await test('update', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ + const db = await testDb(t, { locales: { en: {}, de: {} }, types: { flap: { @@ -44,7 +36,8 @@ await test('update', async (t) => { }) await throws(async () => { - return db.query('flap').include('x.$derp').get() + // @ts-expect-error + return db.query2('flap').include('x.$derp').get() }, 'Non existing reference on flap') const user1 = await db.create('user', { name: 'user1' }) @@ -71,7 +64,7 @@ await test('update', async (t) => { deepEqual( await db - .query('user', userWithConn) + .query2('user', userWithConn) .include('name', 'connections.id') .get(), { @@ -83,61 +76,72 @@ await test('update', async (t) => { ) await throws(async () => { + // @ts-expect-error db.create('user', { connections: user1 }) }, 'Expected array for references field connections') await throws(async () => { + // @ts-expect-error db.update('user', userWithConn, { connections: user1 }) }, 'Expected array or object for references field connections') await throws(async () => { + // @ts-expect-error db.create('user', { connections: [user1, 'not an id'] }) }, 'Invalid reference "not an id" for field connections') await throws(async () => { + // @ts-expect-error db.create('user', { connections: [user1, {}] }) }, 'Invalid reference "[object Object]" for field connections') await throws(async () => { + // @ts-expect-error db.create('user', { connections: [user1, invalidId] }) }, 'Invalid reference "usr_invalid" for field connections') await throws(async () => { + // @ts-expect-error db.update('user', userWithConn, { connections: { add: [invalidId] } }) }, 'Invalid reference "usr_invalid" for field add in connections') await throws(async () => { db.update('user', userWithConn, { + // @ts-expect-error connections: { update: [invalidId] }, }) }, 'Invalid reference "usr_invalid" for field update in connections') await throws(async () => { + // @ts-expect-error db.update('user', userWithConn, { connections: { add: [invalidId] } }) }, 'Invalid reference "usr_invalid" for field add in connections') await throws(async () => { db.update('user', userWithConn, { + // @ts-expect-error connections: { delete: [invalidId] }, }) }, 'Invalid reference "usr_invalid" for field delete in connections') await throws(async () => { + // @ts-expect-error db.update('user', userWithConn, { connections: { update: 'bla' } }) }, 'Expected array for field set in connections') await throws(async () => { + // @ts-expect-error db.update('user', userWithConn, { connections: { add: {} } }) }, 'Expected array for field add in connections') await throws(async () => { + // @ts-expect-error db.update('user', userWithConn, { connections: { delete: 123 } }) }, 'Expected array for field delete in connections') // --- Friends (with Edges) Validation --- const now = Date.now() const badge = new Uint8Array([1, 2, 3]) - const badgeString = 'badge-string' // String is also valid for binary const userWithFriends = await db.create('user', { name: 'friendlyUser', @@ -148,7 +152,7 @@ await test('update', async (t) => { $friendsSince: now, $friendShipBadge: badge, }, - { id: user2, $bestFriend: false, $friendShipBadge: badgeString }, // Minimal edge data + string badge + { id: user2, $bestFriend: false, $friendShipBadge: badge }, // Minimal edge data ], }) @@ -170,7 +174,7 @@ await test('update', async (t) => { }) deepEqual( - await db.query('user', userWithFriends).include('name', 'friends').get(), + await db.query2('user', userWithFriends).include('name', 'friends').get(), { id: 5, name: 'friendlyUser', @@ -190,7 +194,7 @@ await test('update', async (t) => { deepEqual( await db - .query('user', userWithFriends) + .query2('user', userWithFriends) .include( 'name', 'friends.$bestFriend', @@ -214,6 +218,9 @@ await test('update', async (t) => { id: 3, $bestFriend: true, $friendsSince: new Date('09/02/2000').getTime(), + $friendShipBadge: new Uint8Array([ + 98, 97, 100, 103, 101, 45, 115, 116, 114, 105, 110, 103, + ]), }, ], }, @@ -233,7 +240,7 @@ await test('update', async (t) => { deepEqual( await db - .query('user', userWithFriends) + .query2('user', userWithFriends) .include( 'name', 'friends.$bestFriend', @@ -265,27 +272,33 @@ await test('update', async (t) => { ) await throws(async () => { + // @ts-expect-error db.create('user', { friends: { id: user1 } }) }, 'Expected array for references field friends') await throws(async () => { + // @ts-expect-error db.update('user', userWithFriends, { friends: user1 }) }, 'Expected array or object for references field friends') await throws(async () => { + // @ts-expect-error db.create('user', { friends: [user1, 'not an id'] }) }, 'Invalid reference "not an id" for field friends') await throws(async () => { + // @ts-expect-error db.create('user', { friends: [user1, { $bestFriend: true }] }) }, 'Missing id in reference object for field friends') await throws(async () => { + // @ts-expect-error db.create('user', { friends: [user1, { id: invalidId }] }) }, 'Invalid reference "usr_invalid" for field friends') await throws(async () => { db.create('user', { + // @ts-expect-error friends: [{ id: user1, $bestFriend: 'yes' }], }) }, 'Incorrect type for $bestFriend expected boolean got string') @@ -298,39 +311,46 @@ await test('update', async (t) => { await throws(async () => { db.create('user', { + // @ts-expect-error friends: [{ id: user1, $friendShipBadge: [1, 2, 3] }], }) }, 'Incorrect type for $friendShipBadge expected binary got array') await throws(async () => { db.create('user', { + // @ts-expect-error friends: [{ id: user1, $friendLevel: 9000 }], }) }, 'Unknown edge field "$friendLevel" for reference field friends') await throws(async () => { db.update('user', userWithFriends, { + // @ts-expect-error friends: { add: [{ id: user1, $bestFriend: 'yes' }] }, }) }, 'Incorrect type for $bestFriend expected boolean got string') await throws(async () => { db.update('user', userWithFriends, { + // @ts-expect-error friends: { add: [{ $bestFriend: true }] }, }) }, 'Missing id in reference object for field add in friends') await throws(async () => { db.update('user', userWithFriends, { + // @ts-expect-error friends: { delete: [{ id: user1 }] }, }) }, 'Cannot have edge data in delete operation for field friends') await throws(async () => { + // @ts-expect-error db.update('user', userWithFriends, { friends: { add: {} } }) }, 'Expected array for field add in friends') await throws(async () => { + // @ts-expect-error db.update('user', userWithFriends, { friends: { delete: 123 } }) }, 'Expected array for field delete in friends') diff --git a/test/vector.ts b/test/vector.ts index e6d8ce70a0..8b82da45f7 100644 --- a/test/vector.ts +++ b/test/vector.ts @@ -1,7 +1,8 @@ -import { BasedDb } from '../src/index.js' +import { DbClient } from '../src/index.js' import test from './shared/test.js' import { deepEqual, equal } from './shared/assert.js' import { equals } from '../src/utils/index.js' +import { testDb } from './shared/index.js' const data = { cat: [1.5, -0.4, 7.2, 19.6, 20.2], @@ -12,14 +13,10 @@ const data = { car: [81.6, -72.1, 16, -20.2, 102], } -async function initDb(t) { - const db = new BasedDb({ - path: t.tmp, - }) - await db.start({ clean: true }) - t.after(() => t.backup(db)) - - await db.setSchema({ +async function initDb( + t: Parameters[1]>[0], +): Promise { + const client = await testDb(t, { types: { data: { props: { @@ -37,19 +34,20 @@ async function initDb(t) { }) for (const name in data) { - db.create('data', { + client.create('data', { a: new Float32Array(data[name]), name: name, }) } - await db.drain() + await client.drain() - return db + return client } await test('vector set/get', async (t) => { const db = await initDb(t) - const res = (await db.query('data').get()).toObject() + + const res = await db.query2('data').include('name', 'a').get() for (const r of res) { const a = new Uint8Array(r.a.buffer, 0, r.a.byteLength) const b = new Uint8Array(new Float32Array(data[r.name]).buffer) @@ -57,61 +55,34 @@ await test('vector set/get', async (t) => { } }) -await test('vector set wrong size', async (t) => { - const db = await initDb(t) - - const a = db.create('data', { - a: new Float32Array([1, 2, 3]), - name: 'hehe', - }) - const b = db.create('data', { - a: new Float32Array([1, 2, 3, 4, 5, 6]), - name: 'hehe', - }) - await db.drain() - - const [ra, rb] = await db - .query('data') - .filter('id', '=', [await a, await b]) - .include('a') - .get() - - // RFE is truncation right? - deepEqual(ra.a.length, 5) - deepEqual(rb.a.length, 5) -}) - await test('query by vector', async (t) => { const db = await initDb(t) const r1 = await db - .query('data') + .query2('data') .include('name') .filter('a', '=', new Float32Array(data['car'].slice(0, 5))) .get() - .toObject() deepEqual(r1[0].name, 'car') const r2 = await db - .query('data') + .query2('data') .include('name') .filter('a', '=', new Float32Array(data['car'])) .get() - .toObject() deepEqual(r2.length, 1) }) -// this is broken! see https://linear.app/1ce/issue/FDN-1302 needs alignment! +// FIXME this is broken! see https://linear.app/1ce/issue/FDN-1302 needs alignment! await test.skip('vector like', async (t) => { const db = await initDb(t) const fruit = new Float32Array([-5.1, 2.9, 0.8, 7.9, 3.1]) const res = await db - .query('data') + .query2('data') .include('name') .filter('a', 'like', fruit, { fn: 'euclideanDistance', score: 1 }) .get() - .toObject() deepEqual(res, [ { id: 3, name: 'apple' }, @@ -130,12 +101,11 @@ await test.skip('vector like', async (t) => { deepEqual( await db - .query('data') + .query2('data') .include('name') .range(0, 1e6) .filter('a', 'like', fruit, { fn: 'euclideanDistance', score: 1 }) - .get() - .toObject(), + .get(), [ { id: 3, @@ -164,18 +134,19 @@ await test('search', async (t) => { await db.drain() - deepEqual( - await db - .query('data') - .include('id', 'name') - .range(0, 3) - .search(fruit, 'a', { fn: 'euclideanDistance', score: 1 }) - .get(), - [ - { id: 3, $searchScore: 0.6100001335144043, name: 'apple' }, - { id: 4, $searchScore: 0.7999996542930603, name: 'strawberry' }, - ], - ) + // TODO add search + // deepEqual( + // await db + // .query2('data') + // .include('id', 'name') + // .range(0, 3) + // .search(fruit, 'a', { fn: 'euclideanDistance', score: 1 }) + // .get(), + // [ + // { id: 3, $searchScore: 0.6100001335144043, name: 'apple' }, + // { id: 4, $searchScore: 0.7999996542930603, name: 'strawberry' }, + // ], + // ) }) await test('vector misalign', async (t) => { diff --git a/test/youzi.ts b/test/youzi.ts deleted file mode 100644 index 0264210cd9..0000000000 --- a/test/youzi.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { BasedDb } from '../src/index.js' -import { equal } from './shared/assert.js' -import test from './shared/test.js' - -await test('reffies', async (t) => { - const db = new BasedDb({ - path: t.tmp, - }) - - await db.start({ clean: true }) - - t.after(() => t.backup(db)) - - await db.setSchema({ - types: { - user: { - name: 'string', - others: { - items: { - ref: 'user', - prop: 'others', - // $rating: 'number', - }, - }, - }, - }, - }) - - const userId = await db.create('user', { name: 'a' }) - - await db.create('user', { - // others: [ - // { - // id: userId, - // // $rating: 1, - // }, - // ], - others: [userId], - name: 'bxxxxxxxx', - }) - - const res = await db.query('user').include('*', '**').get().toObject() - - console.dir(res, { depth: null }) -}) diff --git a/testType.ts b/testType.ts new file mode 100644 index 0000000000..09fff2abb9 --- /dev/null +++ b/testType.ts @@ -0,0 +1,17 @@ +import type { ResolveInclude, PickOutput } from './src/db-client/query2/types.js'; +import type { BasedQuery2 } from './src/db-client/query2/index.js'; + +type TestSchema = { + types: { + user: { + props: { + name: { type: 'string' }; + }; + }; + }; +}; + +type Result = PickOutput; + +let a: Result = { id: 1, name: 'Luigi' }; + diff --git a/testType2.ts b/testType2.ts new file mode 100644 index 0000000000..935fbc8801 --- /dev/null +++ b/testType2.ts @@ -0,0 +1,24 @@ +import { testDb } from './test/shared/test.js'; +import type { DbClient } from './src/sdk.js'; + +async function main() { + const drip = ['dope', 'cringe', 'meh'] + const db = await testDb({} as any, { + locales: { + en: {}, + it: { fallback: ['en'] }, + fi: { fallback: ['en'] }, + }, + types: { + user: { + props: { + rating: 'uint32', + name: 'string', + } + } + } + }); + + const res = await db.query2('user').filter('name', 'includes', '').include('name').get(); + let b: { id: number, name: string }[] = res; +} diff --git a/testType3.ts b/testType3.ts new file mode 100644 index 0000000000..0e846f4ac8 --- /dev/null +++ b/testType3.ts @@ -0,0 +1,24 @@ +import testDb from './test/shared/test.js'; +import type { DbClient } from './src/sdk.js'; + +async function main() { + const drip = ['dope', 'cringe', 'meh'] + const db = await testDb({} as any, { + locales: { + en: {}, + it: { fallback: ['en'] }, + fi: { fallback: ['en'] }, + }, + types: { + user: { + props: { + rating: 'uint32', + name: 'string', + } + } + } + }); + + const res = await db.query2('user').filter('name', 'includes', '').include('name').get(); + let b: { id: number, name: string }[] = res; +} diff --git a/tsconfig.build.json b/tsconfig.build.json index 5e4139c281..f3798fa952 100644 --- a/tsconfig.build.json +++ b/tsconfig.build.json @@ -4,5 +4,6 @@ "noEmit": false, "rootDir": "src" }, - "include": ["src"] + "include": ["src"], + "exclude": ["dist", "**/_*"] } diff --git a/tsconfig.json b/tsconfig.json index a8678cdf2c..e5f4ff1efb 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -10,8 +10,9 @@ "noEmit": true, "jsx": "react", "declarationMap": true, + "strictPropertyInitialization": true, "pretty": true }, "include": ["src", "test", "scripts"], - "exclude": ["dist"] + "exclude": ["dist", "**/_*"] } diff --git a/tsconfig.test.json b/tsconfig.test.json new file mode 100644 index 0000000000..1719acded3 --- /dev/null +++ b/tsconfig.test.json @@ -0,0 +1,18 @@ +{ + "extends": "@saulx/tsconfig/default.json", + "compilerOptions": { + "outDir": "dist", + "esModuleInterop": true, + "allowJs": true, + "noPropertyAccessFromIndexSignature": false, + "rootDir": ".", + "strictNullChecks": true, + "noEmit": true, + "jsx": "react", + "declarationMap": true, + "strictPropertyInitialization": true, + "pretty": true + }, + "include": ["test"], + "exclude": ["dist", "**/_*"] +}